Merge p3yk branch with the trunk up to revision 45595. This breaks a fair
number of tests, all because of the codecs/_multibytecodecs issue described
here (it's not a Py3K issue, just something Py3K discovers):
http://mail.python.org/pipermail/python-dev/2006-April/064051.html

Hye-Shik Chang promised to look for a fix, so no need to fix it here. The
tests that are expected to break are:

test_codecencodings_cn
test_codecencodings_hk
test_codecencodings_jp
test_codecencodings_kr
test_codecencodings_tw
test_codecs
test_multibytecodec

This merge fixes an actual test failure (test_weakref) in this branch,
though, so I believe merging is the right thing to do anyway.
diff --git a/Demo/parser/unparse.py b/Demo/parser/unparse.py
index dd75c22..510cdb0 100644
--- a/Demo/parser/unparse.py
+++ b/Demo/parser/unparse.py
@@ -1,5 +1,8 @@
 "Usage: unparse.py <path to source file>"
 import sys
+import _ast
+import cStringIO
+import os
 
 class Unparser:
     """Methods in this class recursively traverse an AST and
@@ -70,6 +73,18 @@
             if a.asname:
                 self.write(" as "+a.asname)
 
+    def _ImportFrom(self, t):
+        self.fill("from ")
+        self.write(t.module)
+        self.write(" import ")
+        for i, a in enumerate(t.names):
+            if i == 0:
+                self.write(", ")
+            self.write(a.name)
+            if a.asname:
+                self.write(" as "+a.asname)
+        # XXX(jpe) what is level for?
+
     def _Assign(self, t):
         self.fill()
         for target in t.targets:
@@ -88,6 +103,36 @@
         if t.value:
             self.dispatch(t.value)
 
+    def _Pass(self, t):
+        self.fill("pass")
+
+    def _Break(self, t):
+        self.fill("break")
+
+    def _Continue(self, t):
+        self.fill("continue")
+
+    def _Delete(self, t):
+        self.fill("del ")
+        self.dispatch(t.targets)
+
+    def _Assert(self, t):
+        self.fill("assert ")
+        self.dispatch(t.test)
+        if t.msg:
+            self.write(", ")
+            self.dispatch(t.msg)
+
+    def _Exec(self, t):
+        self.fill("exec ")
+        self.dispatch(t.body)
+        if t.globals:
+            self.write(" in ")
+            self.dispatch(t.globals)
+        if t.locals:
+            self.write(", ")
+            self.dispatch(t.locals)
+
     def _Print(self, t):
         self.fill("print ")
         do_comma = False
@@ -102,6 +147,67 @@
         if not t.nl:
             self.write(",")
 
+    def _Global(self, t):
+        self.fill("global")
+        for i, n in enumerate(t.names):
+            if i != 0:
+                self.write(",")
+            self.write(" " + n)
+
+    def _Yield(self, t):
+        self.fill("yield")
+        if t.value:
+            self.write(" (")
+            self.dispatch(t.value)
+            self.write(")")
+
+    def _Raise(self, t):
+        self.fill('raise ')
+        if t.type:
+            self.dispatch(t.type)
+        if t.inst:
+            self.write(", ")
+            self.dispatch(t.inst)
+        if t.tback:
+            self.write(", ")
+            self.dispatch(t.tback)
+
+    def _TryExcept(self, t):
+        self.fill("try")
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+        for ex in t.handlers:
+            self.dispatch(ex)
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _TryFinally(self, t):
+        self.fill("try")
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
+        self.fill("finally")
+        self.enter()
+        self.dispatch(t.finalbody)
+        self.leave()
+
+    def _excepthandler(self, t):
+        self.fill("except ")
+        if t.type:
+            self.dispatch(t.type)
+        if t.name:
+            self.write(", ")
+            self.dispatch(t.name)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
     def _ClassDef(self, t):
         self.write("\n")
         self.fill("class "+t.name)
@@ -119,23 +225,11 @@
         self.write("\n")
         self.fill("def "+t.name + "(")
         self.dispatch(t.args)
+        self.write(")")
         self.enter()
         self.dispatch(t.body)
         self.leave()
 
-    def _If(self, t):
-        self.fill("if ")
-        self.dispatch(t.test)
-        self.enter()
-        # XXX elif?
-        self.dispatch(t.body)
-        self.leave()
-        if t.orelse:
-            self.fill("else")
-            self.enter()
-            self.dispatch(t.orelse)
-            self.leave()
-
     def _For(self, t):
         self.fill("for ")
         self.dispatch(t.target)
@@ -150,6 +244,41 @@
             self.dispatch(t.orelse)
             self.leave
 
+    def _If(self, t):
+        self.fill("if ")
+        self.dispatch(t.test)
+        self.enter()
+        # XXX elif?
+        self.dispatch(t.body)
+        self.leave()
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave()
+
+    def _While(self, t):
+        self.fill("while ")
+        self.dispatch(t.test)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+        if t.orelse:
+            self.fill("else")
+            self.enter()
+            self.dispatch(t.orelse)
+            self.leave
+
+    def _With(self, t):
+        self.fill("with ")
+        self.dispatch(t.context_expr)
+        if t.optional_vars:
+            self.write(" as ")
+            self.dispatch(t.optional_vars)
+        self.enter()
+        self.dispatch(t.body)
+        self.leave()
+
     # expr
     def _Str(self, tree):
         self.write(repr(tree.s))
@@ -157,6 +286,11 @@
     def _Name(self, t):
         self.write(t.id)
 
+    def _Repr(self, t):
+        self.write("`")
+        self.dispatch(t.value)
+        self.write("`")
+
     def _Num(self, t):
         self.write(repr(t.n))
 
@@ -167,6 +301,37 @@
             self.write(", ")
         self.write("]")
 
+    def _ListComp(self, t):
+        self.write("[")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write("]")
+
+    def _GeneratorExp(self, t):
+        self.write("(")
+        self.dispatch(t.elt)
+        for gen in t.generators:
+            self.dispatch(gen)
+        self.write(")")
+
+    def _comprehension(self, t):
+        self.write(" for ")
+        self.dispatch(t.target)
+        self.write(" in ")
+        self.dispatch(t.iter)
+        for if_clause in t.ifs:
+            self.write(" if ")
+            self.dispatch(if_clause)
+
+    def _IfExp(self, t):
+        self.dispatch(t.body)
+        self.write(" if ")
+        self.dispatch(t.test)
+        if t.orelse:
+            self.write(" else ")
+            self.dispatch(t.orelse)
+
     def _Dict(self, t):
         self.write("{")
         for k,v in zip(t.keys, t.values):
@@ -194,8 +359,8 @@
         self.write(")")
 
     binop = { "Add":"+", "Sub":"-", "Mult":"*", "Div":"/", "Mod":"%",
-                    "RShift":"<<", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
-                    "FloorDiv":"//"}
+                    "LShift":">>", "RShift":"<<", "BitOr":"|", "BitXor":"^", "BitAnd":"&",
+                    "FloorDiv":"//", "Pow": "**"}
     def _BinOp(self, t):
         self.write("(")
         self.dispatch(t.left)
@@ -213,6 +378,15 @@
             self.dispatch(e)
             self.write(")")
 
+    boolops = {_ast.And: 'and', _ast.Or: 'or'}
+    def _BoolOp(self, t):
+        self.write("(")
+        self.dispatch(t.values[0])
+        for v in t.values[1:]:
+            self.write(" %s " % self.boolops[t.op.__class__])
+            self.dispatch(v)
+        self.write(")")
+
     def _Attribute(self,t):
         self.dispatch(t.value)
         self.write(".")
@@ -234,12 +408,12 @@
             if comma: self.write(", ")
             else: comma = True
             self.write("*")
-            self.dispatch(t.stararg)
+            self.dispatch(t.starargs)
         if t.kwargs:
             if comma: self.write(", ")
             else: comma = True
             self.write("**")
-            self.dispatch(t.stararg)
+            self.dispatch(t.kwargs)
         self.write(")")
 
     def _Subscript(self, t):
@@ -249,6 +423,9 @@
         self.write("]")
 
     # slice
+    def _Ellipsis(self, t):
+        self.write("...")
+
     def _Index(self, t):
         self.dispatch(t.value)
 
@@ -262,6 +439,12 @@
             self.write(":")
             self.dispatch(t.step)
 
+    def _ExtSlice(self, t):
+        for i, d in enumerate(t.dims):
+            if i != 0:
+                self.write(': ')
+            self.dispatch(d)
+
     # others
     def _arguments(self, t):
         first = True
@@ -283,13 +466,51 @@
         if t.kwarg:
             if first:first = False
             else: self.write(", ")
-            self.write("**"+self.kwarg)
-        self.write(")")
+            self.write("**"+t.kwarg)
 
-def roundtrip(filename):
+    def _keyword(self, t):
+        self.write(t.arg)
+        self.write("=")
+        self.dispatch(t.value)
+
+    def _Lambda(self, t):
+        self.write("lambda ")
+        self.dispatch(t.args)
+        self.write(": ")
+        self.dispatch(t.body)
+
+def roundtrip(filename, output=sys.stdout):
     source = open(filename).read()
     tree = compile(source, filename, "exec", 0x400)
-    Unparser(tree)
+    Unparser(tree, output)
+
+
+
+def testdir(a):
+    try:
+        names = [n for n in os.listdir(a) if n.endswith('.py')]
+    except OSError:
+        print >> sys.stderr, "Directory not readable: %s" % a
+    else:
+        for n in names:
+            fullname = os.path.join(a, n)
+            if os.path.isfile(fullname):
+                output = cStringIO.StringIO()
+                print 'Testing %s' % fullname
+                try:
+                    roundtrip(fullname, output)
+                except Exception, e:
+                    print '  Failed to compile, exception is %s' % repr(e)
+            elif os.path.isdir(fullname):
+                testdir(fullname)
+
+def main(args):
+    if args[0] == '--testdir':
+        for a in args[1:]:
+            testdir(a)
+    else:
+        for a in args:
+            roundtrip(a)
 
 if __name__=='__main__':
-    roundtrip(sys.argv[1])
+    main(sys.argv[1:])
diff --git a/Demo/pdist/makechangelog.py b/Demo/pdist/makechangelog.py
index b26f30b..1ffa588 100755
--- a/Demo/pdist/makechangelog.py
+++ b/Demo/pdist/makechangelog.py
@@ -6,7 +6,7 @@
 
 import sys
 import string
-import regex
+import re
 import getopt
 import time
 
@@ -35,9 +35,9 @@
     for rev in allrevs:
         formatrev(rev, prefix)
 
-parsedateprog = regex.compile(
-    '^date: \([0-9]+\)/\([0-9]+\)/\([0-9]+\) ' +
-    '\([0-9]+\):\([0-9]+\):\([0-9]+\);  author: \([^ ;]+\)')
+parsedateprog = re.compile(
+    '^date: ([0-9]+)/([0-9]+)/([0-9]+) ' +
+    '([0-9]+):([0-9]+):([0-9]+);  author: ([^ ;]+)')
 
 authormap = {
     'guido': 'Guido van Rossum  <guido@cnri.reston.va.us>',
@@ -70,7 +70,7 @@
         print
         print
 
-startprog = regex.compile("^Working file: \(.*\)$")
+startprog = re.compile("^Working file: (.*)$")
 
 def getnextfile(f):
     while 1:
diff --git a/Demo/pdist/rcsbump b/Demo/pdist/rcsbump
index e4e9ed5..4fa078e 100755
--- a/Demo/pdist/rcsbump
+++ b/Demo/pdist/rcsbump
@@ -6,12 +6,12 @@
 # Python script for bumping up an RCS major revision number.
 
 import sys
-import regex
+import re
 import rcslib
 import string
 
 WITHLOCK = 1
-majorrev_re = regex.compile('^[0-9]+')
+majorrev_re = re.compile('^[0-9]+')
 
 dir = rcslib.RCS()
 
diff --git a/Demo/pdist/rcslib.py b/Demo/pdist/rcslib.py
index d5f7b65..3e63869 100755
--- a/Demo/pdist/rcslib.py
+++ b/Demo/pdist/rcslib.py
@@ -8,7 +8,7 @@
 
 import fnmatch
 import os
-import regsub
+import re
 import string
 import tempfile
 
@@ -150,7 +150,7 @@
             cmd = 'ci %s%s -t%s %s %s' % \
                   (lockflag, rev, f.name, otherflags, name)
         else:
-            message = regsub.gsub('\([\\"$`]\)', '\\\\\\1', message)
+            message = re.sub(r'([\"$`])', r'\\\1', message)
             cmd = 'ci %s%s -m"%s" %s %s' % \
                   (lockflag, rev, message, otherflags, name)
         return self._system(cmd)
diff --git a/Demo/scripts/eqfix.py b/Demo/scripts/eqfix.py
index 165ca49..35c43aa 100755
--- a/Demo/scripts/eqfix.py
+++ b/Demo/scripts/eqfix.py
@@ -29,7 +29,7 @@
 # into a program for a different change to Python programs...
 
 import sys
-import regex
+import re
 import os
 from stat import *
 import string
@@ -53,7 +53,7 @@
             if fix(arg): bad = 1
     sys.exit(bad)
 
-ispythonprog = regex.compile('^[a-zA-Z0-9_]+\.py$')
+ispythonprog = re.compile('^[a-zA-Z0-9_]+\.py$')
 def ispython(name):
     return ispythonprog.match(name) >= 0
 
@@ -104,7 +104,7 @@
         if lineno == 1 and g is None and line[:2] == '#!':
             # Check for non-Python scripts
             words = string.split(line[2:])
-            if words and regex.search('[pP]ython', words[0]) < 0:
+            if words and re.search('[pP]ython', words[0]) < 0:
                 msg = filename + ': ' + words[0]
                 msg = msg + ' script; not fixed\n'
                 err(msg)
diff --git a/Demo/scripts/ftpstats.py b/Demo/scripts/ftpstats.py
index b37a58d..5c1599e 100755
--- a/Demo/scripts/ftpstats.py
+++ b/Demo/scripts/ftpstats.py
@@ -13,12 +13,12 @@
 
 import os
 import sys
-import regex
+import re
 import string
 import getopt
 
-pat = '^\([a-zA-Z0-9 :]*\)!\(.*\)!\(.*\)!\([<>].*\)!\([0-9]+\)!\([0-9]+\)$'
-prog = regex.compile(pat)
+pat = '^([a-zA-Z0-9 :]*)!(.*)!(.*)!([<>].*)!([0-9]+)!([0-9]+)$'
+prog = re.compile(pat)
 
 def main():
     maxitems = 25
diff --git a/Demo/scripts/mboxconvert.py b/Demo/scripts/mboxconvert.py
index 502d774..8c462f3 100755
--- a/Demo/scripts/mboxconvert.py
+++ b/Demo/scripts/mboxconvert.py
@@ -10,7 +10,7 @@
 import os
 import stat
 import getopt
-import regex
+import re
 
 def main():
     dofile = mmdf
@@ -45,7 +45,7 @@
     if sts:
         sys.exit(sts)
 
-numeric = regex.compile('[1-9][0-9]*')
+numeric = re.compile('[1-9][0-9]*')
 
 def mh(dir):
     sts = 0
diff --git a/Demo/scripts/update.py b/Demo/scripts/update.py
index 32ad6c8..c936026 100755
--- a/Demo/scripts/update.py
+++ b/Demo/scripts/update.py
@@ -8,10 +8,10 @@
 
 import os
 import sys
-import regex
+import re
 
-pat = '^\([^: \t\n]+\):\([1-9][0-9]*\):'
-prog = regex.compile(pat)
+pat = '^([^: \t\n]+):([1-9][0-9]*):'
+prog = re.compile(pat)
 
 class FileObj:
     def __init__(self, filename):
diff --git a/Demo/sockets/mcast.py b/Demo/sockets/mcast.py
index 122dad7..1abd305 100755
--- a/Demo/sockets/mcast.py
+++ b/Demo/sockets/mcast.py
@@ -13,7 +13,6 @@
 import sys
 import time
 import struct
-import regsub
 from socket import *
 
 
diff --git a/Demo/tix/grid.py b/Demo/tix/grid.py
new file mode 100644
index 0000000..07ca87f
--- /dev/null
+++ b/Demo/tix/grid.py
@@ -0,0 +1,28 @@
+###
+import Tix as tk
+from pprint import pprint
+
+r= tk.Tk()
+r.title("test")
+
+l=tk.Label(r, name="a_label")
+l.pack()
+
+class MyGrid(tk.Grid):
+    def __init__(self, *args, **kwargs):
+        kwargs['editnotify']= self.editnotify
+        tk.Grid.__init__(self, *args, **kwargs)
+    def editnotify(self, x, y):
+        return True
+
+g = MyGrid(r, name="a_grid",
+selectunit="cell")
+g.pack(fill=tk.BOTH)
+for x in xrange(5):
+    for y in xrange(5):
+        g.set(x,y,text=str((x,y)))
+
+c = tk.Button(r, text="Close", command=r.destroy)
+c.pack()
+
+tk.mainloop()
diff --git a/Demo/tkinter/guido/ManPage.py b/Demo/tkinter/guido/ManPage.py
index 911961e..221af88 100755
--- a/Demo/tkinter/guido/ManPage.py
+++ b/Demo/tkinter/guido/ManPage.py
@@ -1,6 +1,6 @@
 # Widget to display a man page
 
-import regex
+import re
 from Tkinter import *
 from Tkinter import _tkinter
 from ScrolledText import ScrolledText
@@ -11,10 +11,10 @@
 
 # XXX Recognizing footers is system dependent
 # (This one works for IRIX 5.2 and Solaris 2.2)
-footerprog = regex.compile(
+footerprog = re.compile(
         '^     Page [1-9][0-9]*[ \t]+\|^.*Last change:.*[1-9][0-9]*\n')
-emptyprog = regex.compile('^[ \t]*\n')
-ulprog = regex.compile('^[ \t]*[Xv!_][Xv!_ \t]*\n')
+emptyprog = re.compile('^[ \t]*\n')
+ulprog = re.compile('^[ \t]*[Xv!_][Xv!_ \t]*\n')
 
 # Basic Man Page class -- does not disable editing
 class EditableManPage(ScrolledText):
diff --git a/Demo/tkinter/guido/mbox.py b/Demo/tkinter/guido/mbox.py
index 9b16f6b..3c36d88 100755
--- a/Demo/tkinter/guido/mbox.py
+++ b/Demo/tkinter/guido/mbox.py
@@ -4,7 +4,7 @@
 
 import os
 import sys
-import regex
+import re
 import getopt
 import string
 import mhlib
@@ -157,7 +157,7 @@
     scanmenu.unpost()
     scanmenu.invoke('active')
 
-scanparser = regex.compile('^ *\([0-9]+\)')
+scanparser = re.compile('^ *([0-9]+)')
 
 def open_folder(e=None):
     global folder, mhf
diff --git a/Demo/tkinter/guido/tkman.py b/Demo/tkinter/guido/tkman.py
index 11d9690..6b0b641 100755
--- a/Demo/tkinter/guido/tkman.py
+++ b/Demo/tkinter/guido/tkman.py
@@ -5,7 +5,7 @@
 import sys
 import os
 import string
-import regex
+import re
 from Tkinter import *
 from ManPage import ManPage
 
@@ -208,15 +208,15 @@
             print 'Empty search string'
             return
         if not self.casevar.get():
-            map = regex.casefold
+            map = re.IGNORECASE
         else:
             map = None
         try:
             if map:
-                prog = regex.compile(search, map)
+                prog = re.compile(search, map)
             else:
-                prog = regex.compile(search)
-        except regex.error, msg:
+                prog = re.compile(search)
+        except re.error, msg:
             self.frame.bell()
             print 'Regex error:', msg
             return
diff --git a/Doc/Makefile.deps b/Doc/Makefile.deps
index 832402d..20c0688 100644
--- a/Doc/Makefile.deps
+++ b/Doc/Makefile.deps
@@ -126,6 +126,7 @@
 	lib/libwarnings.tex \
 	lib/libimp.tex \
 	lib/libzipimport.tex \
+	lib/librunpy.tex \
 	lib/libpkgutil.tex \
 	lib/libparser.tex \
 	lib/libbltin.tex \
diff --git a/Doc/api/concrete.tex b/Doc/api/concrete.tex
index 5521b80..1982bae 100644
--- a/Doc/api/concrete.tex
+++ b/Doc/api/concrete.tex
@@ -156,7 +156,7 @@
   Create a new integer object with a value of \var{ival}.
 
   The current implementation keeps an array of integer objects for all
-  integers between \code{-1} and \code{100}, when you create an int in
+  integers between \code{-5} and \code{256}, when you create an int in
   that range you actually just get back a reference to the existing
   object. So it should be possible to change the value of \code{1}.  I
   suspect the behaviour of Python in this case is undefined. :-)
@@ -333,7 +333,9 @@
   The pointer value can be retrieved from the resulting value using
   \cfunction{PyLong_AsVoidPtr()}.
   \versionadded{1.5.2}
-\end{cfuncdesc}
+  \versionchanged[If the integer is larger than LONG_MAX,
+  a positive long integer is returned]{2.5}
+ \end{cfuncdesc}
 
 \begin{cfuncdesc}{long}{PyLong_AsLong}{PyObject *pylong}
   Return a C \ctype{long} representation of the contents of
@@ -394,6 +396,8 @@
   produce a usable \ctype{void} pointer for values created with
   \cfunction{PyLong_FromVoidPtr()}.
   \versionadded{1.5.2}
+  \versionchanged[For values outside 0..LONG_MAX, both signed and
+  unsigned integers are acccepted]{2.5}
 \end{cfuncdesc}
 
 
@@ -1803,8 +1807,9 @@
 
 \begin{cfuncdesc}{PyObject*}{PyList_GetItem}{PyObject *list, Py_ssize_t index}
   Return the object at position \var{pos} in the list pointed to by
-  \var{p}.  If \var{pos} is out of bounds, return \NULL{} and set an
-  \exception{IndexError} exception.
+  \var{p}.  The position must be positive, indexing from the end of the
+  list is not supported.  If \var{pos} is out of bounds, return \NULL{}
+  and set an \exception{IndexError} exception.
 \end{cfuncdesc}
 
 \begin{cfuncdesc}{PyObject*}{PyList_GET_ITEM}{PyObject *list, Py_ssize_t i}
@@ -2264,8 +2269,8 @@
 \begin{cfuncdesc}{PyObject*}{PyFunction_New}{PyObject *code,
                                              PyObject *globals}
   Return a new function object associated with the code object
-  \var{code}. \var{globals} must be a dictionary with the the global
-  varaibles accessible to the function.
+  \var{code}. \var{globals} must be a dictionary with the global
+  variables accessible to the function.
 
   The function's docstring, name and \var{__module__} are retrieved
   from the code object, the argument defaults and closure are set to
@@ -2811,6 +2816,7 @@
 
 \begin{cfuncdesc}{PyObject*}{PyGen_New}{PyFrameObject *frame}
   Create and return a new generator object based on the \var{frame} object.
+  A reference to \var{frame} is stolen by this function.
   The parameter must not be \NULL{}.
 \end{cfuncdesc}
 
@@ -3025,9 +3031,7 @@
 \cfunction{PyNumber_Or()}, \cfunction{PyNumber_Xor()},
 \cfunction{PyNumber_InPlaceAdd()}, \cfunction{PyNumber_InPlaceSubtract()},
 \cfunction{PyNumber_InPlaceOr()}, and \cfunction{PyNumber_InPlaceXor()}).
-Note, \cfunction{PyNumber_InPlaceSubtract()} is also useful clearing
-clearing a set (\code{s-=s}).                         
-                      
+
 \begin{ctypedesc}{PySetObject}
   This subtype of \ctype{PyObject} is used to hold the internal data for
   both \class{set} and \class{frozenset} objects.  It is like a
@@ -3111,7 +3115,6 @@
   \class{frozenset}, or an instance of a subtype.                         
 \end{cfuncdesc}
 
-
 The following functions are available for instances of \class{set} or
 its subtypes but not for instances of \class{frozenset} or its subtypes.
 
@@ -3142,4 +3145,6 @@
   of \class{set} or its subtype.                        
 \end{cfuncdesc}
 
-
+\begin{cfuncdesc}{int}{PySet_Clear}{PyObject *set}
+  Empty an existing set of all elements.
+\end{cfuncdesc}
diff --git a/Doc/api/intro.tex b/Doc/api/intro.tex
index 608d562..96f18ec 100644
--- a/Doc/api/intro.tex
+++ b/Doc/api/intro.tex
@@ -569,8 +569,11 @@
 Sometimes, it is desirable to ``uninitialize'' Python.  For instance, 
 the application may want to start over (make another call to 
 \cfunction{Py_Initialize()}) or the application is simply done with its 
-use of Python and wants to free all memory allocated by Python.  This
+use of Python and wants to free memory allocated by Python.  This
 can be accomplished by calling \cfunction{Py_Finalize()}.  The function
 \cfunction{Py_IsInitialized()}\ttindex{Py_IsInitialized()} returns
 true if Python is currently in the initialized state.  More
 information about these functions is given in a later chapter.
+Notice that \cfunction{Py_Finalize} does \emph{not} free all memory
+allocated by the Python interpreter, e.g. memory allocated by extension
+modules currently cannot be released.
diff --git a/Doc/api/memory.tex b/Doc/api/memory.tex
index 3dbe9a5..4bc2c7a 100644
--- a/Doc/api/memory.tex
+++ b/Doc/api/memory.tex
@@ -195,9 +195,7 @@
 In addition to the functions aimed at handling raw memory blocks from
 the Python heap, objects in Python are allocated and released with
 \cfunction{PyObject_New()}, \cfunction{PyObject_NewVar()} and
-\cfunction{PyObject_Del()}, or with their corresponding macros
-\cfunction{PyObject_NEW()}, \cfunction{PyObject_NEW_VAR()} and
-\cfunction{PyObject_DEL()}.
+\cfunction{PyObject_Del()}.
 
 These will be explained in the next chapter on defining and
 implementing new object types in C.
diff --git a/Doc/api/newtypes.tex b/Doc/api/newtypes.tex
index b7e25b9..2d758b0 100644
--- a/Doc/api/newtypes.tex
+++ b/Doc/api/newtypes.tex
@@ -62,23 +62,6 @@
   after this call as the memory is no longer a valid Python object.
 \end{cfuncdesc}
 
-\begin{cfuncdesc}{\var{TYPE}*}{PyObject_NEW}{TYPE, PyTypeObject *type}
-  Macro version of \cfunction{PyObject_New()}, to gain performance at
-  the expense of safety.  This does not check \var{type} for a \NULL{}
-  value.
-\end{cfuncdesc}
-
-\begin{cfuncdesc}{\var{TYPE}*}{PyObject_NEW_VAR}{TYPE, PyTypeObject *type,
-                                                Py_ssize_t size}
-  Macro version of \cfunction{PyObject_NewVar()}, to gain performance
-  at the expense of safety.  This does not check \var{type} for a
-  \NULL{} value.
-\end{cfuncdesc}
-
-\begin{cfuncdesc}{void}{PyObject_DEL}{PyObject *op}
-  Macro version of \cfunction{PyObject_Del()}.
-\end{cfuncdesc}
-
 \begin{cfuncdesc}{PyObject*}{Py_InitModule}{char *name,
                                             PyMethodDef *methods}
   Create a new module object based on a name and table of functions,
diff --git a/Doc/api/refcounts.dat b/Doc/api/refcounts.dat
index f3bd32e..7bba011 100644
--- a/Doc/api/refcounts.dat
+++ b/Doc/api/refcounts.dat
@@ -31,6 +31,9 @@
 # The parameter names are as they appear in the API manual, not the source
 # code. 
 
+PyBool_FromLong:PyObject*::+1:
+PyBool_FromLong:long:v:0:
+
 PyBuffer_FromObject:PyObject*::+1:
 PyBuffer_FromObject:PyObject*:base:+1:
 PyBuffer_FromObject:int:offset::
@@ -110,6 +113,35 @@
 PyComplex_RealAsDouble:double:::
 PyComplex_RealAsDouble:PyObject*:op:0:
 
+PyDate_FromDate:PyObject*::+1:
+PyDate_FromDate:int:year::
+PyDate_FromDate:int:month::
+PyDate_FromDate:int:day::
+
+PyDate_FromTimestamp:PyObject*::+1:
+PyDate_FromTimestamp:PyObject*:args:0:
+
+PyDateTime_FromDateAndTime:PyObject*::+1:
+PyDateTime_FromDateAndTime:int:year::
+PyDateTime_FromDateAndTime:int:month::
+PyDateTime_FromDateAndTime:int:day::
+PyDateTime_FromDateAndTime:int:hour::
+PyDateTime_FromDateAndTime:int:minute::
+PyDateTime_FromDateAndTime:int:second::
+PyDateTime_FromDateAndTime:int:usecond::
+
+PyDateTime_FromTimestamp:PyObject*::+1:
+PyDateTime_FromTimestamp:PyObject*:args:0:
+
+PyDelta_FromDSU:PyObject*::+1:
+PyDelta_FromDSU:int:days::
+PyDelta_FromDSU:int:seconds::
+PyDelta_FromDSU:int:useconds::
+
+PyDescr_NewClassMethod:PyObject*::+1:
+PyDescr_NewClassMethod:PyTypeObject*:type::
+PyDescr_NewClassMethod:PyMethodDef*:method::
+
 PyDescr_NewGetSet:PyObject*::+1:
 PyDescr_NewGetSet:PyTypeObject*:type::
 PyDescr_NewGetSet:PyGetSetDef*:getset::
@@ -226,6 +258,15 @@
 PyErr_Restore:PyObject*:value:-1:
 PyErr_Restore:PyObject*:traceback:-1:
 
+PyErr_SetExcFromWindowsErr:PyObject*::null:
+PyErr_SetExcFromWindowsErr:PyObject*:type:0:
+PyErr_SetExcFromWindowsErr:int:ierr::
+
+PyErr_SetExcFromWindowsErrWithFilename:PyObject*::null:
+PyErr_SetExcFromWindowsErrWithFilename:PyObject*:type:0:
+PyErr_SetExcFromWindowsErrWithFilename:int:ierr::
+PyErr_SetExcFromWindowsErrWithFilename:char*:filename::
+
 PyErr_SetFromErrno:PyObject*::null:
 PyErr_SetFromErrno:PyObject*:type:0:
 
@@ -337,6 +378,13 @@
 PyFloat_FromDouble:PyObject*::+1:
 PyFloat_FromDouble:double:v::
 
+PyFloat_FromString:PyObject*::+1:
+PyFloat_FromString:PyObject*:str:0:
+PyFloat_FromString:char**:pend:0:ignored
+
+PyFrozenSet_New:PyObject*::+1:
+PyFrozenSet_New:PyObject*:iterable:0:
+
 PyFunction_GetClosure:PyObject*::0:
 PyFunction_GetClosure:PyObject*:op:0:
 
@@ -364,6 +412,9 @@
 PyFunction_SetDefaults:PyObject*:op:0:
 PyFunction_SetDefaults:PyObject*:defaults:+1:
 
+PyGen_New:PyObject*::+1:
+PyGen_New:PyFrameObject*:frame:0:
+
 Py_InitModule:PyObject*::0:
 Py_InitModule:char*:name::
 Py_InitModule:PyMethodDef[]:methods::
@@ -432,6 +483,14 @@
 PyInt_FromLong:PyObject*::+1:
 PyInt_FromLong:long:ival::
 
+PyInt_FromString:PyObject*::+1:
+PyInt_FromString:char*:str:0:
+PyInt_FromString:char**:pend:0:
+PyInt_FromString:int:base:0:
+
+PyInt_FromSsize_t:PyObject*::+1:
+PyInt_FromSsize_t:Py_ssize_t:ival::
+
 PyInt_GetMax:long:::
 
 PyInterpreterState_Clear:void:::
@@ -939,6 +998,31 @@
 PyRun_File:PyObject*:globals:0:
 PyRun_File:PyObject*:locals:0:
 
+PyRun_FileEx:PyObject*::+1:??? -- same as eval_code2()
+PyRun_FileEx:FILE*:fp::
+PyRun_FileEx:char*:filename::
+PyRun_FileEx:int:start::
+PyRun_FileEx:PyObject*:globals:0:
+PyRun_FileEx:PyObject*:locals:0:
+PyRun_FileEx:int:closeit::
+
+PyRun_FileFlags:PyObject*::+1:??? -- same as eval_code2()
+PyRun_FileFlags:FILE*:fp::
+PyRun_FileFlags:char*:filename::
+PyRun_FileFlags:int:start::
+PyRun_FileFlags:PyObject*:globals:0:
+PyRun_FileFlags:PyObject*:locals:0:
+PyRun_FileFlags:PyCompilerFlags*:flags::
+
+PyRun_FileExFlags:PyObject*::+1:??? -- same as eval_code2()
+PyRun_FileExFlags:FILE*:fp::
+PyRun_FileExFlags:char*:filename::
+PyRun_FileExFlags:int:start::
+PyRun_FileExFlags:PyObject*:globals:0:
+PyRun_FileExFlags:PyObject*:locals:0:
+PyRun_FileExFlags:int:closeit::
+PyRun_FileExFlags:PyCompilerFlags*:flags::
+
 PyRun_InteractiveLoop:int:::
 PyRun_InteractiveLoop:FILE*:fp::
 PyRun_InteractiveLoop:char*:filename::
@@ -960,6 +1044,13 @@
 PyRun_String:PyObject*:globals:0:
 PyRun_String:PyObject*:locals:0:
 
+PyRun_StringFlags:PyObject*::+1:??? -- same as eval_code2()
+PyRun_StringFlags:char*:str::
+PyRun_StringFlags:int:start::
+PyRun_StringFlags:PyObject*:globals:0:
+PyRun_StringFlags:PyObject*:locals:0:
+PyRun_StringFlags:PyCompilerFlags*:flags::
+
 PySeqIter_New:PyObject*::+1:
 PySeqIter_New:PyObject*:seq::
 
@@ -1053,6 +1144,9 @@
 PySet_Discard:PyObject*:set:0:
 PySet_Discard:PyObject*:key:-1:no effect if key not found
 
+PySet_New:PyObject*::+1:
+PySet_New:PyObject*:iterable:0:
+
 PySet_Pop:PyObject*::0:or returns NULL and raises KeyError if set is empty
 PySet_Pop:PyObject*:set:0:
 
@@ -1167,6 +1261,12 @@
 PyThreadState_Swap:PyThreadState*:::
 PyThreadState_Swap:PyThreadState*:tstate::
 
+PyTime_FromTime:PyObject*::+1:
+PyTime_FromTime:int:hour::
+PyTime_FromTime:int:minute::
+PyTime_FromTime:int:second::
+PyTime_FromTime:int:usecond::
+
 PyTuple_Check:int:::
 PyTuple_Check:PyObject*:p:0:
 
@@ -1186,6 +1286,10 @@
 PyTuple_New:PyObject*::+1:
 PyTuple_New:int:len::
 
+PyTuple_Pack:PyObject*::+1:
+PyTuple_Pack:int:len::
+PyTuple_Pack:PyObject*:...:0:
+
 PyTuple_SET_ITEM:void:::
 PyTuple_SET_ITEM:PyTupleObject*:p:0:
 PyTuple_SET_ITEM:int:pos::
@@ -1298,6 +1402,19 @@
 PyUnicode_Decode:const char*:encoding::
 PyUnicode_Decode:const char*:errors::
 
+PyUnicode_DecodeUTF16Stateful:PyObject*::+1:
+PyUnicode_DecodeUTF16Stateful:const char*:s::
+PyUnicode_DecodeUTF16Stateful:int:size::
+PyUnicode_DecodeUTF16Stateful:const char*:errors::
+PyUnicode_DecodeUTF16Stateful:int*:byteorder::
+PyUnicode_DecodeUTF16Stateful:int*:consumed::
+
+PyUnicode_DecodeUTF8Stateful:PyObject*::+1:
+PyUnicode_DecodeUTF8Stateful:const char*:s::
+PyUnicode_DecodeUTF8Stateful:int:size::
+PyUnicode_DecodeUTF8Stateful:const char*:errors::
+PyUnicode_DecodeUTF8Stateful:int*:consumed::
+
 PyUnicode_Encode:PyObject*::+1:
 PyUnicode_Encode:const Py_UNICODE*:s::
 PyUnicode_Encode:int:size::
@@ -1513,6 +1630,12 @@
 Py_CompileString:char*:filename::
 Py_CompileString:int:start::
 
+Py_CompileStringFlags:PyObject*::+1:
+Py_CompileStringFlags:char*:str::
+Py_CompileStringFlags:char*:filename::
+Py_CompileStringFlags:int:start::
+Py_CompileStringFlags:PyCompilerFlags*:flags::
+
 Py_DECREF:void:::
 Py_DECREF:PyObject*:o:-1:
 
diff --git a/Doc/commontex/boilerplate.tex b/Doc/commontex/boilerplate.tex
index b4c9f48..55a4184 100644
--- a/Doc/commontex/boilerplate.tex
+++ b/Doc/commontex/boilerplate.tex
@@ -5,5 +5,5 @@
 	Email: \email{docs@python.org}
 }
 
-\date{\today}			% XXX update before final release!
+\date{5th April 2006}			% XXX update before final release!
 \input{patchlevel}		% include Python version information
diff --git a/Doc/commontex/license.tex b/Doc/commontex/license.tex
index 525ce8a..d1554c2 100644
--- a/Doc/commontex/license.tex
+++ b/Doc/commontex/license.tex
@@ -49,6 +49,8 @@
   \linev{2.4}{2.3}{2004}{PSF}{yes}
   \linev{2.4.1}{2.4}{2005}{PSF}{yes}
   \linev{2.4.2}{2.4.1}{2005}{PSF}{yes}
+  \linev{2.4.3}{2.4.2}{2006}{PSF}{yes}
+  \linev{2.5}{2.4}{2006}{PSF}{yes}
 \end{tablev}
 
 \note{GPL-compatible doesn't mean that we're distributing
@@ -430,26 +432,49 @@
 The source code for the \module{md5} module contains the following notice:
 
 \begin{verbatim}
-Copyright (C) 1991-2, RSA Data Security, Inc. Created 1991. All
-rights reserved.
+  Copyright (C) 1999, 2002 Aladdin Enterprises.  All rights reserved.
 
-License to copy and use this software is granted provided that it
-is identified as the "RSA Data Security, Inc. MD5 Message-Digest
-Algorithm" in all material mentioning or referencing this software
-or this function.
+  This software is provided 'as-is', without any express or implied
+  warranty.  In no event will the authors be held liable for any damages
+  arising from the use of this software.
 
-License is also granted to make and use derivative works provided
-that such works are identified as "derived from the RSA Data
-Security, Inc. MD5 Message-Digest Algorithm" in all material
-mentioning or referencing the derived work.
+  Permission is granted to anyone to use this software for any purpose,
+  including commercial applications, and to alter it and redistribute it
+  freely, subject to the following restrictions:
 
-RSA Data Security, Inc. makes no representations concerning either
-the merchantability of this software or the suitability of this
-software for any particular purpose. It is provided "as is"
-without express or implied warranty of any kind.
+  1. The origin of this software must not be misrepresented; you must not
+     claim that you wrote the original software. If you use this software
+     in a product, an acknowledgment in the product documentation would be
+     appreciated but is not required.
+  2. Altered source versions must be plainly marked as such, and must not be
+     misrepresented as being the original software.
+  3. This notice may not be removed or altered from any source distribution.
 
-These notices must be retained in any copies of any part of this
-documentation and/or software.
+  L. Peter Deutsch
+  ghost@aladdin.com
+
+  Independent implementation of MD5 (RFC 1321).
+
+  This code implements the MD5 Algorithm defined in RFC 1321, whose
+  text is available at
+	http://www.ietf.org/rfc/rfc1321.txt
+  The code is derived from the text of the RFC, including the test suite
+  (section A.5) but excluding the rest of Appendix A.  It does not include
+  any code or documentation that is identified in the RFC as being
+  copyrighted.
+
+  The original and principal author of md5.h is L. Peter Deutsch
+  <ghost@aladdin.com>.  Other authors are noted in the change history
+  that follows (in reverse chronological order):
+
+  2002-04-13 lpd Removed support for non-ANSI compilers; removed
+	references to Ghostscript; clarified derivation from RFC 1321;
+	now handles byte order either statically or dynamically.
+  1999-11-04 lpd Edited comments slightly for automatic TOC extraction.
+  1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5);
+	added conditionalization for C++ compilation from Martin
+	Purschke <purschke@bnl.gov>.
+  1999-05-03 lpd Original version.
 \end{verbatim}
 
 
diff --git a/Doc/dist/dist.tex b/Doc/dist/dist.tex
index d6ddad8..3ba51d0 100644
--- a/Doc/dist/dist.tex
+++ b/Doc/dist/dist.tex
@@ -1467,7 +1467,7 @@
   \lineii{\%description (section)}{\option{long\_description}}
 \end{tableii}
 
-Additionally, there many options in \file{.spec} files that don't have
+Additionally, there are many options in \file{.spec} files that don't have
 corresponding options in the setup script.  Most of these are handled
 through options to the \command{bdist\_rpm} command as follows:
 
@@ -1737,6 +1737,8 @@
 \chapter{Uploading Packages to the Package Index}
 \label{package-upload}
 
+\versionadded{2.5}
+
 The Python Package Index (PyPI) not only stores the package info, but also 
 the package data if the author of the package wishes to. The distutils
 command \command{upload} pushes the distribution files to PyPI.
@@ -1754,8 +1756,21 @@
 distributions named on the command line for the invocation including
 the \command{upload} command are uploaded.
 
-The \command{upload} command uses the username and password stored in
-the file \file{\$HOME/.pypirc}, see section~\ref{pypirc}.
+The \command{upload} command uses the username, password, and repository
+URL from the \file{\$HOME/.pypirc} file (see section~\ref{pypirc} for
+more on this file).
+
+You can use the \programopt{--sign} option to tell \command{upload} to
+sign each uploaded file using GPG (GNU Privacy Guard).  The 
+\program{gpg} program must be available for execution on the system
+\envvar{PATH}.  You can also specify which key to use for signing
+using the \programopt{--identity=\var{name}} option.
+
+Other \command{upload} options include 
+\programopt{--repository=\var{url}} (which lets you override the
+repository setting from \file{\$HOME/.pypirc}), and
+\programopt{--show-response} (which displays the full response text
+from the PyPI server for help in debugging upload problems).
 
 \chapter{Examples}
 \label{examples}
diff --git a/Doc/howto/regex.tex b/Doc/howto/regex.tex
index 87fdad2..f9867ae 100644
--- a/Doc/howto/regex.tex
+++ b/Doc/howto/regex.tex
@@ -33,11 +33,8 @@
 
 The \module{re} module was added in Python 1.5, and provides
 Perl-style regular expression patterns.  Earlier versions of Python
-came with the \module{regex} module, which provides Emacs-style
-patterns.  Emacs-style patterns are slightly less readable and
-don't provide as many features, so there's not much reason to use
-the \module{regex} module when writing new code, though you might
-encounter old code that uses it.
+came with the \module{regex} module, which provided Emacs-style
+patterns.  \module{regex} module was removed in Python 2.5.
 
 Regular expressions (or REs) are essentially a tiny, highly
 specialized programming language embedded inside Python and made
@@ -1458,7 +1455,7 @@
 by O'Reilly.  Unfortunately, it exclusively concentrates on Perl and
 Java's flavours of regular expressions, and doesn't contain any Python
 material at all, so it won't be useful as a reference for programming
-in Python.  (The first edition covered Python's now-obsolete
+in Python.  (The first edition covered Python's now-removed
 \module{regex} module, which won't help you much.)  Consider checking
 it out from your library.
 
diff --git a/Doc/lib/compiler.tex b/Doc/lib/compiler.tex
index e619a9a..f0926e7 100644
--- a/Doc/lib/compiler.tex
+++ b/Doc/lib/compiler.tex
@@ -40,9 +40,9 @@
 
 \begin{funcdesc}{parse}{buf}
 Returns an abstract syntax tree for the Python source code in \var{buf}.
-The function raises SyntaxError if there is an error in the source
-code.  The return value is a \class{compiler.ast.Module} instance that
-contains the tree.  
+The function raises \exception{SyntaxError} if there is an error in the
+source code.  The return value is a \class{compiler.ast.Module} instance
+that contains the tree.  
 \end{funcdesc}
 
 \begin{funcdesc}{parseFile}{path}
diff --git a/Doc/lib/email-dir.py b/Doc/lib/email-dir.py
index 2d89a2f..c04f57d 100644
--- a/Doc/lib/email-dir.py
+++ b/Doc/lib/email-dir.py
@@ -1,83 +1,69 @@
 #!/usr/bin/env python
 
-"""Send the contents of a directory as a MIME message.
+"""Send the contents of a directory as a MIME message."""
 
-Usage: dirmail [options] from to [to ...]*
-
-Options:
-    -h / --help
-        Print this message and exit.
-
-    -d directory
-    --directory=directory
-        Mail the contents of the specified directory, otherwise use the
-        current directory.  Only the regular files in the directory are sent,
-        and we don't recurse to subdirectories.
-
-`from' is the email address of the sender of the message.
-
-`to' is the email address of the recipient of the message, and multiple
-recipients may be given.
-
-The email is sent by forwarding to your local SMTP server, which then does the
-normal delivery process.  Your local machine must be running an SMTP server.
-"""
-
-import sys
 import os
-import getopt
+import sys
 import smtplib
 # For guessing MIME type based on file name extension
 import mimetypes
 
-from email import Encoders
-from email.Message import Message
-from email.MIMEAudio import MIMEAudio
-from email.MIMEBase import MIMEBase
-from email.MIMEMultipart import MIMEMultipart
-from email.MIMEImage import MIMEImage
-from email.MIMEText import MIMEText
+from optparse import OptionParser
+
+from email import encoders
+from email.message import Message
+from email.mime.audio import MIMEAudio
+from email.mime.base import MIMEBase
+from email.mime.image import MIMEImage
+from email.mime.multipart import MIMEMultipart
+from email.mime.text import MIMEText
 
 COMMASPACE = ', '
 
 
-def usage(code, msg=''):
-    print >> sys.stderr, __doc__
-    if msg:
-        print >> sys.stderr, msg
-    sys.exit(code)
-
-
 def main():
-    try:
-        opts, args = getopt.getopt(sys.argv[1:], 'hd:', ['help', 'directory='])
-    except getopt.error, msg:
-        usage(1, msg)
+    parser = OptionParser(usage="""\
+Send the contents of a directory as a MIME message.
 
-    dir = os.curdir
-    for opt, arg in opts:
-        if opt in ('-h', '--help'):
-            usage(0)
-        elif opt in ('-d', '--directory'):
-            dir = arg
+Usage: %prog [options]
 
-    if len(args) < 2:
-        usage(1)
-
-    sender = args[0]
-    recips = args[1:]
-
+Unless the -o option is given, the email is sent by forwarding to your local
+SMTP server, which then does the normal delivery process.  Your local machine
+must be running an SMTP server.
+""")
+    parser.add_option('-d', '--directory',
+                      type='string', action='store',
+                      help="""Mail the contents of the specified directory,
+                      otherwise use the current directory.  Only the regular
+                      files in the directory are sent, and we don't recurse to
+                      subdirectories.""")
+    parser.add_option('-o', '--output',
+                      type='string', action='store', metavar='FILE',
+                      help="""Print the composed message to FILE instead of
+                      sending the message to the SMTP server.""")
+    parser.add_option('-s', '--sender',
+                      type='string', action='store', metavar='SENDER',
+                      help='The value of the From: header (required)')
+    parser.add_option('-r', '--recipient',
+                      type='string', action='append', metavar='RECIPIENT',
+                      default=[], dest='recipients',
+                      help='A To: header value (at least one required)')
+    opts, args = parser.parse_args()
+    if not opts.sender or not opts.recipients:
+        parser.print_help()
+        sys.exit(1)
+    directory = opts.directory
+    if not directory:
+        directory = '.'
     # Create the enclosing (outer) message
     outer = MIMEMultipart()
-    outer['Subject'] = 'Contents of directory %s' % os.path.abspath(dir)
-    outer['To'] = COMMASPACE.join(recips)
-    outer['From'] = sender
+    outer['Subject'] = 'Contents of directory %s' % os.path.abspath(directory)
+    outer['To'] = COMMASPACE.join(opts.recipients)
+    outer['From'] = opts.sender
     outer.preamble = 'You will not see this in a MIME-aware mail reader.\n'
-    # To guarantee the message ends with a newline
-    outer.epilogue = ''
 
-    for filename in os.listdir(dir):
-        path = os.path.join(dir, filename)
+    for filename in os.listdir(directory):
+        path = os.path.join(directory, filename)
         if not os.path.isfile(path):
             continue
         # Guess the content type based on the file's extension.  Encoding
@@ -108,16 +94,21 @@
             msg.set_payload(fp.read())
             fp.close()
             # Encode the payload using Base64
-            Encoders.encode_base64(msg)
+            encoders.encode_base64(msg)
         # Set the filename parameter
         msg.add_header('Content-Disposition', 'attachment', filename=filename)
         outer.attach(msg)
-
-    # Now send the message
-    s = smtplib.SMTP()
-    s.connect()
-    s.sendmail(sender, recips, outer.as_string())
-    s.close()
+    # Now send or store the message
+    composed = outer.as_string()
+    if opts.output:
+        fp = open(opts.output, 'w')
+        fp.write(composed)
+        fp.close()
+    else:
+        s = smtplib.SMTP()
+        s.connect()
+        s.sendmail(opts.sender, opts.recipients, composed)
+        s.close()
 
 
 if __name__ == '__main__':
diff --git a/Doc/lib/email-mime.py b/Doc/lib/email-mime.py
index 048a59f..5097253 100644
--- a/Doc/lib/email-mime.py
+++ b/Doc/lib/email-mime.py
@@ -2,8 +2,8 @@
 import smtplib
 
 # Here are the email package modules we'll need
-from email.MIMEImage import MIMEImage
-from email.MIMEMultipart import MIMEMultipart
+from email.mime.image import MIMEImage
+from email.mime.multipart import MIMEMultipart
 
 COMMASPACE = ', '
 
@@ -15,8 +15,6 @@
 msg['From'] = me
 msg['To'] = COMMASPACE.join(family)
 msg.preamble = 'Our family reunion'
-# Guarantees the message ends in a newline
-msg.epilogue = ''
 
 # Assume we know that the image files are all in PNG format
 for file in pngfiles:
diff --git a/Doc/lib/email-simple.py b/Doc/lib/email-simple.py
index a445f1b..44152a4 100644
--- a/Doc/lib/email-simple.py
+++ b/Doc/lib/email-simple.py
@@ -2,7 +2,7 @@
 import smtplib
 
 # Import the email modules we'll need
-from email.MIMEText import MIMEText
+from email.mime.text import MIMEText
 
 # Open a plain text file for reading.  For this example, assume that
 # the text file contains only ASCII characters.
diff --git a/Doc/lib/email-unpack.py b/Doc/lib/email-unpack.py
index b166fdb..fc05d99 100644
--- a/Doc/lib/email-unpack.py
+++ b/Doc/lib/email-unpack.py
@@ -1,59 +1,44 @@
 #!/usr/bin/env python
 
-"""Unpack a MIME message into a directory of files.
+"""Unpack a MIME message into a directory of files."""
 
-Usage: unpackmail [options] msgfile
-
-Options:
-    -h / --help
-        Print this message and exit.
-
-    -d directory
-    --directory=directory
-        Unpack the MIME message into the named directory, which will be
-        created if it doesn't already exist.
-
-msgfile is the path to the file containing the MIME message.
-"""
-
-import sys
 import os
-import getopt
+import sys
+import email
 import errno
 import mimetypes
-import email
 
-
-def usage(code, msg=''):
-    print >> sys.stderr, __doc__
-    if msg:
-        print >> sys.stderr, msg
-    sys.exit(code)
+from optparse import OptionParser
 
 
 def main():
-    try:
-        opts, args = getopt.getopt(sys.argv[1:], 'hd:', ['help', 'directory='])
-    except getopt.error, msg:
-        usage(1, msg)
+    parser = OptionParser(usage="""\
+Unpack a MIME message into a directory of files.
 
-    dir = os.curdir
-    for opt, arg in opts:
-        if opt in ('-h', '--help'):
-            usage(0)
-        elif opt in ('-d', '--directory'):
-            dir = arg
+Usage: %prog [options] msgfile
+""")
+    parser.add_option('-d', '--directory',
+                      type='string', action='store',
+                      help="""Unpack the MIME message into the named
+                      directory, which will be created if it doesn't already
+                      exist.""")
+    opts, args = parser.parse_args()
+    if not opts.directory:
+        parser.print_help()
+        sys.exit(1)
 
     try:
         msgfile = args[0]
     except IndexError:
-        usage(1)
+        parser.print_help()
+        sys.exit(1)
 
     try:
-        os.mkdir(dir)
+        os.mkdir(opts.directory)
     except OSError, e:
         # Ignore directory exists error
-        if e.errno <> errno.EEXIST: raise
+        if e.errno <> errno.EEXIST:
+            raise
 
     fp = open(msgfile)
     msg = email.message_from_file(fp)
@@ -74,8 +59,8 @@
                 ext = '.bin'
             filename = 'part-%03d%s' % (counter, ext)
         counter += 1
-        fp = open(os.path.join(dir, filename), 'wb')
-        fp.write(part.get_payload(decode=1))
+        fp = open(os.path.join(opts.directory, filename), 'wb')
+        fp.write(part.get_payload(decode=True))
         fp.close()
 
 
diff --git a/Doc/lib/email.tex b/Doc/lib/email.tex
index 3a90e22..6853325 100644
--- a/Doc/lib/email.tex
+++ b/Doc/lib/email.tex
@@ -1,4 +1,4 @@
-% Copyright (C) 2001-2004 Python Software Foundation
+% Copyright (C) 2001-2006 Python Software Foundation
 % Author: barry@python.org (Barry Warsaw)
 
 \section{\module{email} ---
@@ -18,10 +18,10 @@
 such as \refmodule{rfc822}, \refmodule{mimetools},
 \refmodule{multifile}, and other non-standard packages such as
 \module{mimecntl}.  It is specifically \emph{not} designed to do any
-sending of email messages to SMTP (\rfc{2821}) servers; that is the
-function of the \refmodule{smtplib} module.  The \module{email}
-package attempts to be as RFC-compliant as possible, supporting in
-addition to \rfc{2822}, such MIME-related RFCs as
+sending of email messages to SMTP (\rfc{2821}), NNTP, or other servers; those
+are functions of modules such as \refmodule{smtplib} and \refmodule{nntplib}.
+The \module{email} package attempts to be as RFC-compliant as possible,
+supporting in addition to \rfc{2822}, such MIME-related RFCs as
 \rfc{2045}, \rfc{2046}, \rfc{2047}, and \rfc{2231}.
 
 The primary distinguishing feature of the \module{email} package is
@@ -41,7 +41,7 @@
 should be common in applications: an email message is read as flat
 text from a file or other source, the text is parsed to produce the
 object structure of the email message, this structure is manipulated,
-and finally rendered back into flat text.
+and finally, the object tree is rendered back into flat text.
 
 It is perfectly feasible to create the object structure out of whole
 cloth --- i.e. completely from scratch.  From there, a similar
@@ -56,6 +56,7 @@
 
 \begin{seealso}
     \seemodule{smtplib}{SMTP protocol client}
+    \seemodule{nntplib}{NNTP protocol client}
 \end{seealso}
 
 \subsection{Representing an email message}
@@ -88,22 +89,51 @@
 \subsection{Iterators}
 \input{emailiter}
 
-\subsection{Package History}
+\subsection{Package History\label{email-pkg-history}}
 
-Version 1 of the \module{email} package was bundled with Python
-releases up to Python 2.2.1.  Version 2 was developed for the Python
-2.3 release, and backported to Python 2.2.2.  It was also available as
-a separate distutils-based package, and is compatible back to Python 2.1.
+This table describes the release history of the email package, corresponding
+to the version of Python that the package was released with.  For purposes of
+this document, when you see a note about change or added versions, these refer
+to the Python version the change was made it, \emph{not} the email package
+version.  This table also describes the Python compatibility of each version
+of the package.
 
-\module{email} version 3.0 was released with Python 2.4 and as a separate
-distutils-based package.  It is compatible back to Python 2.3.
+\begin{tableiii}{l|l|l}{constant}{email version}{distributed with}{compatible with}
+\lineiii{1.x}{Python 2.2.0 to Python 2.2.1}{\emph{no longer supported}}
+\lineiii{2.5}{Python 2.2.2+ and Python 2.3}{Python 2.1 to 2.5}
+\lineiii{3.0}{Python 2.4}{Python 2.3 to 2.5}
+\lineiii{4.0}{Python 2.5}{Python 2.3 to 2.5}
+\end{tableiii}
 
-Here are the differences between \module{email} version 3 and version 2:
+Here are the major differences between \module{email} verson 4 and version 3:
+
+\begin{itemize}
+\item All modules have been renamed according to \pep{8} standards.  For
+      example, the version 3 module \module{email.Message} was renamed to
+      \module{email.message} in version 4.
+
+\item A new subpackage \module{email.mime} was added and all the version 3
+      \module{email.MIME*} modules were renamed and situated into the
+      \module{email.mime} subpackage.  For example, the version 3 module
+      \module{email.MIMEText} was renamed to \module{email.mime.text}.
+
+      \emph{Note that the version 3 names will continue to work until Python
+      2.6}.
+
+\item The \module{email.mime.application} module was added, which contains the
+      \class{MIMEApplication} class.
+
+\item Methods that were deprecated in version 3 have been removed.  These
+      include \method{Generator.__call__()}, \method{Message.get_type()},
+      \method{Message.get_main_type()}, \method{Message.get_subtype()}.
+\end{itemize}
+
+Here are the major differences between \module{email} version 3 and version 2:
 
 \begin{itemize}
 \item The \class{FeedParser} class was introduced, and the \class{Parser}
       class was implemented in terms of the \class{FeedParser}.  All parsing
-      there for is non-strict, and parsing will make a best effort never to
+      therefore is non-strict, and parsing will make a best effort never to
       raise an exception.  Problems found while parsing messages are stored in
       the message's \var{defect} attribute.
 
@@ -117,7 +147,7 @@
       \method{Generator.__call__()}, \method{Message.get_type()},
       \method{Message.get_main_type()}, \method{Message.get_subtype()}, and
       the \var{strict} argument to the \class{Parser} class.  These are
-      expected to be removed in email 3.1.
+      expected to be removed in future versions.
 
 \item Support for Pythons earlier than 2.3 has been removed.
 \end{itemize}
@@ -278,12 +308,12 @@
 \item The method \method{getpayloadastext()} was removed.  Similar
       functionality
       is supported by the \class{DecodedGenerator} class in the
-      \refmodule{email.Generator} module.
+      \refmodule{email.generator} module.
 
 \item The method \method{getbodyastext()} was removed.  You can get
       similar functionality by creating an iterator with
       \function{typed_subpart_iterator()} in the
-      \refmodule{email.Iterators} module.
+      \refmodule{email.iterators} module.
 \end{itemize}
 
 The \class{Parser} class has no differences in its public interface.
@@ -295,7 +325,7 @@
 in \rfc{1894}.}.
 
 The \class{Generator} class has no differences in its public
-interface.  There is a new class in the \refmodule{email.Generator}
+interface.  There is a new class in the \refmodule{email.generator}
 module though, called \class{DecodedGenerator} which provides most of
 the functionality previously available in the
 \method{Message.getpayloadastext()} method.
@@ -329,11 +359,11 @@
 
 \module{mimelib} provided some utility functions in its
 \module{address} and \module{date} modules.  All of these functions
-have been moved to the \refmodule{email.Utils} module.
+have been moved to the \refmodule{email.utils} module.
 
 The \code{MsgReader} class/module has been removed.  Its functionality
 is most closely supported in the \function{body_line_iterator()}
-function in the \refmodule{email.Iterators} module.
+function in the \refmodule{email.iterators} module.
 
 \subsection{Examples}
 
diff --git a/Doc/lib/emailcharsets.tex b/Doc/lib/emailcharsets.tex
index 18f2a01..e0be68a 100644
--- a/Doc/lib/emailcharsets.tex
+++ b/Doc/lib/emailcharsets.tex
@@ -1,4 +1,4 @@
-\declaremodule{standard}{email.Charset}
+\declaremodule{standard}{email.charset}
 \modulesynopsis{Character Sets}
 
 This module provides a class \class{Charset} for representing
@@ -7,6 +7,8 @@
 manipulating this registry.  Instances of \class{Charset} are used in
 several other modules within the \module{email} package.
 
+Import this class from the \module{email.charset} module.
+
 \versionadded{2.2.2}
 
 \begin{classdesc}{Charset}{\optional{input_charset}}
@@ -153,7 +155,7 @@
 for multibyte character sets, which have line length issues (multibyte
 characters must be split on a character, not a byte boundary); use the
 higher-level \class{Header} class to deal with these issues (see
-\refmodule{email.Header}).  \var{convert} defaults to \code{False}.
+\refmodule{email.header}).  \var{convert} defaults to \code{False}.
 
 The type of encoding (base64 or quoted-printable) will be based on
 the \var{header_encoding} attribute.
@@ -188,7 +190,7 @@
 This method allows you to compare two \class{Charset} instances for inequality.
 \end{methoddesc}
 
-The \module{email.Charset} module also provides the following
+The \module{email.charset} module also provides the following
 functions for adding new entries to the global character set, alias,
 and codec registries:
 
diff --git a/Doc/lib/emailencoders.tex b/Doc/lib/emailencoders.tex
index a49e04d..3d05c2a 100644
--- a/Doc/lib/emailencoders.tex
+++ b/Doc/lib/emailencoders.tex
@@ -1,4 +1,4 @@
-\declaremodule{standard}{email.Encoders}
+\declaremodule{standard}{email.encoders}
 \modulesynopsis{Encoders for email message payloads.}
 
 When creating \class{Message} objects from scratch, you often need to
@@ -7,7 +7,7 @@
 type messages containing binary data.
 
 The \module{email} package provides some convenient encodings in its
-\module{Encoders} module.  These encoders are actually used by the
+\module{encoders} module.  These encoders are actually used by the
 \class{MIMEAudio} and \class{MIMEImage} class constructors to provide default
 encodings.  All encoder functions take exactly one argument, the message
 object to encode.  They usually extract the payload, encode it, and reset the
diff --git a/Doc/lib/emailexc.tex b/Doc/lib/emailexc.tex
index 6ac0889..3cef1d5 100644
--- a/Doc/lib/emailexc.tex
+++ b/Doc/lib/emailexc.tex
@@ -1,8 +1,8 @@
-\declaremodule{standard}{email.Errors}
+\declaremodule{standard}{email.errors}
 \modulesynopsis{The exception classes used by the email package.}
 
 The following exception classes are defined in the
-\module{email.Errors} module:
+\module{email.errors} module:
 
 \begin{excclassdesc}{MessageError}{}
 This is the base class for all exceptions that the \module{email}
@@ -59,7 +59,7 @@
 \mimetype{multipart/alternative} had a malformed header, that nested message
 object would have a defect, but the containing messages would not.
 
-All defect classes are subclassed from \class{email.Errors.MessageDefect}, but
+All defect classes are subclassed from \class{email.errors.MessageDefect}, but
 this class is \emph{not} an exception!
 
 \versionadded[All the defect classes were added]{2.4}
diff --git a/Doc/lib/emailgenerator.tex b/Doc/lib/emailgenerator.tex
index 330abc0..3415442 100644
--- a/Doc/lib/emailgenerator.tex
+++ b/Doc/lib/emailgenerator.tex
@@ -1,4 +1,4 @@
-\declaremodule{standard}{email.Generator}
+\declaremodule{standard}{email.generator}
 \modulesynopsis{Generate flat text email messages from a message structure.}
 
 One of the most common tasks is to generate the flat text of the email
@@ -8,7 +8,7 @@
 console.  Taking a message object structure and producing a flat text
 document is the job of the \class{Generator} class.
 
-Again, as with the \refmodule{email.Parser} module, you aren't limited
+Again, as with the \refmodule{email.parser} module, you aren't limited
 to the functionality of the bundled generator; you could write one
 from scratch yourself.  However the bundled generator knows how to
 generate most email in a standards-compliant way, should handle MIME
@@ -17,7 +17,8 @@
 \class{Parser} class, and back to flat text, is idempotent (the input
 is identical to the output).
 
-Here are the public methods of the \class{Generator} class:
+Here are the public methods of the \class{Generator} class, imported from the
+\module{email.generator} module:
 
 \begin{classdesc}{Generator}{outfp\optional{, mangle_from_\optional{,
     maxheaderlen}}}
@@ -40,7 +41,7 @@
 Optional \var{maxheaderlen} specifies the longest length for a
 non-continued header.  When a header line is longer than
 \var{maxheaderlen} (in characters, with tabs expanded to 8 spaces),
-the header will be split as defined in the \module{email.Header}
+the header will be split as defined in the \module{email.header.Header}
 class.  Set to zero to disable header wrapping.  The default is 78, as
 recommended (but not required) by \rfc{2822}.
 \end{classdesc}
@@ -81,9 +82,9 @@
 As a convenience, see the methods \method{Message.as_string()} and
 \code{str(aMessage)}, a.k.a. \method{Message.__str__()}, which
 simplify the generation of a formatted string representation of a
-message object.  For more detail, see \refmodule{email.Message}.
+message object.  For more detail, see \refmodule{email.message}.
 
-The \module{email.Generator} module also provides a derived class,
+The \module{email.generator} module also provides a derived class,
 called \class{DecodedGenerator} which is like the \class{Generator}
 base class, except that non-\mimetype{text} parts are substituted with
 a format string representing the part.
@@ -128,13 +129,5 @@
 \versionadded{2.2.2}
 \end{classdesc}
 
-\subsubsection{Deprecated methods}
-
-The following methods are deprecated in \module{email} version 2.
-They are documented here for completeness.
-
-\begin{methoddesc}[Generator]{__call__}{msg\optional{, unixfrom}}
-This method is identical to the \method{flatten()} method.
-
-\deprecated{2.2.2}{Use the \method{flatten()} method instead.}
-\end{methoddesc}
+\versionchanged[The previously deprecated method \method{__call__()} was
+removed]{2.5}
diff --git a/Doc/lib/emailheaders.tex b/Doc/lib/emailheaders.tex
index 2795644b..524d08c 100644
--- a/Doc/lib/emailheaders.tex
+++ b/Doc/lib/emailheaders.tex
@@ -1,4 +1,4 @@
-\declaremodule{standard}{email.Header}
+\declaremodule{standard}{email.header}
 \modulesynopsis{Representing non-ASCII headers}
 
 \rfc{2822} is the base standard that describes the format of email
@@ -15,17 +15,18 @@
 containing non-\ASCII{} characters into \rfc{2822}-compliant format.
 These RFCs include \rfc{2045}, \rfc{2046}, \rfc{2047}, and \rfc{2231}.
 The \module{email} package supports these standards in its
-\module{email.Header} and \module{email.Charset} modules.
+\module{email.header} and \module{email.charset} modules.
 
 If you want to include non-\ASCII{} characters in your email headers,
 say in the \mailheader{Subject} or \mailheader{To} fields, you should
 use the \class{Header} class and assign the field in the
 \class{Message} object to an instance of \class{Header} instead of
-using a string for the header value.  For example:
+using a string for the header value.  Import the \class{Header} class from the
+\module{email.header} module.  For example:
 
 \begin{verbatim}
->>> from email.Message import Message
->>> from email.Header import Header
+>>> from email.message import Message
+>>> from email.header import Header
 >>> msg = Message()
 >>> h = Header('p\xf6stal', 'iso-8859-1')
 >>> msg['Subject'] = h
@@ -87,7 +88,7 @@
 Append the string \var{s} to the MIME header.
 
 Optional \var{charset}, if given, should be a \class{Charset} instance
-(see \refmodule{email.Charset}) or the name of a character set, which
+(see \refmodule{email.charset}) or the name of a character set, which
 will be converted to a \class{Charset} instance.  A value of
 \code{None} (the default) means that the \var{charset} given in the
 constructor is used.
@@ -139,7 +140,7 @@
 This method allows you to compare two \class{Header} instances for inequality.
 \end{methoddesc}
 
-The \module{email.Header} module also provides the following
+The \module{email.header} module also provides the following
 convenient functions.
 
 \begin{funcdesc}{decode_header}{header}
@@ -155,7 +156,7 @@
 Here's an example:
 
 \begin{verbatim}
->>> from email.Header import decode_header
+>>> from email.header import decode_header
 >>> decode_header('=?iso-8859-1?q?p=F6stal?=')
 [('p\xf6stal', 'iso-8859-1')]
 \end{verbatim}
diff --git a/Doc/lib/emailiter.tex b/Doc/lib/emailiter.tex
index d1a8f98..ef8ef6f 100644
--- a/Doc/lib/emailiter.tex
+++ b/Doc/lib/emailiter.tex
@@ -1,8 +1,8 @@
-\declaremodule{standard}{email.Iterators}
+\declaremodule{standard}{email.iterators}
 \modulesynopsis{Iterate over a  message object tree.}
 
 Iterating over a message object tree is fairly easy with the
-\method{Message.walk()} method.  The \module{email.Iterators} module
+\method{Message.walk()} method.  The \module{email.iterators} module
 provides some useful higher level iterations over message object
 trees.
 
diff --git a/Doc/lib/emailmessage.tex b/Doc/lib/emailmessage.tex
index 9b41852..7bd7dd8 100644
--- a/Doc/lib/emailmessage.tex
+++ b/Doc/lib/emailmessage.tex
@@ -1,10 +1,11 @@
-\declaremodule{standard}{email.Message}
+\declaremodule{standard}{email.message}
 \modulesynopsis{The base class representing email messages.}
 
 The central class in the \module{email} package is the
-\class{Message} class; it is the base class for the \module{email}
-object model.  \class{Message} provides the core functionality for
-setting and querying header fields, and for accessing message bodies.
+\class{Message} class, imported from the \module{email.message} module.  It is
+the base class for the \module{email} object model.  \class{Message} provides
+the core functionality for setting and querying header fields, and for
+accessing message bodies.
 
 Conceptually, a \class{Message} object consists of \emph{headers} and
 \emph{payloads}.  Headers are \rfc{2822} style field names and
@@ -45,7 +46,7 @@
 
 \begin{verbatim}
 from cStringIO import StringIO
-from email.Generator import Generator
+from email.generator import Generator
 fp = StringIO()
 g = Generator(fp, mangle_from_=False, maxheaderlen=60)
 g.flatten(msg)
@@ -119,7 +120,7 @@
 
 \begin{methoddesc}[Message]{set_charset}{charset}
 Set the character set of the payload to \var{charset}, which can
-either be a \class{Charset} instance (see \refmodule{email.Charset}), a
+either be a \class{Charset} instance (see \refmodule{email.charset}), a
 string naming a character set,
 or \code{None}.  If it is a string, it will be converted to a
 \class{Charset} instance.  If \var{charset} is \code{None}, the
@@ -128,8 +129,8 @@
 \exception{TypeError}.
 
 The message will be assumed to be of type \mimetype{text/*} encoded with
-\code{charset.input_charset}.  It will be converted to
-\code{charset.output_charset}
+\var{charset.input_charset}.  It will be converted to
+\var{charset.output_charset}
 and encoded properly, if needed, when generating the plain text
 representation of the message.  MIME headers
 (\mailheader{MIME-Version}, \mailheader{Content-Type},
@@ -513,6 +514,9 @@
 \end{verbatim}
 \end{methoddesc}
 
+\versionchanged[The previously deprecated methods \method{get_type()},
+\method{get_main_type()}, and \method{get_subtype()} were removed]{2.5}
+
 \class{Message} objects can also optionally contain two instance
 attributes, which can be used when generating the plain text of a MIME
 message.
@@ -532,7 +536,7 @@
 is writing out the plain text representation of a MIME message, and it
 finds the message has a \var{preamble} attribute, it will write this
 text in the area between the headers and the first boundary.  See
-\refmodule{email.Parser} and \refmodule{email.Generator} for details.
+\refmodule{email.parser} and \refmodule{email.generator} for details.
 
 Note that if the message object has no preamble, the
 \var{preamble} attribute will be \code{None}.
@@ -543,58 +547,15 @@
 attribute, except that it contains text that appears between the last
 boundary and the end of the message.
 
-One note: when generating the flat text for a \mimetype{multipart}
-message that has no \var{epilogue} (using the standard
-\class{Generator} class), no newline is added after the closing
-boundary line.  If the message object has an \var{epilogue} and its
-value does not start with a newline, a newline is printed after the
-closing boundary.  This seems a little clumsy, but it makes the most
-practical sense.  The upshot is that if you want to ensure that a
-newline get printed after your closing \mimetype{multipart} boundary,
-set the \var{epilogue} to the empty string.
+\versionchanged[You do not need to set the epilogue to the empty string in
+order for the \class{Generator} to print a newline at the end of the
+file]{2.5}
 \end{datadesc}
 
 \begin{datadesc}{defects}
 The \var{defects} attribute contains a list of all the problems found when
-parsing this message.  See \refmodule{email.Errors} for a detailed description
+parsing this message.  See \refmodule{email.errors} for a detailed description
 of the possible parsing defects.
 
 \versionadded{2.4}
 \end{datadesc}
-
-\subsubsection{Deprecated methods}
-
-\versionchanged[The \method{add_payload()} method was removed; use the
-\method{attach()} method instead]{2.4}
-
-The following methods are deprecated.  They are documented here for
-completeness.
-
-\begin{methoddesc}[Message]{get_type}{\optional{failobj}}
-Return the message's content type, as a string of the form
-\mimetype{maintype/subtype} as taken from the
-\mailheader{Content-Type} header.
-The returned string is coerced to lowercase.
-
-If there is no \mailheader{Content-Type} header in the message,
-\var{failobj} is returned (defaults to \code{None}).
-
-\deprecated{2.2.2}{Use the \method{get_content_type()} method instead.}
-\end{methoddesc}
-
-\begin{methoddesc}[Message]{get_main_type}{\optional{failobj}}
-Return the message's \emph{main} content type.  This essentially returns the
-\var{maintype} part of the string returned by \method{get_type()}, with the
-same semantics for \var{failobj}.
-
-\deprecated{2.2.2}{Use the \method{get_content_maintype()} method instead.}
-\end{methoddesc}
-
-\begin{methoddesc}[Message]{get_subtype}{\optional{failobj}}
-Return the message's sub-content type.  This essentially returns the
-\var{subtype} part of the string returned by \method{get_type()}, with the
-same semantics for \var{failobj}.
-
-\deprecated{2.2.2}{Use the \method{get_content_subtype()} method instead.}
-\end{methoddesc}
-
diff --git a/Doc/lib/emailmimebase.tex b/Doc/lib/emailmimebase.tex
index 070c9a2..4735be3 100644
--- a/Doc/lib/emailmimebase.tex
+++ b/Doc/lib/emailmimebase.tex
@@ -1,3 +1,11 @@
+\declaremodule{standard}{email.mime}
+\declaremodule{standard}{email.mime.base}
+\declaremodule{standard}{email.mime.nonmultipart}
+\declaremodule{standard}{email.mime.multipart}
+\declaremodule{standard}{email.mime.audio}
+\declaremodule{standard}{email.mime.image}
+\declaremodule{standard}{email.mime.message}
+\declaremodule{standard}{email.mime.text}
 Ordinarily, you get a message object structure by passing a file or
 some text to a parser, which parses the text and returns the root
 message object.  However you can also build a complete message
@@ -6,26 +14,16 @@
 \class{Message} objects, move them around, etc.  This makes a very
 convenient interface for slicing-and-dicing MIME messages.
 
-You can create a new object structure by creating \class{Message}
-instances, adding attachments and all the appropriate headers manually.
-For MIME messages though, the \module{email} package provides some
-convenient subclasses to make things easier.  Each of these classes
-should be imported from a module with the same name as the class, from
-within the \module{email} package.  E.g.:
-
-\begin{verbatim}
-import email.MIMEImage.MIMEImage
-\end{verbatim}
-
-or
-
-\begin{verbatim}
-from email.MIMEText import MIMEText
-\end{verbatim}
+You can create a new object structure by creating \class{Message} instances,
+adding attachments and all the appropriate headers manually.  For MIME
+messages though, the \module{email} package provides some convenient
+subclasses to make things easier.
 
 Here are the classes:
 
 \begin{classdesc}{MIMEBase}{_maintype, _subtype, **_params}
+Module: \module{email.mime.base}
+
 This is the base class for all the MIME-specific subclasses of
 \class{Message}.  Ordinarily you won't create instances specifically
 of \class{MIMEBase}, although you could.  \class{MIMEBase} is provided
@@ -45,6 +43,8 @@
 \end{classdesc}
 
 \begin{classdesc}{MIMENonMultipart}{}
+Module: \module{email.mime.nonmultipart}
+
 A subclass of \class{MIMEBase}, this is an intermediate base class for
 MIME messages that are not \mimetype{multipart}.  The primary purpose
 of this class is to prevent the use of the \method{attach()} method,
@@ -57,6 +57,7 @@
 
 \begin{classdesc}{MIMEMultipart}{\optional{subtype\optional{,
     boundary\optional{, _subparts\optional{, _params}}}}}
+Module: \module{email.mime.multipart}
 
 A subclass of \class{MIMEBase}, this is an intermediate base class for
 MIME messages that are \mimetype{multipart}.  Optional \var{_subtype}
@@ -80,8 +81,31 @@
 \versionadded{2.2.2}
 \end{classdesc}
 
+\begin{classdesc}{MIMEApplication}{_data\optional{, _subtype\optional{,
+    _encoder\optional{, **_params}}}}
+Module: \module{email.mime.application}
+
+A subclass of \class{MIMENonMultipart}, the \class{MIMEApplication} class is
+used to represent MIME message objects of major type \mimetype{application}.
+\var{_data} is a string containing the raw byte data.  Optional \var{_subtype}
+specifies the MIME subtype and defaults to \mimetype{octet-stream}.  
+
+Optional \var{_encoder} is a callable (i.e. function) which will
+perform the actual encoding of the data for transport.  This
+callable takes one argument, which is the \class{MIMEApplication} instance.
+It should use \method{get_payload()} and \method{set_payload()} to
+change the payload to encoded form.  It should also add any
+\mailheader{Content-Transfer-Encoding} or other headers to the message
+object as necessary.  The default encoding is base64.  See the
+\refmodule{email.encoders} module for a list of the built-in encoders.
+
+\var{_params} are passed straight through to the base class constructor.
+\versionadded{2.5}
+\end{classdesc}
+
 \begin{classdesc}{MIMEAudio}{_audiodata\optional{, _subtype\optional{,
     _encoder\optional{, **_params}}}}
+Module: \module{email.mime.audio}
 
 A subclass of \class{MIMENonMultipart}, the \class{MIMEAudio} class
 is used to create MIME message objects of major type \mimetype{audio}.
@@ -100,13 +124,14 @@
 change the payload to encoded form.  It should also add any
 \mailheader{Content-Transfer-Encoding} or other headers to the message
 object as necessary.  The default encoding is base64.  See the
-\refmodule{email.Encoders} module for a list of the built-in encoders.
+\refmodule{email.encoders} module for a list of the built-in encoders.
 
 \var{_params} are passed straight through to the base class constructor.
 \end{classdesc}
 
 \begin{classdesc}{MIMEImage}{_imagedata\optional{, _subtype\optional{,
     _encoder\optional{, **_params}}}}
+Module: \module{email.mime.image}
 
 A subclass of \class{MIMENonMultipart}, the \class{MIMEImage} class is
 used to create MIME message objects of major type \mimetype{image}.
@@ -125,13 +150,15 @@
 change the payload to encoded form.  It should also add any
 \mailheader{Content-Transfer-Encoding} or other headers to the message
 object as necessary.  The default encoding is base64.  See the
-\refmodule{email.Encoders} module for a list of the built-in encoders.
+\refmodule{email.encoders} module for a list of the built-in encoders.
 
 \var{_params} are passed straight through to the \class{MIMEBase}
 constructor.
 \end{classdesc}
 
 \begin{classdesc}{MIMEMessage}{_msg\optional{, _subtype}}
+Module: \module{email.mime.message}
+
 A subclass of \class{MIMENonMultipart}, the \class{MIMEMessage} class
 is used to create MIME objects of main type \mimetype{message}.
 \var{_msg} is used as the payload, and must be an instance of class
@@ -143,6 +170,8 @@
 \end{classdesc}
 
 \begin{classdesc}{MIMEText}{_text\optional{, _subtype\optional{, _charset}}}
+Module: \module{email.mime.text}
+
 A subclass of \class{MIMENonMultipart}, the \class{MIMEText} class is
 used to create MIME objects of major type \mimetype{text}.
 \var{_text} is the string for the payload.  \var{_subtype} is the
diff --git a/Doc/lib/emailparser.tex b/Doc/lib/emailparser.tex
index 5fac92f..609fa40 100644
--- a/Doc/lib/emailparser.tex
+++ b/Doc/lib/emailparser.tex
@@ -1,4 +1,4 @@
-\declaremodule{standard}{email.Parser}
+\declaremodule{standard}{email.parser}
 \modulesynopsis{Parse flat text email messages to produce a message
 	        object structure.}
 
@@ -41,9 +41,10 @@
 
 \versionadded{2.4}
 
-The \class{FeedParser} provides an API that is conducive to incremental
-parsing of email messages, such as would be necessary when reading the text of
-an email message from a source that can block (e.g. a socket).  The
+The \class{FeedParser}, imported from the \module{email.feedparser} module,
+provides an API that is conducive to incremental parsing of email messages,
+such as would be necessary when reading the text of an email message from a
+source that can block (e.g. a socket).  The
 \class{FeedParser} can of course be used to parse an email message fully
 contained in a string or a file, but the classic \class{Parser} API may be
 more convenient for such use cases.  The semantics and results of the two
@@ -56,14 +57,14 @@
 job of parsing non-compliant messages, providing information about how a
 message was deemed broken.  It will populate a message object's \var{defects}
 attribute with a list of any problems it found in a message.  See the
-\refmodule{email.Errors} module for the list of defects that it can find.
+\refmodule{email.errors} module for the list of defects that it can find.
 
 Here is the API for the \class{FeedParser}:
 
 \begin{classdesc}{FeedParser}{\optional{_factory}}
 Create a \class{FeedParser} instance.  Optional \var{_factory} is a
 no-argument callable that will be called whenever a new message object is
-needed.  It defaults to the \class{email.Message.Message} class.
+needed.  It defaults to the \class{email.message.Message} class.
 \end{classdesc}
 
 \begin{methoddesc}[FeedParser]{feed}{data}
@@ -82,21 +83,22 @@
 
 \subsubsection{Parser class API}
 
-The \class{Parser} provides an API that can be used to parse a message when
-the complete contents of the message are available in a string or file.  The
-\module{email.Parser} module also provides a second class, called
+The \class{Parser} class, imported from the \module{email.parser} module,
+provides an API that can be used to parse a message when the complete contents
+of the message are available in a string or file.  The
+\module{email.parser} module also provides a second class, called
 \class{HeaderParser} which can be used if you're only interested in
 the headers of the message. \class{HeaderParser} can be much faster in
 these situations, since it does not attempt to parse the message body,
 instead setting the payload to the raw body as a string.
 \class{HeaderParser} has the same API as the \class{Parser} class.
 
-\begin{classdesc}{Parser}{\optional{_class\optional{, strict}}}
+\begin{classdesc}{Parser}{\optional{_class}}
 The constructor for the \class{Parser} class takes an optional
 argument \var{_class}.  This must be a callable factory (such as a
 function or a class), and it is used whenever a sub-message object
 needs to be created.  It defaults to \class{Message} (see
-\refmodule{email.Message}).  The factory will be called without
+\refmodule{email.message}).  The factory will be called without
 arguments.
 
 The optional \var{strict} flag is ignored.  \deprecated{2.4}{Because the
@@ -201,6 +203,6 @@
       \method{is_multipart()} method may return \code{False}.  If such
       messages were parsed with the \class{FeedParser}, they will have an
       instance of the \class{MultipartInvariantViolationDefect} class in their
-      \var{defects} attribute list.  See \refmodule{email.Errors} for
+      \var{defects} attribute list.  See \refmodule{email.errors} for
       details.
 \end{itemize}
diff --git a/Doc/lib/emailutil.tex b/Doc/lib/emailutil.tex
index 491a2b9..fe96473 100644
--- a/Doc/lib/emailutil.tex
+++ b/Doc/lib/emailutil.tex
@@ -1,7 +1,7 @@
-\declaremodule{standard}{email.Utils}
+\declaremodule{standard}{email.utils}
 \modulesynopsis{Miscellaneous email package utilities.}
 
-There are several useful utilities provided in the \module{email.Utils}
+There are several useful utilities provided in the \module{email.utils}
 module:
 
 \begin{funcdesc}{quote}{str}
@@ -38,7 +38,7 @@
 simple example that gets all the recipients of a message:
 
 \begin{verbatim}
-from email.Utils import getaddresses
+from email.utils import getaddresses
 
 tos = msg.get_all('to', [])
 ccs = msg.get_all('cc', [])
diff --git a/Doc/lib/lib.tex b/Doc/lib/lib.tex
index fad8fe7..eac35de 100644
--- a/Doc/lib/lib.tex
+++ b/Doc/lib/lib.tex
@@ -87,7 +87,6 @@
 \input{libstrings}              % String Services
 \input{libstring}
 \input{libre}
-\input{libreconvert}
 \input{libstruct}   % XXX also/better in File Formats?
 \input{libdifflib}
 \input{libstringio}
@@ -372,6 +371,7 @@
 \input{libbltin}                % really __builtin__
 \input{libmain}                 % really __main__
 \input{libwarnings}
+\input{libcontextlib}
 \input{libatexit}
 \input{libtraceback}
 \input{libfuture}               % really __future__
@@ -395,6 +395,7 @@
 \input{libzipimport}
 \input{libpkgutil}
 \input{libmodulefinder}
+\input{librunpy}
 
 
 % =============
@@ -454,8 +455,6 @@
 %\input{libcmpcache}
 %\input{libcmp}
 %\input{libni}
-%\input{libregex}
-%\input{libregsub}
 
 \chapter{Reporting Bugs}
 \input{reportingbugs}
diff --git a/Doc/lib/libarray.tex b/Doc/lib/libarray.tex
index 897310d..eaf5888 100644
--- a/Doc/lib/libarray.tex
+++ b/Doc/lib/libarray.tex
@@ -139,8 +139,8 @@
 \end{methoddesc}
 
 \begin{methoddesc}[array]{fromunicode}{s}
-Extends this array with data from the given unicode string.
-The array must be a type 'u' array; otherwise a ValueError
+Extends this array with data from the given unicode string.  The array
+must be a type \code{'u'} array; otherwise a \exception{ValueError}
 is raised.  Use \samp{array.fromstring(ustr.decode(enc))} to
 append Unicode data to an array of some other type.
 \end{methoddesc}
@@ -197,8 +197,8 @@
 
 \begin{methoddesc}[array]{tounicode}{}
 Convert the array to a unicode string.  The array must be
-a type 'u' array; otherwise a ValueError is raised.  Use
-array.tostring().decode(enc) to obtain a unicode string
+a type \code{'u'} array; otherwise a \exception{ValueError} is raised.
+Use \samp{array.tostring().decode(enc)} to obtain a unicode string
 from an array of some other type.
 \end{methoddesc}
 
diff --git a/Doc/lib/libast.tex b/Doc/lib/libast.tex
index b3c3148..b2956ae 100644
--- a/Doc/lib/libast.tex
+++ b/Doc/lib/libast.tex
@@ -47,11 +47,11 @@
 can have zero-or-more values (marked with an asterisk), the
 values are represented as Python lists.
 
-\subsection{Abstract Grammar}
+\section{Abstract Grammar}
 
 The module defines a string constant \code{__version__} which
 is the decimal subversion revision number of the file shown below.
 
 The abstract grammar is currently defined as follows:
 
-\verbatiminput{../../Parser/Python.asdl}
\ No newline at end of file
+\verbatiminput{../../Parser/Python.asdl}
diff --git a/Doc/lib/libaudioop.tex b/Doc/lib/libaudioop.tex
index 76bcdbf..52c6f3d 100644
--- a/Doc/lib/libaudioop.tex
+++ b/Doc/lib/libaudioop.tex
@@ -12,9 +12,10 @@
 modules.  All scalar items are integers, unless specified otherwise.
 
 % This para is mostly here to provide an excuse for the index entries...
-This module provides support for u-LAW and Intel/DVI ADPCM encodings.
+This module provides support for a-LAW, u-LAW and Intel/DVI ADPCM encodings.
 \index{Intel/DVI ADPCM}
 \index{ADPCM, Intel/DVI}
+\index{a-LAW}
 \index{u-LAW}
 
 A few of the more complicated operations only take 16-bit samples,
@@ -42,6 +43,13 @@
 has the width specified in \var{width}.
 \end{funcdesc}
 
+\begin{funcdesc}{alaw2lin}{fragment, width}
+Convert sound fragments in a-LAW encoding to linearly encoded sound
+fragments.  a-LAW encoding always uses 8 bits samples, so \var{width}
+refers only to the sample width of the output fragment here.
+\versionadded{2.5}
+\end{funcdesc}
+
 \begin{funcdesc}{avg}{fragment, width}
 Return the average over all samples in the fragment.
 \end{funcdesc}
@@ -98,10 +106,6 @@
 Return the value of sample \var{index} from the fragment.
 \end{funcdesc}
 
-\begin{funcdesc}{lin2lin}{fragment, width, newwidth}
-Convert samples between 1-, 2- and 4-byte formats.
-\end{funcdesc}
-
 \begin{funcdesc}{lin2adpcm}{fragment, width, state}
 Convert samples to 4 bit Intel/DVI ADPCM encoding.  ADPCM coding is an
 adaptive coding scheme, whereby each 4 bit number is the difference
@@ -117,6 +121,18 @@
 packed 2 4-bit values per byte.
 \end{funcdesc}
 
+\begin{funcdesc}{lin2alaw}{fragment, width}
+Convert samples in the audio fragment to a-LAW encoding and return
+this as a Python string.  a-LAW is an audio encoding format whereby
+you get a dynamic range of about 13 bits using only 8 bit samples.  It
+is used by the Sun audio hardware, among others.
+\versionadded{2.5}
+\end{funcdesc}
+
+\begin{funcdesc}{lin2lin}{fragment, width, newwidth}
+Convert samples between 1-, 2- and 4-byte formats.
+\end{funcdesc}
+
 \begin{funcdesc}{lin2ulaw}{fragment, width}
 Convert samples in the audio fragment to u-LAW encoding and return
 this as a Python string.  u-LAW is an audio encoding format whereby
diff --git a/Doc/lib/libbsddb.tex b/Doc/lib/libbsddb.tex
index fa7bb4b..a5cda6d 100644
--- a/Doc/lib/libbsddb.tex
+++ b/Doc/lib/libbsddb.tex
@@ -15,9 +15,8 @@
 serialize them somehow, typically using \function{marshal.dumps()} or 
 \function{pickle.dumps}.
 
-Starting with Python 2.3 the \module{bsddb} module requires the
-Berkeley DB library version 3.2 or later (it is known to work with 3.2
-through 4.3 at the time of this writing).
+The \module{bsddb} module requires a Berkeley DB library version from
+3.3 thru 4.4.
 
 \begin{seealso}
   \seeurl{http://pybsddb.sourceforge.net/}{Website with documentation
diff --git a/Doc/lib/libcalendar.tex b/Doc/lib/libcalendar.tex
index bf3a7d6..acfd2da 100644
--- a/Doc/lib/libcalendar.tex
+++ b/Doc/lib/libcalendar.tex
@@ -15,12 +15,177 @@
 week to Sunday (6) or to any other weekday.  Parameters that specify
 dates are given as integers.
 
-Most of these functions rely on the \module{datetime} module which
-uses an idealized calendar, the current Gregorian calendar indefinitely
-extended in both directions.  This matches the definition of the
-"proleptic Gregorian" calendar in Dershowitz and Reingold's book
-"Calendrical Calculations", where it's the base calendar for all
-computations.
+Most of these functions and classses rely on the \module{datetime}
+module which uses an idealized calendar, the current Gregorian
+calendar indefinitely extended in both directions.  This matches
+the definition of the "proleptic Gregorian" calendar in Dershowitz
+and Reingold's book "Calendrical Calculations", where it's the
+base calendar for all computations.
+
+\begin{classdesc}{Calendar}{\optional{firstweekday}}
+Creates a \class{Calendar} object. \var{firstweekday} is an integer
+specifying the first day of the week. \code{0} is Monday (the default),
+\code{6} is Sunday.
+
+A \class{Calendar} object provides several methods that can
+be used for preparing the calendar data for formatting. This
+class doesn't do any formatting itself. This is the job of
+subclasses.
+\versionadded{2.5}
+\end{classdesc}
+
+\class{Calendar} instances have the following methods:
+
+\begin{methoddesc}{iterweekdays}{weekday}
+Return an iterator for the week day numbers that will be used
+for one week. The first number from the iterator will be the
+same as the number returned by \method{firstweekday()}.
+\end{methoddesc}
+
+\begin{methoddesc}{itermonthdates}{year, month}
+Return an iterator for the month \var{month} (1-12) in the
+year \var{year}. This iterator will return all days (as
+\class{datetime.date} objects) for the month and all days
+before the start of the month or after the end of the month
+that are required to get a complete week.
+\end{methoddesc}
+
+\begin{methoddesc}{itermonthdays2}{year, month}
+Return an iterator for the month \var{month} in the year
+\var{year} similar to \method{itermonthdates()}. Days returned
+will be tuples consisting of a day number and a week day
+number.
+\end{methoddesc}
+
+\begin{methoddesc}{itermonthdays}{year, month}
+Return an iterator for the month \var{month} in the year
+\var{year} similar to \method{itermonthdates()}. Days returned
+will simply be day numbers.
+\end{methoddesc}
+
+\begin{methoddesc}{monthdatescalendar}{year, month}
+Return a list of the weeks in the month \var{month} of
+the \var{year} as full weeks. Weeks are lists of seven
+\class{datetime.date} objects.
+\end{methoddesc}
+
+\begin{methoddesc}{monthdays2calendar}{year, month}
+Return a list of the weeks in the month \var{month} of
+the \var{year} as full weeks. Weeks are lists of seven
+tuples of day numbers and weekday numbers.
+\end{methoddesc}
+
+\begin{methoddesc}{monthdayscalendar}{year, month}
+Return a list of the weeks in the month \var{month} of
+the \var{year} as full weeks. Weeks are lists of seven
+day numbers.
+\end{methoddesc}
+
+\begin{methoddesc}{yeardatescalendar}{year, month\optional{, width}}
+Return the data for the specified year ready for formatting. The return
+value is a list of month rows. Each month row contains up to \var{width}
+months (defaulting to 3). Each month contains between 4 and 6 weeks and
+each week contains 1--7 days. Days are \class{datetime.date} objects.
+\end{methoddesc}
+
+\begin{methoddesc}{yeardays2calendar}{year, month\optional{, width}}
+Return the data for the specified year ready for formatting (similar to
+\method{yeardatescalendar()}). Entries in the week lists are tuples of
+day numbers and weekday numbers. Day numbers outside this month are zero.
+\end{methoddesc}
+
+\begin{methoddesc}{yeardayscalendar}{year, month\optional{, width}}
+Return the data for the specified year ready for formatting (similar to
+\method{yeardatescalendar()}). Entries in the week lists are day numbers.
+Day numbers outside this month are zero.
+\end{methoddesc}
+
+
+\begin{classdesc}{TextCalendar}{\optional{firstweekday}}
+This class can be used to generate plain text calendars.
+
+\versionadded{2.5}
+\end{classdesc}
+
+\class{TextCalendar} instances have the following methods:
+
+\begin{methoddesc}{formatmonth}{theyear, themonth\optional{, w\optional{, l}}}
+Return a month's calendar in a multi-line string. If \var{w} is
+provided, it specifies the width of the date columns, which are
+centered. If \var{l} is given, it specifies the number of lines that
+each week will use. Depends on the first weekday as set by
+\function{setfirstweekday()}.
+\end{methoddesc}
+
+\begin{methoddesc}{prmonth}{theyear, themonth\optional{, w\optional{, l}}}
+Print a month's calendar as returned by \method{formatmonth()}.
+\end{methoddesc}
+
+\begin{methoddesc}{formatyear}{theyear, themonth\optional{, w\optional{,
+                               l\optional{, c\optional{, m}}}}}
+Return a \var{m}-column calendar for an entire year as a multi-line string.
+Optional parameters \var{w}, \var{l}, and \var{c} are for date column
+width, lines per week, and number of spaces between month columns,
+respectively. Depends on the first weekday as set by
+\method{setfirstweekday()}.  The earliest year for which a calendar can
+be generated is platform-dependent.
+\end{methoddesc}
+
+\begin{methoddesc}{pryear}{theyear\optional{, w\optional{, l\optional{,
+                           c\optional{, m}}}}}
+Print the calendar for an entire year as returned by \method{formatyear()}.
+\end{methoddesc}
+
+
+\begin{classdesc}{HTMLCalendar}{\optional{firstweekday}}
+This class can be used to generate HTML calendars.
+
+\versionadded{2.5}
+\end{classdesc}
+
+\class{HTMLCalendar} instances have the following methods:
+
+\begin{methoddesc}{formatmonth}{theyear, themonth\optional{, withyear}}
+Return a month's calendar as an HTML table. If \var{withyear} is
+true the year will be included in the header, otherwise just the
+month name will be used.
+\end{methoddesc}
+
+\begin{methoddesc}{formatyear}{theyear, themonth\optional{, width}}
+Return a year's calendar as an HTML table. \var{width} (defaulting to 3)
+specifies the number of months per row.
+\end{methoddesc}
+
+\begin{methoddesc}{formatyearpage}{theyear, themonth\optional{,
+                                   width\optional{, css\optional{, encoding}}}}
+Return a year's calendar as a complete HTML page. \var{width}
+(defaulting to 3) specifies the number of months per row. \var{css}
+is the name for the cascading style sheet to be used. \constant{None}
+can be passed if no style sheet should be used. \var{encoding}
+specifies the encoding to be used for the output (defaulting
+to the system default encoding).
+\end{methoddesc}
+
+
+\begin{classdesc}{LocaleTextCalendar}{\optional{firstweekday\optional{, locale}}}
+This subclass of \class{TextCalendar} can be passed a locale name in the
+constructor and will return month and weekday names in the specified locale.
+If this locale includes an encoding all strings containing month and weekday
+names will be returned as unicode.
+\versionadded{2.5}
+\end{classdesc}
+
+
+\begin{classdesc}{LocaleHTMLCalendar}{\optional{firstweekday\optional{, locale}}}
+This subclass of \class{HTMLCalendar} can be passed a locale name in the
+constructor and will return month and weekday names in the specified locale.
+If this locale includes an encoding all strings containing month and weekday
+names will be returned as unicode.
+\versionadded{2.5}
+\end{classdesc}
+
+
+For simple text calendars this module provides the following functions.
 
 \begin{funcdesc}{setfirstweekday}{weekday}
 Sets the weekday (\code{0} is Monday, \code{6} is Sunday) to start
@@ -80,11 +245,8 @@
 \end{funcdesc}
 
 \begin{funcdesc}{month}{theyear, themonth\optional{, w\optional{, l}}}
-Returns a month's calendar in a multi-line string. If \var{w} is
-provided, it specifies the width of the date columns, which are
-centered. If \var{l} is given, it specifies the number of lines that
-each week will use. Depends on the first weekday as set by
-\function{setfirstweekday()}.
+Returns a month's calendar in a multi-line string using the
+\method{formatmonth} of the \class{TextCalendar} class.
 \versionadded{2.0}
 \end{funcdesc}
 
@@ -94,12 +256,8 @@
 \end{funcdesc}
 
 \begin{funcdesc}{calendar}{year\optional{, w\optional{, l\optional{c}}}}
-Returns a 3-column calendar for an entire year as a multi-line string.
-Optional parameters \var{w}, \var{l}, and \var{c} are for date column
-width, lines per week, and number of spaces between month columns,
-respectively. Depends on the first weekday as set by
-\function{setfirstweekday()}.  The earliest year for which a calendar can
-be generated is platform-dependent.
+Returns a 3-column calendar for an entire year as a multi-line string
+using the \method{formatyear} of the \class{TextCalendar} class.
 \versionadded{2.0}
 \end{funcdesc}
 
diff --git a/Doc/lib/libcgi.tex b/Doc/lib/libcgi.tex
index cd6f58a..1dd7e03 100644
--- a/Doc/lib/libcgi.tex
+++ b/Doc/lib/libcgi.tex
@@ -323,7 +323,7 @@
 
 The optional argument \var{strict_parsing} is a flag indicating what
 to do with parsing errors.  If false (the default), errors
-are silently ignored.  If true, errors raise a ValueError
+are silently ignored.  If true, errors raise a \exception{ValueError}
 exception.
 
 Use the \function{\refmodule{urllib}.urlencode()} function to convert
@@ -347,7 +347,7 @@
 
 The optional argument \var{strict_parsing} is a flag indicating what
 to do with parsing errors.  If false (the default), errors
-are silently ignored.  If true, errors raise a ValueError
+are silently ignored.  If true, errors raise a \exception{ValueError}
 exception.
 
 Use the \function{\refmodule{urllib}.urlencode()} function to convert
diff --git a/Doc/lib/libcodecs.tex b/Doc/lib/libcodecs.tex
index 1806ef0..8a2417e 100644
--- a/Doc/lib/libcodecs.tex
+++ b/Doc/lib/libcodecs.tex
@@ -112,6 +112,7 @@
 
 Raises a \exception{LookupError} in case the encoding cannot be found or the
 codec doesn't support an incremental encoder.
+\versionadded{2.5}
 \end{funcdesc}
 
 \begin{funcdesc}{getincrementaldecoder}{encoding}
@@ -120,6 +121,7 @@
 
 Raises a \exception{LookupError} in case the encoding cannot be found or the
 codec doesn't support an incremental decoder.
+\versionadded{2.5}
 \end{funcdesc}
 
 \begin{funcdesc}{getreader}{encoding}
@@ -150,7 +152,7 @@
 continue. The encoder will encode the replacement and continue encoding
 the original input at the specified position. Negative position values
 will be treated as being relative to the end of the input string. If the
-resulting position is out of bound an IndexError will be raised.
+resulting position is out of bound an \exception{IndexError} will be raised.
 
 Decoding and translating works similar, except \exception{UnicodeDecodeError}
 or \exception{UnicodeTranslateError} will be passed to the handler and
@@ -229,12 +231,14 @@
 Uses an incremental encoder to iteratively encode the input provided by
 \var{iterable}. This function is a generator. \var{errors} (as well as
 any other keyword argument) is passed through to the incremental encoder.
+\versionadded{2.5}
 \end{funcdesc}
 
 \begin{funcdesc}{iterdecode}{iterable, encoding\optional{, errors}}
 Uses an incremental decoder to iteratively decode the input provided by
 \var{iterable}. This function is a generator. \var{errors} (as well as
 any other keyword argument) is passed through to the incremental encoder.
+\versionadded{2.5}
 \end{funcdesc}
 
 The module also provides the following constants which are useful
@@ -355,6 +359,8 @@
 
 \subsubsection{IncrementalEncoder Objects \label{incremental-encoder-objects}}
 
+\versionadded{2.5}
+
 The \class{IncrementalEncoder} class is used for encoding an input in multiple
 steps. It defines the following methods which every incremental encoder must
 define in order to be compatible to the Python codec registry.
@@ -437,6 +443,10 @@
   Decodes \var{object} (taking the current state of the decoder into account)
   and returns the resulting decoded object. If this is the last call to
   \method{decode} \var{final} must be true (the default is false).
+  If \var{final} is true the decoder must decode the input completely and must
+  flush all buffers. If this isn't possible (e.g. because of incomplete byte
+  sequences at the end of the input) it must initiate error handling just like
+  in the stateless case (which might raise an exception).
 \end{methoddesc}
 
 \begin{methoddesc}{reset}{}
@@ -690,10 +700,10 @@
 The simplest method is to map the codepoints 0-255 to the bytes
 \code{0x0}-\code{0xff}. This means that a unicode object that contains 
 codepoints above \code{U+00FF} can't be encoded with this method (which 
-is called \code{'latin-1'} or \code{'iso-8859-1'}). unicode.encode() will 
-raise a UnicodeEncodeError that looks like this: \samp{UnicodeEncodeError:
-'latin-1' codec can't encode character u'\e u1234' in position 3: ordinal
-not in range(256)}.
+is called \code{'latin-1'} or \code{'iso-8859-1'}).
+\function{unicode.encode()} will raise a \exception{UnicodeEncodeError}
+that looks like this: \samp{UnicodeEncodeError: 'latin-1' codec can't
+encode character u'\e u1234' in position 3: ordinal not in range(256)}.
 
 There's another group of encodings (the so called charmap encodings)
 that choose a different subset of all unicode code points and how
@@ -1220,7 +1230,7 @@
 
 \lineiv{rot_13}
          {rot13}
-         {byte string}
+         {Unicode string}
          {Returns the Caesar-cypher encryption of the operand}
 
 \lineiv{string_escape}
diff --git a/Doc/lib/libcollections.tex b/Doc/lib/libcollections.tex
index 542ef6b..d9bfa39 100644
--- a/Doc/lib/libcollections.tex
+++ b/Doc/lib/libcollections.tex
@@ -10,9 +10,11 @@
 
 This module implements high-performance container datatypes.  Currently,
 there are two datatypes, deque and defaultdict.
-Future additions may include B-trees and Fibonacci heaps.
+Future additions may include balanced trees and ordered dictionaries.
 \versionchanged[Added defaultdict]{2.5}
 
+\subsection{\class{deque} objects \label{deque-objects}}
+
 \begin{funcdesc}{deque}{\optional{iterable}}
   Returns a new deque objected initialized left-to-right (using
   \method{append()}) with data from \var{iterable}.  If \var{iterable}
@@ -137,7 +139,7 @@
 deque(['c', 'b', 'a'])
 \end{verbatim}
 
-\subsection{Recipes \label{deque-recipes}}
+\subsubsection{Recipes \label{deque-recipes}}
 
 This section shows various approaches to working with deques.
 
@@ -215,6 +217,8 @@
 
 
 
+\subsection{\class{defaultdict} objects \label{defaultdict-objects}}
+
 \begin{funcdesc}{defaultdict}{\optional{default_factory\optional{, ...}}}
   Returns a new dictionary-like object.  \class{defaultdict} is a subclass
   of the builtin \class{dict} class.  It overrides one method and adds one
@@ -255,3 +259,79 @@
   from the first argument to the constructor, if present, or to \code{None}, 
   if absent.
 \end{datadesc}
+
+
+\subsubsection{\class{defaultdict} Examples \label{defaultdict-examples}}
+
+Using \class{list} as the \member{default_factory}, it is easy to group
+a sequence of key-value pairs into a dictionary of lists:
+
+\begin{verbatim}
+>>> s = [('yellow', 1), ('blue', 2), ('yellow', 3), ('blue', 4), ('red', 1)]
+>>> d = defaultdict(list)
+>>> for k, v in s:
+        d[k].append(v)
+
+>>> d.items()
+[('blue', [2, 4]), ('red', [1]), ('yellow', [1, 3])]
+\end{verbatim}
+
+When each key is encountered for the first time, it is not already in the
+mapping; so an entry is automatically created using the
+\member{default_factory} function which returns an empty \class{list}.  The
+\method{list.append()} operation then attaches the value to the new list.  When
+keys are encountered again, the look-up proceeds normally (returning the list
+for that key) and the \method{list.append()} operation adds another value to
+the list. This technique is simpler and faster than an equivalent technique
+using \method{dict.setdefault()}:
+
+\begin{verbatim}
+>>> d = {}
+>>> for k, v in s:
+	d.setdefault(k, []).append(v)
+
+>>> d.items()
+[('blue', [2, 4]), ('red', [1]), ('yellow', [1, 3])]
+\end{verbatim}
+
+Setting the \member{default_factory} to \class{int} makes the
+\class{defaultdict} useful for counting (like a bag or multiset in other
+languages):
+
+\begin{verbatim}
+>>> s = 'mississippi'
+>>> d = defaultdict(int)
+>>> for k in s:
+        d[k] += 1
+
+>>> d.items()
+[('i', 4), ('p', 2), ('s', 4), ('m', 1)]
+\end{verbatim}
+
+When a letter is first encountered, it is missing from the mapping, so the
+\member{default_factory} function calls \function{int()} to supply a default
+count of zero.  The increment operation then builds up the count for each
+letter. This technique makes counting simpler and faster than an equivalent
+technique using \method{dict.get()}:
+
+\begin{verbatim}
+>>> d = {}
+>>> for k in s:
+	d[k] = d.get(k, 0) + 1
+
+>>> d.items()
+[('i', 4), ('p', 2), ('s', 4), ('m', 1)]
+\end{verbatim}
+
+Setting the \member{default_factory} to \class{set} makes the
+\class{defaultdict} useful for building a dictionary of sets:
+
+\begin{verbatim}
+>>> s = [('red', 1), ('blue', 2), ('red', 3), ('blue', 4), ('red', 1), ('blue', 4)]
+>>> d = defaultdict(set)
+>>> for k, v in s:
+        d[k].add(v)
+
+>>> d.items()
+[('blue', set([2, 4])), ('red', set([1, 3]))]
+\end{verbatim}
diff --git a/Doc/lib/libcontextlib.tex b/Doc/lib/libcontextlib.tex
new file mode 100644
index 0000000..46f9cdd
--- /dev/null
+++ b/Doc/lib/libcontextlib.tex
@@ -0,0 +1,144 @@
+\section{\module{contextlib} ---
+         Utilities for \keyword{with}-statement contexts.}
+
+\declaremodule{standard}{contextlib}
+\modulesynopsis{Utilities for \keyword{with}-statement contexts.}
+
+\versionadded{2.5}
+
+This module provides utilities for common tasks involving the
+\keyword{with} statement.
+
+Functions provided:
+
+\begin{funcdesc}{contextmanager}{func}
+This function is a decorator that can be used to define context managers
+for use with the \keyword{with} statement, without needing to create a
+class or separate \method{__enter__()} and \method{__exit__()} methods.
+
+A simple example:
+
+\begin{verbatim}
+from __future__ import with_statement
+from contextlib import contextmanager
+
+@contextmanager
+def tag(name):
+    print "<%s>" % name
+    yield
+    print "</%s>" % name
+
+>>> with tag("h1"):
+...    print "foo"
+...
+<h1>
+foo
+</h1>
+\end{verbatim}
+
+When called, the decorated function must return a generator-iterator.
+This iterator must yield exactly one value, which will be bound to the
+targets in the \keyword{with} statement's \keyword{as} clause, if any.
+
+At the point where the generator yields, the block nested in the
+\keyword{with} statement is executed.  The generator is then resumed
+after the block is exited.  If an unhandled exception occurs in the
+block, it is reraised inside the generator at the point where the yield
+occurred.  Thus, you can use a
+\keyword{try}...\keyword{except}...\keyword{finally} statement to trap
+the error (if any), or ensure that some cleanup takes place.
+
+Note that you can use \code{@contextmanager} to define a context
+manager's \method{__context__} method.  This is usually more convenient
+than creating another class just to serve as a context.  For example:
+
+\begin{verbatim}
+from __future__ import with_statement
+from contextlib import contextmanager
+
+class Tag:
+    def __init__(self, name):
+        self.name = name
+        
+    @contextmanager
+    def __context__(self):
+        print "<%s>" % self.name
+        yield self
+        print "</%s>" % self.name
+        
+h1 = Tag("h1")
+
+>>> with h1 as me:
+...     print "hello from", me
+<h1>
+hello from <__main__.Tag instance at 0x402ce8ec>
+</h1>
+\end{verbatim}
+\end{funcdesc}
+
+\begin{funcdesc}{nested}{ctx1\optional{, ctx2\optional{, ...}}}
+Combine multiple context managers into a single nested context manager.
+
+Code like this:
+
+\begin{verbatim}
+from contextlib import nested
+
+with nested(A, B, C) as (X, Y, Z):
+    do_something()
+\end{verbatim}
+
+is equivalent to this:
+
+\begin{verbatim}
+with A as X:
+    with B as Y:
+        with C as Z:
+            do_something()
+\end{verbatim}
+
+Note that if one of the nested contexts' \method{__exit__()} method
+raises an exception, any previous exception state will be lost; the new
+exception will be passed to the outer contexts' \method{__exit__()}
+method(s), if any.  In general, \method{__exit__()} methods should avoid
+raising exceptions, and in particular they should not re-raise a
+passed-in exception.
+\end{funcdesc}
+
+\label{context-closing}
+\begin{funcdesc}{closing}{thing}
+Return a context manager that closes \var{thing} upon completion of the
+block.  This is basically equivalent to:
+
+\begin{verbatim}
+from contextlib import contextmanager
+
+@contextmanager
+def closing(thing):
+    try:
+        yield thing
+    finally:
+        thing.close()
+\end{verbatim}
+
+And lets you write code like this:
+\begin{verbatim}
+from __future__ import with_statement
+from contextlib import closing
+import codecs
+
+with closing(codecs.open("foo", encoding="utf8")) as f:
+    for line in f:
+        print line.encode("latin1")
+\end{verbatim}
+
+without needing to explicitly close \code{f}.  Even if an error occurs,
+\code{f.close()} will be called when the \keyword{with} block is exited.
+
+\end{funcdesc}
+
+\begin{seealso}
+  \seepep{0343}{The "with" statement}
+         {The specification, background, and examples for the
+          Python \keyword{with} statement.}
+\end{seealso}
diff --git a/Doc/lib/libcookielib.tex b/Doc/lib/libcookielib.tex
index a35f97d..ef2d833 100644
--- a/Doc/lib/libcookielib.tex
+++ b/Doc/lib/libcookielib.tex
@@ -249,7 +249,7 @@
     ignore_discard=\constant{False}, ignore_expires=\constant{False}}
 Save cookies to a file.
 
-This base class raises \class{NotImplementedError}.  Subclasses may
+This base class raises \exception{NotImplementedError}.  Subclasses may
 leave this method unimplemented.
 
 \var{filename} is the name of file in which to save cookies.  If
diff --git a/Doc/lib/libcsv.tex b/Doc/lib/libcsv.tex
index ba0df4f..65053c7 100644
--- a/Doc/lib/libcsv.tex
+++ b/Doc/lib/libcsv.tex
@@ -33,8 +33,9 @@
 \begin{notice}
   This version of the \module{csv} module doesn't support Unicode
   input.  Also, there are currently some issues regarding \ASCII{} NUL
-  characters.  Accordingly, all input should generally be printable
-  \ASCII{} to be safe.  These restrictions will be removed in the future.
+  characters.  Accordingly, all input should be UTF-8 or printable
+  \ASCII{} to be safe; see the examples in section~\ref{csv-examples}.
+  These restrictions will be removed in the future.
 \end{notice}
 
 \begin{seealso}
@@ -365,7 +366,7 @@
 
 
 
-\subsection{Examples}
+\subsection{Examples\label{csv-examples}}
 
 The simplest example of reading a CSV file:
 
@@ -426,37 +427,99 @@
 \end{verbatim}
 
 The \module{csv} module doesn't directly support reading and writing
-Unicode, but it is 8-bit clean save for some problems with \ASCII{} NUL
-characters, so you can write classes that handle the encoding and decoding
-for you as long as you avoid encodings like utf-16 that use NULs:
+Unicode, but it is 8-bit-clean save for some problems with \ASCII{} NUL
+characters.  So you can write functions or classes that handle the
+encoding and decoding for you as long as you avoid encodings like
+UTF-16 that use NULs.  UTF-8 is recommended.
+
+\function{unicode_csv_reader} below is a generator that wraps
+\class{csv.reader} to handle Unicode CSV data (a list of Unicode
+strings).  \function{utf_8_encoder} is a generator that encodes the
+Unicode strings as UTF-8, one string (or row) at a time.  The encoded
+strings are parsed by the CSV reader, and
+\function{unicode_csv_reader} decodes the UTF-8-encoded cells back
+into Unicode:
 
 \begin{verbatim}
 import csv
 
+def unicode_csv_reader(unicode_csv_data, dialect=csv.excel, **kwargs):
+    # csv.py doesn't do Unicode; encode temporarily as UTF-8:
+    csv_reader = csv.reader(utf_8_encoder(unicode_csv_data),
+                            dialect=dialect, **kwargs)
+    for row in csv_reader:
+        # decode UTF-8 back to Unicode, cell by cell:
+        yield [unicode(cell, 'utf-8') for cell in row]
+
+def utf_8_encoder(unicode_csv_data):
+    for line in unicode_csv_data:
+        yield line.encode('utf-8')
+\end{verbatim}
+
+For all other encodings the following \class{UnicodeReader} and
+\class{UnicodeWriter} classes can be used. They take an additional
+\var{encoding} parameter in their constructor and make sure that the data
+passes the real reader or writer encoded as UTF-8:
+
+\begin{verbatim}
+import csv, codecs, cStringIO
+
+class UTF8Recoder:
+    """
+    Iterator that reads an encoded stream and reencodes the input to UTF-8
+    """
+    def __init__(self, f, encoding):
+        self.reader = codecs.getreader(encoding)(f)
+
+    def __iter__(self):
+        return self
+
+    def next(self):
+        return self.reader.next().encode("utf-8")
+
 class UnicodeReader:
+    """
+    A CSV reader which will iterate over lines in the CSV file "f",
+    which is encoded in the given encoding.
+    """
+
     def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
+        f = UTF8Recoder(f, encoding)
         self.reader = csv.reader(f, dialect=dialect, **kwds)
-        self.encoding = encoding
 
     def next(self):
         row = self.reader.next()
-        return [unicode(s, self.encoding) for s in row]
+        return [unicode(s, "utf-8") for s in row]
 
     def __iter__(self):
         return self
 
 class UnicodeWriter:
+    """
+    A CSV writer which will write rows to CSV file "f",
+    which is encoded in the given encoding.
+    """
+
     def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
-        self.writer = csv.writer(f, dialect=dialect, **kwds)
-        self.encoding = encoding
+        # Redirect output to a queue
+        self.queue = cStringIO.StringIO()
+        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
+        self.stream = f
+        self.encoder = codecs.getincrementalencoder(encoding)()
 
     def writerow(self, row):
-        self.writer.writerow([s.encode(self.encoding) for s in row])
+        self.writer.writerow([s.encode("utf-8") for s in row])
+        # Fetch UTF-8 output from the queue ...
+        data = self.queue.getvalue()
+        data = data.decode("utf-8")
+        # ... and reencode it into the target encoding
+        data = self.encoder.encode(data)
+        # write to the target stream
+        self.stream.write(data)
+        # empty queue
+        self.queue.truncate(0)
 
     def writerows(self, rows):
         for row in rows:
             self.writerow(row)
 \end{verbatim}
-
-They should work just like the \class{csv.reader} and \class{csv.writer}
-classes but add an \var{encoding} parameter.
diff --git a/Doc/lib/libdatetime.tex b/Doc/lib/libdatetime.tex
index 4bba553..cae5d60 100644
--- a/Doc/lib/libdatetime.tex
+++ b/Doc/lib/libdatetime.tex
@@ -504,7 +504,7 @@
   Return a string representing the date, controlled by an explicit
   format string.  Format codes referring to hours, minutes or seconds
   will see 0 values.
-  See the section on \method{strftime()} behavior.
+  See section~\ref{strftime-behavior} -- \method{strftime()} behavior.
 \end{methoddesc}
 
 
@@ -970,8 +970,8 @@
 
 \begin{methoddesc}{strftime}{format}
   Return a string representing the date and time, controlled by an
-  explicit format string.  See the section on \method{strftime()}
-  behavior.
+  explicit format string.  See section~\ref{strftime-behavior} --
+  \method{strftime()} behavior.
 \end{methoddesc}
 
 
@@ -1100,7 +1100,8 @@
 
 \begin{methoddesc}{strftime}{format}
   Return a string representing the time, controlled by an explicit
-  format string.  See the section on \method{strftime()} behavior.
+  format string.  See section~\ref{strftime-behavior} --
+  \method{strftime()} behavior.
 \end{methoddesc}
 
 \begin{methoddesc}{utcoffset}{}
@@ -1368,7 +1369,7 @@
 -4 hours)).
 
 
-\subsection{\method{strftime()} Behavior}
+\subsection{\method{strftime()} Behavior\label{strftime-behavior}}
 
 \class{date}, \class{datetime}, and \class{time}
 objects all support a \code{strftime(\var{format})}
diff --git a/Doc/lib/libdecimal.tex b/Doc/lib/libdecimal.tex
index 092f038..ffc3363 100644
--- a/Doc/lib/libdecimal.tex
+++ b/Doc/lib/libdecimal.tex
@@ -442,9 +442,33 @@
   Set the current context for the active thread to \var{c}.                                          
 \end{funcdesc}  
 
-New contexts can formed using the \class{Context} constructor described below.
-In addition, the module provides three pre-made contexts:                                          
+Beginning with Python 2.5, you can also use the \keyword{with} statement
+to temporarily change the active context. For example the following code
+increases the current decimal precision by 2 places, performs a
+calculation, and then automatically restores the previous context:
 
+\begin{verbatim}
+from __future__ import with_statement
+import decimal
+
+with decimal.getcontext() as ctx:
+    ctx.prec += 2   # add 2 more digits of precision
+    calculate_something()
+\end{verbatim}
+
+The context that's active in the body of the \keyword{with} statement is
+a \emph{copy} of the context you provided to the \keyword{with}
+statement, so modifying its attributes doesn't affect anything except
+that temporary copy.
+
+You can use any decimal context in a \keyword{with} statement, but if
+you just want to make a temporary change to some aspect of the current
+context, it's easiest to just use \function{getcontext()} as shown
+above.
+
+New contexts can also be created using the \class{Context} constructor
+described below. In addition, the module provides three pre-made
+contexts:
 
 \begin{classdesc*}{BasicContext}
   This is a standard context defined by the General Decimal Arithmetic
diff --git a/Doc/lib/libfuncs.tex b/Doc/lib/libfuncs.tex
index 0be3aa9..c0352d3 100644
--- a/Doc/lib/libfuncs.tex
+++ b/Doc/lib/libfuncs.tex
@@ -6,7 +6,7 @@
 
 \setindexsubitem{(built-in function)}
 
-\begin{funcdesc}{__import__}{name\optional{, globals\optional{, locals\optional{, fromlist}}}}
+\begin{funcdesc}{__import__}{name\optional{, globals\optional{, locals\optional{, fromlist\optional{, level}}}}}
   This function is invoked by the \keyword{import}\stindex{import}
   statement.  It mainly exists so that you can replace it with another
   function that has a compatible interface, in order to change the
@@ -20,9 +20,9 @@
 
   For example, the statement \samp{import spam} results in the
   following call: \code{__import__('spam',} \code{globals(),}
-  \code{locals(), [])}; the statement \samp{from spam.ham import eggs}
+  \code{locals(), [], -1)}; the statement \samp{from spam.ham import eggs}
   results in \samp{__import__('spam.ham', globals(), locals(),
-  ['eggs'])}.  Note that even though \code{locals()} and
+  ['eggs'], -1)}.  Note that even though \code{locals()} and
   \code{['eggs']} are passed in as arguments, the
   \function{__import__()} function does not set the local variable
   named \code{eggs}; this is done by subsequent code that is generated
@@ -52,6 +52,15 @@
         mod = getattr(mod, comp)
     return mod
 \end{verbatim}
+
+  \var{level} specifies whether to use absolute or relative imports.
+  The default is \code{-1} which indicates both absolute and relative
+  imports will be attempted.  \code{0} means only perform absolute imports.
+  Positive values for \var{level} indicate the number of parent directories
+  to search relative to the directory of the module calling
+  \function{__import__}.
+\versionchanged[The level parameter was added]{2.5}
+\versionchanged[Keyword support for parameters was added]{2.5}
 \end{funcdesc}
 
 \begin{funcdesc}{abs}{x}
@@ -683,7 +692,7 @@
 \end{funcdesc}
 
 \begin{funcdesc}{object}{}
-  Return a new featureless object.  \function{object()} is a base
+  Return a new featureless object.  \class{object} is a base
   for all new style classes.  It has the methods that are common
   to all instances of new style classes.
   \versionadded{2.2}
@@ -718,8 +727,11 @@
 \begin{funcdesc}{pow}{x, y\optional{, z}}
   Return \var{x} to the power \var{y}; if \var{z} is present, return
   \var{x} to the power \var{y}, modulo \var{z} (computed more
-  efficiently than \code{pow(\var{x}, \var{y}) \%\ \var{z}}).  The
-  arguments must have numeric types.  With mixed operand types, the
+  efficiently than \code{pow(\var{x}, \var{y}) \%\ \var{z}}).
+  The two-argument form \code{pow(\var{x}, \var{y})} is equivalent to using
+  the power operator: \code{\var{x}**\var{y}}.
+  
+  The arguments must have numeric types.  With mixed operand types, the
   coercion rules for binary arithmetic operators apply.  For int and
   long int operands, the result has the same type as the operands
   (after coercion) unless the second argument is negative; in that
diff --git a/Doc/lib/libgc.tex b/Doc/lib/libgc.tex
index 54ca26c..0d3408b 100644
--- a/Doc/lib/libgc.tex
+++ b/Doc/lib/libgc.tex
@@ -35,7 +35,8 @@
 \begin{funcdesc}{collect}{\optional{generation}}
 With no arguments, run a full collection.  The optional argument
 \var{generation} may be an integer specifying which generation to collect
-(from 0 to 2).  A ValueError is raised if the generation number is invalid.
+(from 0 to 2).  A \exception{ValueError} is raised if the generation number 
+is invalid.
 The number of unreachable objects found is returned.
 
 \versionchanged[The optional \var{generation} argument was added]{2.5}
diff --git a/Doc/lib/libgetpass.tex b/Doc/lib/libgetpass.tex
index 28bfe8f..1d177d3 100644
--- a/Doc/lib/libgetpass.tex
+++ b/Doc/lib/libgetpass.tex
@@ -11,11 +11,15 @@
 The \module{getpass} module provides two functions:
 
 
-\begin{funcdesc}{getpass}{\optional{prompt}}
+\begin{funcdesc}{getpass}{\optional{prompt\optional{, stream}}}
   Prompt the user for a password without echoing.  The user is
   prompted using the string \var{prompt}, which defaults to
-  \code{'Password: '}.
+  \code{'Password: '}. On \UNIX, the prompt is written to the
+  file-like object \var{stream}, which defaults to
+  \code{sys.stdout} (this argument is ignored on Windows).
+
   Availability: Macintosh, \UNIX, Windows.
+  \versionadded[The \var{stream} parameter]{2.5}
 \end{funcdesc}
 
 
diff --git a/Doc/lib/libhashlib.tex b/Doc/lib/libhashlib.tex
index e9d0b57..62e3fc4 100644
--- a/Doc/lib/libhashlib.tex
+++ b/Doc/lib/libhashlib.tex
@@ -31,7 +31,7 @@
 Constructors for hash algorithms that are always present in this module are
 \function{md5()}, \function{sha1()}, \function{sha224()}, \function{sha256()},
 \function{sha384()}, and \function{sha512()}.  Additional algorithms may also
-be available depending upon the OpenSSL library python uses on your platform.
+be available depending upon the OpenSSL library that Python uses on your platform.
 \index{OpenSSL}
 
 For example, to obtain the digest of the string \code{'Nobody inspects
diff --git a/Doc/lib/libitertools.tex b/Doc/lib/libitertools.tex
index 421d647..20bbc8d 100644
--- a/Doc/lib/libitertools.tex
+++ b/Doc/lib/libitertools.tex
@@ -276,12 +276,30 @@
      def izip(*iterables):
          iterables = map(iter, iterables)
          while iterables:
-             result = [i.next() for i in iterables]
+             result = [it.next() for it in iterables]
              yield tuple(result)
   \end{verbatim}
 
   \versionchanged[When no iterables are specified, returns a zero length
-                  iterator instead of raising a TypeError exception]{2.4}  
+                  iterator instead of raising a \exception{TypeError}
+		  exception]{2.4}
+
+  Note, the left-to-right evaluation order of the iterables is guaranteed.
+  This makes possible an idiom for clustering a data series into n-length
+  groups using \samp{izip(*[iter(s)]*n)}.  For data that doesn't fit
+  n-length groups exactly, the last tuple can be pre-padded with fill
+  values using \samp{izip(*[chain(s, [None]*(n-1))]*n)}.
+         
+  Note, when \function{izip()} is used with unequal length inputs, subsequent
+  iteration over the longer iterables cannot reliably be continued after
+  \function{izip()} terminates.  Potentially, up to one entry will be missing
+  from each of the left-over iterables. This occurs because a value is fetched
+  from each iterator in-turn, but the process ends when one of the iterators
+  terminates.  This leaves the last fetched values in limbo (they cannot be
+  returned in a final, incomplete tuple and they are cannot be pushed back
+  into the iterator for retrieval with \code{it.next()}).  In general,
+  \function{izip()} should only be used with unequal length inputs when you
+  don't care about trailing, unmatched values from the longer iterables.
 \end{funcdesc}
 
 \begin{funcdesc}{repeat}{object\optional{, times}}
@@ -517,4 +535,9 @@
         pass
     return izip(a, b)
 
+def grouper(n, iterable, padvalue=None):
+    "grouper(3, 'abcdefg', 'x') --> ('a','b','c'), ('d','e','f'), ('g','x','x')"
+    return izip(*[chain(iterable, repeat(padvalue, n-1))]*n)
+
+
 \end{verbatim}
diff --git a/Doc/lib/liblinecache.tex b/Doc/lib/liblinecache.tex
index c022ba9..1477d3c 100644
--- a/Doc/lib/liblinecache.tex
+++ b/Doc/lib/liblinecache.tex
@@ -15,7 +15,7 @@
 
 The \module{linecache} module defines the following functions:
 
-\begin{funcdesc}{getline}{filename, lineno}
+\begin{funcdesc}{getline}{filename, lineno\optional{, module_globals}}
 Get line \var{lineno} from file named \var{filename}. This function
 will never throw an exception --- it will return \code{''} on errors
 (the terminating newline character will be included for lines that are
@@ -23,7 +23,11 @@
 
 If a file named \var{filename} is not found, the function will look
 for it in the module\indexiii{module}{search}{path} search path,
-\code{sys.path}.
+\code{sys.path}, after first checking for a \pep{302} \code{__loader__}
+in \var{module_globals}, in case the module was imported from a zipfile
+or other non-filesystem import source. 
+
+\versionadded[The \var{module_globals} parameter was added]{2.5}
 \end{funcdesc}
 
 \begin{funcdesc}{clearcache}{}
diff --git a/Doc/lib/libnntplib.tex b/Doc/lib/libnntplib.tex
index 7f14dee..10330ed 100644
--- a/Doc/lib/libnntplib.tex
+++ b/Doc/lib/libnntplib.tex
@@ -68,48 +68,48 @@
 sent before authentication is performed.  Reader mode is sometimes
 necessary if you are connecting to an NNTP server on the local machine
 and intend to call reader-specific commands, such as \samp{group}.  If
-you get unexpected \code{NNTPPermanentError}s, you might need to set
+you get unexpected \exception{NNTPPermanentError}s, you might need to set
 \var{readermode}.  \var{readermode} defaults to \code{None}.
 \var{usenetrc} defaults to \code{True}.
 
 \versionchanged[\var{usenetrc} argument added]{2.4}
 \end{classdesc}
 
-\begin{classdesc}{NNTPError}{}
-Derived from the standard exception \code{Exception}, this is the base
-class for all exceptions raised by the \code{nntplib} module.
-\end{classdesc}
+\begin{excdesc}{NNTPError}
+Derived from the standard exception \exception{Exception}, this is the
+base class for all exceptions raised by the \module{nntplib} module.
+\end{excdesc}
 
-\begin{classdesc}{NNTPReplyError}{}
+\begin{excdesc}{NNTPReplyError}
 Exception raised when an unexpected reply is received from the
 server.  For backwards compatibility, the exception \code{error_reply}
 is equivalent to this class.
-\end{classdesc}
+\end{excdesc}
 
-\begin{classdesc}{NNTPTemporaryError}{}
+\begin{excdesc}{NNTPTemporaryError}
 Exception raised when an error code in the range 400--499 is
 received.  For backwards compatibility, the exception
 \code{error_temp} is equivalent to this class.
-\end{classdesc}
+\end{excdesc}
 
-\begin{classdesc}{NNTPPermanentError}{}
+\begin{excdesc}{NNTPPermanentError}
 Exception raised when an error code in the range 500--599 is
 received.  For backwards compatibility, the exception
 \code{error_perm} is equivalent to this class.
-\end{classdesc}
+\end{excdesc}
 
-\begin{classdesc}{NNTPProtocolError}{}
+\begin{excdesc}{NNTPProtocolError}
 Exception raised when a reply is received from the server that does
 not begin with a digit in the range 1--5.  For backwards
 compatibility, the exception \code{error_proto} is equivalent to this
 class.
-\end{classdesc}
+\end{excdesc}
 
-\begin{classdesc}{NNTPDataError}{}
+\begin{excdesc}{NNTPDataError}
 Exception raised when there is some error in the response data.  For
 backwards compatibility, the exception \code{error_data} is
 equivalent to this class.
-\end{classdesc}
+\end{excdesc}
 
 
 \subsection{NNTP Objects \label{nntp-objects}}
diff --git a/Doc/lib/liboptparse.tex b/Doc/lib/liboptparse.tex
index 4ab325b..8aca501 100644
--- a/Doc/lib/liboptparse.tex
+++ b/Doc/lib/liboptparse.tex
@@ -100,8 +100,8 @@
 single letter, e.g. \code{"-x"} or \code{"-F"}.  Also, traditional \UNIX{}
 syntax allows multiple options to be merged into a single argument,
 e.g.  \code{"-x -F"} is equivalent to \code{"-xF"}.  The GNU project
-introduced \code{"-{}-"} followed by a series of hyphen-separated words,
-e.g. \code{"-{}-file"} or \code{"-{}-dry-run"}.  These are the only two option
+introduced \code{"{--}"} followed by a series of hyphen-separated words,
+e.g. \code{"{--}file"} or \code{"{--}dry-run"}.  These are the only two option
 syntaxes provided by \module{optparse}.
 
 Some other option syntaxes that the world has seen include:
@@ -170,7 +170,7 @@
 prog -v --report /tmp/report.txt foo bar
 \end{verbatim}
 
-\code{"-v"} and \code{"-{}-report"} are both options.  Assuming that
+\code{"-v"} and \code{"{--}report"} are both options.  Assuming that
 \longprogramopt{report} takes one argument, \code{"/tmp/report.txt"} is an option
 argument.  \code{"foo"} and \code{"bar"} are positional arguments.
 
@@ -587,7 +587,7 @@
 erroneous calls to \code{parse.add{\_}option()}, e.g. invalid option strings,
 unknown option attributes, missing option attributes, etc.  These are
 dealt with in the usual way: raise an exception (either
-\code{optparse.OptionError} or \code{TypeError}) and let the program crash.
+\exception{optparse.OptionError} or \exception{TypeError}) and let the program crash.
 
 Handling user errors is much more important, since they are guaranteed
 to happen no matter how stable your code is.  \module{optparse} can automatically
@@ -1019,9 +1019,9 @@
 
 Integer arguments are passed to \code{int()} to convert them to Python
 integers.  If \code{int()} fails, so will \module{optparse}, although with a more
-useful error message.  (Internally, \module{optparse} raises OptionValueError;
-OptionParser catches this exception higher up and terminates your
-program with a useful error message.)
+useful error message.  (Internally, \module{optparse} raises
+\exception{OptionValueError}; OptionParser catches this exception higher
+up and terminates your program with a useful error message.)
 
 Likewise, \code{float} arguments are passed to \code{float()} for conversion,
 \code{long} arguments to \code{long()}, and \code{complex} arguments to
@@ -1032,7 +1032,7 @@
 option attribute (a sequence of strings) defines the set of allowed
 option arguments.  \code{optparse.option.check{\_}choice()} compares
 user-supplied option arguments against this master list and raises
-OptionValueError if an invalid string is given.
+\exception{OptionValueError} if an invalid string is given.
 
 
 \subsubsection{Querying and manipulating your option parser\label{optparse-querying-manipulating-option-parser}}
@@ -1052,7 +1052,7 @@
 option strings, all of those option strings become invalid.
 
 If \code{opt{\_}str} does not occur in any option belonging to this
-OptionParser, raises ValueError.
+OptionParser, raises \exception{ValueError}.
 \end{description}
 
 
@@ -1087,7 +1087,7 @@
 \begin{description}
 \item[\code{error} (default)]
 assume option conflicts are a programming error and raise 
-OptionConflictError
+\exception{OptionConflictError}
 \item[\code{resolve}]
 resolve option conflicts intelligently (see below)
 \end{description}
@@ -1260,7 +1260,7 @@
 
 \subsubsection{Raising errors in a callback\label{optparse-raising-errors-in-callback}}
 
-The callback function should raise OptionValueError if there are any
+The callback function should raise \exception{OptionValueError} if there are any
 problems with the option or its argument(s).  \module{optparse} catches this and
 terminates the program, printing the error message you supply to
 stderr.  Your message should be clear, concise, accurate, and mention
diff --git a/Doc/lib/libos.tex b/Doc/lib/libos.tex
index 9af5889..9ded3ae 100644
--- a/Doc/lib/libos.tex
+++ b/Doc/lib/libos.tex
@@ -343,6 +343,10 @@
 \versionchanged[When specified, the \var{mode} argument must now start
   with one of the letters \character{r}, \character{w}, or \character{a},
   otherwise a \exception{ValueError} is raised]{2.3}
+\versionchanged[On \UNIX, when the \var{mode} argument starts with
+  \character{a}, the \var{O_APPEND} flag is set on the file descriptor
+  (which the \cfunction{fdopen()} implementation already does on most
+  platforms)]{2.5}
 \end{funcdesc}
 
 \begin{funcdesc}{popen}{command\optional{, mode\optional{, bufsize}}}
@@ -547,7 +551,8 @@
 This function is intended for low-level I/O.  For normal usage,
 use the built-in function \function{open()}, which returns a ``file
 object'' with \method{read()} and \method{write()} methods (and many
-more).
+more).  To wrap a file descriptor in a ``file object'', use
+\function{fdopen()}.
 \end{notice}
 \end{funcdesc}
 
@@ -1731,6 +1736,27 @@
 return suitable process handles.
 \end{funcdesc}
 
+\begin{funcdesc}{wait3}{\optional{options}}
+Similar to \function{waitpid()}, except no process id argument is given and
+a 3-element tuple containing the child's process id, exit status indication,
+and resource usage information is returned.  Refer to
+\module{resource}.\function{getrusage()}
+for details on resource usage information.  The option argument is the same
+as that provided to \function{waitpid()} and \function{wait4()}.
+Availability: \UNIX.
+\versionadded{2.5}
+\end{funcdesc}
+
+\begin{funcdesc}{wait4}{pid, options}
+Similar to \function{waitpid()}, except a 3-element tuple, containing the
+child's process id, exit status indication, and resource usage information
+is returned.  Refer to \module{resource}.\function{getrusage()} for details
+on resource usage information.  The arguments to \function{wait4()} are
+the same as those provided to \function{waitpid()}.
+Availability: \UNIX.
+\versionadded{2.5}
+\end{funcdesc}
+
 \begin{datadesc}{WNOHANG}
 The option for \function{waitpid()} to return immediately if no child
 process status is available immediately. The function returns
@@ -1818,14 +1844,14 @@
 string which is the name of a defined system value; these names are
 specified in a number of standards (\POSIX, \UNIX{} 95, \UNIX{} 98, and
 others).  Some platforms define additional names as well.  The names
-known to the host operating system are given in the
+known to the host operating system are given as the keys of the
 \code{confstr_names} dictionary.  For configuration variables not
 included in that mapping, passing an integer for \var{name} is also
 accepted.
 Availability: Macintosh, \UNIX.
 
-If the configuration value specified by \var{name} isn't defined, the
-empty string is returned.
+If the configuration value specified by \var{name} isn't defined,
+\code{None} is returned.
 
 If \var{name} is a string and is not known, \exception{ValueError} is
 raised.  If a specific value for \var{name} is not supported by the
diff --git a/Doc/lib/libossaudiodev.tex b/Doc/lib/libossaudiodev.tex
index ec79e9e..223cf28 100644
--- a/Doc/lib/libossaudiodev.tex
+++ b/Doc/lib/libossaudiodev.tex
@@ -311,7 +311,7 @@
 
 \begin{methoddesc}[mixer device]{close}{}
 This method closes the open mixer device file.  Any further attempts to
-use the mixer after this file is closed will raise an IOError.
+use the mixer after this file is closed will raise an \exception{IOError}.
 \end{methoddesc}
 
 \begin{methoddesc}[mixer device]{fileno}{}
diff --git a/Doc/lib/libpdb.tex b/Doc/lib/libpdb.tex
index 6301175..a5b36a6 100644
--- a/Doc/lib/libpdb.tex
+++ b/Doc/lib/libpdb.tex
@@ -240,6 +240,45 @@
 the breakpoint is honored.  If condition is absent, any existing
 condition is removed; i.e., the breakpoint is made unconditional.
 
+\item[commands \optional{\var{bpnumber}}]
+
+Specify a list of commands for breakpoint number \var{bpnumber}.  The
+commands themselves appear on the following lines.  Type a line
+containing just 'end' to terminate the commands. An example:
+
+\begin{verbatim}
+(Pdb) commands 1
+(com) print some_variable
+(com) end
+(Pdb)
+\end{verbatim}
+
+To remove all commands from a breakpoint, type commands and
+follow it immediately with  end; that is, give no commands.
+
+With no \var{bpnumber} argument, commands refers to the last
+breakpoint set.
+
+You can use breakpoint commands to start your program up again.
+Simply use the continue command, or step, or any other
+command that resumes execution.
+
+Specifying any command resuming execution (currently continue,
+step, next, return, jump, quit and their abbreviations) terminates
+the command list (as if that command was immediately followed by end).
+This is because any time you resume execution
+(even with a simple next or step), you may encounter·
+another breakpoint--which could have its own command list, leading to
+ambiguities about which list to execute.
+
+   If you use the 'silent' command in the command list, the
+usual message about stopping at a breakpoint is not printed.  This may
+be desirable for breakpoints that are to print a specific message and
+then continue.  If none of the other commands print anything, you
+see no sign that the breakpoint was reached.
+
+\versionadded{2.5}
+
 \item[s(tep)]
 
 Execute the current line, stop at the first possible occasion
diff --git a/Doc/lib/libprofile.tex b/Doc/lib/libprofile.tex
index afc9694..9ff5ba0 100644
--- a/Doc/lib/libprofile.tex
+++ b/Doc/lib/libprofile.tex
@@ -124,7 +124,7 @@
 %\end{description}
 
 
-\section{Instant Users Manual \label{profile-instant}}
+\section{Instant User's Manual \label{profile-instant}}
 
 This section is provided for users that ``don't want to read the
 manual.'' It provides a very brief overview, and allows a user to
@@ -391,17 +391,17 @@
 % (This \stmodindex use may be hard to change ;-( )
 \stmodindex{pstats}
 
-\begin{classdesc}{Stats}{filename\optional{, \moreargs}}
+\begin{classdesc}{Stats}{filename\optional{, \moreargs\optional{, stream=sys.stdout}}}
 This class constructor creates an instance of a ``statistics object''
 from a \var{filename} (or set of filenames).  \class{Stats} objects are
-manipulated by methods, in order to print useful reports.
+manipulated by methods, in order to print useful reports.  You may specify
+an alternate output stream by giving the keyword argument, \code{stream}.
 
-The file selected by the above constructor must have been created by
-the corresponding version of \module{profile} or \module{cProfile}.
-To be specific, there is
-\emph{no} file compatibility guaranteed with future versions of this
-profiler, and there is no compatibility with files produced by other
-profilers.
+The file selected by the above constructor must have been created by the
+corresponding version of \module{profile} or \module{cProfile}.  To be
+specific, there is \emph{no} file compatibility guaranteed with future
+versions of this profiler, and there is no compatibility with files produced
+by other profilers.
 %(such as the old system profiler).
 
 If several files are provided, all the statistics for identical
diff --git a/Doc/lib/libpycompile.tex b/Doc/lib/libpycompile.tex
index 0458191..85f0aaa 100644
--- a/Doc/lib/libpycompile.tex
+++ b/Doc/lib/libpycompile.tex
@@ -30,9 +30,10 @@
   \code{+} \code{'c'} (\code{'o'} if optimization is enabled in the
   current interpreter).  If \var{dfile} is specified, it is used as
   the name of the source file in error messages instead of \var{file}. 
-  If \var{doraise} = True, a PyCompileError is raised when an error is 
-  encountered while compiling \var{file}. If \var{doraise} = False (the default), 
-  an error string is written to sys.stderr, but no exception is raised.
+  If \var{doraise} is true, a \exception{PyCompileError} is raised when
+  an error is encountered while compiling \var{file}. If \var{doraise}
+  is false (the default), an error string is written to \code{sys.stderr},
+  but no exception is raised.
 \end{funcdesc}
 
 \begin{funcdesc}{main}{\optional{args}}
diff --git a/Doc/lib/libqueue.tex b/Doc/lib/libqueue.tex
index f1d892a..95ad47f 100644
--- a/Doc/lib/libqueue.tex
+++ b/Doc/lib/libqueue.tex
@@ -1,3 +1,4 @@
+
 \section{\module{Queue} ---
          A synchronized queue class}
 
@@ -94,3 +95,51 @@
 \begin{methoddesc}{get_nowait}{}
 Equivalent to \code{get(False)}.
 \end{methoddesc}
+
+Two methods are offered to support tracking whether enqueued tasks have
+been fully processed by daemon consumer threads.
+
+\begin{methoddesc}{task_done}{}
+Indicate that a formerly enqueued task is complete.  Used by queue consumer
+threads.  For each \method{get()} used to fetch a task, a subsequent call to
+\method{task_done()} tells the queue that the processing on the task is complete.
+
+If a \method{join()} is currently blocking, it will resume when all items
+have been processed (meaning that a \method{task_done()} call was received
+for every item that had been \method{put()} into the queue).
+
+Raises a \exception{ValueError} if called more times than there were items
+placed in the queue.
+\versionadded{2.5}
+\end{methoddesc}
+
+\begin{methoddesc}{join}{}
+Blocks until all items in the queue have been gotten and processed.
+
+The count of unfinished tasks goes up whenever an item is added to the
+queue. The count goes down whenever a consumer thread calls \method{task_done()}
+to indicate that the item was retrieved and all work on it is complete.
+When the count of unfinished tasks drops to zero, join() unblocks.
+\versionadded{2.5}
+\end{methoddesc}
+
+Example of how to wait for enqueued tasks to be completed:
+
+\begin{verbatim}
+    def worker(): 
+        while True: 
+            item = q.get() 
+            do_work(item) 
+            q.task_done() 
+
+    q = Queue() 
+    for i in range(num_worker_threads): 
+         t = Thread(target=worker)
+         t.setDaemon(True)
+         t.start() 
+
+    for item in source():
+        q.put(item) 
+
+    q.join()       # block until all tasks are done
+\end{verbatim}
diff --git a/Doc/lib/libre.tex b/Doc/lib/libre.tex
index 8e6513a..1404e09 100644
--- a/Doc/lib/libre.tex
+++ b/Doc/lib/libre.tex
@@ -566,9 +566,6 @@
 >>> re.split('\W+', 'Words, words, words.', 1)
 ['Words', 'words, words.']
 \end{verbatim}
-
-  This function combines and extends the functionality of
-  the old \function{regsub.split()} and \function{regsub.splitx()}.
 \end{funcdesc}
 
 \begin{funcdesc}{findall}{pattern, string\optional{, flags}}
@@ -934,7 +931,7 @@
 \leftline{\strong{Avoiding recursion}}
 
 If you create regular expressions that require the engine to perform a
-lot of recursion, you may encounter a RuntimeError exception with
+lot of recursion, you may encounter a \exception{RuntimeError} exception with
 the message \code{maximum recursion limit} exceeded. For example,
 
 \begin{verbatim}
@@ -943,7 +940,7 @@
 >>> re.match('Begin (\w| )*? end', s).end()
 Traceback (most recent call last):
   File "<stdin>", line 1, in ?
-  File "/usr/local/lib/python2.3/sre.py", line 132, in match
+  File "/usr/local/lib/python2.5/re.py", line 132, in match
     return _compile(pattern, flags).match(string)
 RuntimeError: maximum recursion limit exceeded
 \end{verbatim}
diff --git a/Doc/lib/libreconvert.tex b/Doc/lib/libreconvert.tex
deleted file mode 100644
index 29c6e52..0000000
--- a/Doc/lib/libreconvert.tex
+++ /dev/null
@@ -1,80 +0,0 @@
-\section{\module{reconvert} ---
-         Convert regular expressions from regex to re form}
-\declaremodule{standard}{reconvert}
-\moduleauthor{Andrew M. Kuchling}{amk@amk.ca}
-\sectionauthor{Skip Montanaro}{skip@pobox.com}
-
-
-\modulesynopsis{Convert regex-, emacs- or sed-style regular expressions
-to re-style syntax.}
-
-
-This module provides a facility to convert regular expressions from the
-syntax used by the deprecated \module{regex} module to those used by the
-newer \module{re} module.  Because of similarity between the regular
-expression syntax of \code{sed(1)} and \code{emacs(1)} and the
-\module{regex} module, it is also helpful to convert patterns written for
-those tools to \module{re} patterns.
-
-When used as a script, a Python string literal (or any other expression
-evaluating to a string) is read from stdin, and the translated expression is
-written to stdout as a string literal.  Unless stdout is a tty, no trailing
-newline is written to stdout.  This is done so that it can be used with
-Emacs \code{C-U M-|} (shell-command-on-region) which filters the region
-through the shell command.
-
-\begin{seealso}
-  \seetitle{Mastering Regular Expressions}{Book on regular expressions
-            by Jeffrey Friedl, published by O'Reilly.  The second 
-            edition of the book no longer covers Python at all, 
-            but the first edition covered writing good regular expression
-            patterns in great detail.}
-\end{seealso}
-
-\subsection{Module Contents}
-\nodename{Contents of Module reconvert}
-
-The module defines two functions and a handful of constants.
-
-\begin{funcdesc}{convert}{pattern\optional{, syntax=None}}
- Convert a \var{pattern} representing a \module{regex}-stype regular
- expression into a \module{re}-style regular expression.  The optional
- \var{syntax} parameter is a bitwise-or'd set of flags that control what
- constructs are converted.  See below for a description of the various
- constants.
-\end{funcdesc}
-
-\begin{funcdesc}{quote}{s\optional{, quote=None}}
- Convert a string object to a quoted string literal.
-
- This is similar to \function{repr} but will return a "raw" string (r'...'
- or r"...") when the string contains backslashes, instead of doubling all
- backslashes.  The resulting string does not always evaluate to the same
- string as the original; however it will do just the right thing when passed
- into re.compile().
-
- The optional second argument forces the string quote; it must be a single
- character which is a valid Python string quote.  Note that prior to Python
- 2.5 this would not accept triple-quoted string delimiters.
-\end{funcdesc}
-
-\begin{datadesc}{RE_NO_BK_PARENS}
- Suppress paren conversion.  This should be omitted when converting
- \code{sed}-style or \code{emacs}-style regular expressions.
-\end{datadesc}
-
-\begin{datadesc}{RE_NO_BK_VBAR}
- Suppress vertical bar conversion.  This should be omitted when converting
- \code{sed}-style or \code{emacs}-style regular expressions.
-\end{datadesc}
-
-\begin{datadesc}{RE_BK_PLUS_QM}
- Enable conversion of \code{+} and \code{?} characters.  This should be
- added to the \var{syntax} arg of \function{convert} when converting
- \code{sed}-style regular expressions and omitted when converting
- \code{emacs}-style regular expressions.
-\end{datadesc}
-
-\begin{datadesc}{RE_NEWLINE_OR}
- When set, newline characters are replaced by \code{|}.
-\end{datadesc}
diff --git a/Doc/lib/libregex.tex b/Doc/lib/libregex.tex
deleted file mode 100644
index 0982f81..0000000
--- a/Doc/lib/libregex.tex
+++ /dev/null
@@ -1,370 +0,0 @@
-\section{\module{regex} ---
-         Regular expression operations}
-\declaremodule{builtin}{regex}
-
-\modulesynopsis{Regular expression search and match operations.
-                \strong{Obsolete!}}
-
-
-This module provides regular expression matching operations similar to
-those found in Emacs.
-
-\strong{Obsolescence note:}
-This module is obsolete as of Python version 1.5; it is still being
-maintained because much existing code still uses it.  All new code in
-need of regular expressions should use the new
-\code{re}\refstmodindex{re} module, which supports the more powerful
-and regular Perl-style regular expressions.  Existing code should be
-converted.  The standard library module
-\code{reconvert}\refstmodindex{reconvert} helps in converting
-\code{regex} style regular expressions to \code{re}\refstmodindex{re}
-style regular expressions.  (For more conversion help, see Andrew
-Kuchling's\index{Kuchling, Andrew} ``\module{regex-to-re} HOWTO'' at
-\url{http://www.python.org/doc/howto/regex-to-re/}.)
-
-By default the patterns are Emacs-style regular expressions
-(with one exception).  There is
-a way to change the syntax to match that of several well-known
-\UNIX{} utilities.  The exception is that Emacs' \samp{\e s}
-pattern is not supported, since the original implementation references
-the Emacs syntax tables.
-
-This module is 8-bit clean: both patterns and strings may contain null
-bytes and characters whose high bit is set.
-
-\strong{Please note:} There is a little-known fact about Python string
-literals which means that you don't usually have to worry about
-doubling backslashes, even though they are used to escape special
-characters in string literals as well as in regular expressions.  This
-is because Python doesn't remove backslashes from string literals if
-they are followed by an unrecognized escape character.
-\emph{However}, if you want to include a literal \dfn{backslash} in a
-regular expression represented as a string literal, you have to
-\emph{quadruple} it or enclose it in a singleton character class.
-E.g.\  to extract \LaTeX\ \samp{\e section\{\textrm{\ldots}\}} headers
-from a document, you can use this pattern:
-\code{'[\e ]section\{\e (.*\e )\}'}.  \emph{Another exception:}
-the escape sequence \samp{\e b} is significant in string literals
-(where it means the ASCII bell character) as well as in Emacs regular
-expressions (where it stands for a word boundary), so in order to
-search for a word boundary, you should use the pattern \code{'\e \e b'}.
-Similarly, a backslash followed by a digit 0-7 should be doubled to
-avoid interpretation as an octal escape.
-
-\subsection{Regular Expressions}
-
-A regular expression (or RE) specifies a set of strings that matches
-it; the functions in this module let you check if a particular string
-matches a given regular expression (or if a given regular expression
-matches a particular string, which comes down to the same thing).
-
-Regular expressions can be concatenated to form new regular
-expressions; if \emph{A} and \emph{B} are both regular expressions,
-then \emph{AB} is also an regular expression.  If a string \emph{p}
-matches A and another string \emph{q} matches B, the string \emph{pq}
-will match AB.  Thus, complex expressions can easily be constructed
-from simpler ones like the primitives described here.  For details of
-the theory and implementation of regular expressions, consult almost
-any textbook about compiler construction.
-
-% XXX The reference could be made more specific, say to 
-% "Compilers: Principles, Techniques and Tools", by Alfred V. Aho, 
-% Ravi Sethi, and Jeffrey D. Ullman, or some FA text.   
-
-A brief explanation of the format of regular expressions follows.
-
-Regular expressions can contain both special and ordinary characters.
-Ordinary characters, like '\code{A}', '\code{a}', or '\code{0}', are
-the simplest regular expressions; they simply match themselves.  You
-can concatenate ordinary characters, so '\code{last}' matches the
-characters 'last'.  (In the rest of this section, we'll write RE's in
-\code{this special font}, usually without quotes, and strings to be
-matched 'in single quotes'.)
-
-Special characters either stand for classes of ordinary characters, or
-affect how the regular expressions around them are interpreted.
-
-The special characters are:
-\begin{itemize}
-\item[\code{.}] (Dot.)  Matches any character except a newline.
-\item[\code{\^}] (Caret.)  Matches the start of the string.
-\item[\code{\$}] Matches the end of the string.  
-\code{foo} matches both 'foo' and 'foobar', while the regular
-expression '\code{foo\$}' matches only 'foo'.
-\item[\code{*}] Causes the resulting RE to
-match 0 or more repetitions of the preceding RE.  \code{ab*} will
-match 'a', 'ab', or 'a' followed by any number of 'b's.
-\item[\code{+}] Causes the
-resulting RE to match 1 or more repetitions of the preceding RE.
-\code{ab+} will match 'a' followed by any non-zero number of 'b's; it
-will not match just 'a'.
-\item[\code{?}] Causes the resulting RE to
-match 0 or 1 repetitions of the preceding RE.  \code{ab?} will
-match either 'a' or 'ab'.
-
-\item[\code{\e}] Either escapes special characters (permitting you to match
-characters like '*?+\&\$'), or signals a special sequence; special
-sequences are discussed below.  Remember that Python also uses the
-backslash as an escape sequence in string literals; if the escape
-sequence isn't recognized by Python's parser, the backslash and
-subsequent character are included in the resulting string.  However,
-if Python would recognize the resulting sequence, the backslash should
-be repeated twice.  
-
-\item[\code{[]}] Used to indicate a set of characters.  Characters can
-be listed individually, or a range is indicated by giving two
-characters and separating them by a '-'.  Special characters are
-not active inside sets.  For example, \code{[akm\$]}
-will match any of the characters 'a', 'k', 'm', or '\$'; \code{[a-z]} will
-match any lowercase letter.  
-
-If you want to include a \code{]} inside a
-set, it must be the first character of the set; to include a \code{-},
-place it as the first or last character. 
-
-Characters \emph{not} within a range can be matched by including a
-\code{\^} as the first character of the set; \code{\^} elsewhere will
-simply match the '\code{\^}' character.  
-\end{itemize}
-
-The special sequences consist of '\code{\e}' and a character
-from the list below.  If the ordinary character is not on the list,
-then the resulting RE will match the second character.  For example,
-\code{\e\$} matches the character '\$'.  Ones where the backslash
-should be doubled in string literals are indicated.
-
-\begin{itemize}
-\item[\code{\e|}]\code{A\e|B}, where A and B can be arbitrary REs,
-creates a regular expression that will match either A or B.  This can
-be used inside groups (see below) as well.
-%
-\item[\code{\e( \e)}] Indicates the start and end of a group; the
-contents of a group can be matched later in the string with the
-\code{\e [1-9]} special sequence, described next.
-\end{itemize}
-
-\begin{fulllineitems}
-\item[\code{\e \e 1, ... \e \e 7, \e 8, \e 9}]
-Matches the contents of the group of the same
-number.  For example, \code{\e (.+\e ) \e \e 1} matches 'the the' or
-'55 55', but not 'the end' (note the space after the group).  This
-special sequence can only be used to match one of the first 9 groups;
-groups with higher numbers can be matched using the \code{\e v}
-sequence.  (\code{\e 8} and \code{\e 9} don't need a double backslash
-because they are not octal digits.)
-\end{fulllineitems}
-
-\begin{itemize}
-\item[\code{\e \e b}] Matches the empty string, but only at the
-beginning or end of a word.  A word is defined as a sequence of
-alphanumeric characters, so the end of a word is indicated by
-whitespace or a non-alphanumeric character.
-%
-\item[\code{\e B}] Matches the empty string, but when it is \emph{not} at the
-beginning or end of a word.
-%
-\item[\code{\e v}] Must be followed by a two digit decimal number, and
-matches the contents of the group of the same number.  The group
-number must be between 1 and 99, inclusive.
-%
-\item[\code{\e w}]Matches any alphanumeric character; this is
-equivalent to the set \code{[a-zA-Z0-9]}.
-%
-\item[\code{\e W}] Matches any non-alphanumeric character; this is
-equivalent to the set \code{[\^a-zA-Z0-9]}.
-\item[\code{\e <}] Matches the empty string, but only at the beginning of a
-word.  A word is defined as a sequence of alphanumeric characters, so
-the end of a word is indicated by whitespace or a non-alphanumeric 
-character.
-\item[\code{\e >}] Matches the empty string, but only at the end of a
-word.
-
-\item[\code{\e \e \e \e}] Matches a literal backslash.
-
-% In Emacs, the following two are start of buffer/end of buffer.  In
-% Python they seem to be synonyms for ^$.
-\item[\code{\e `}] Like \code{\^}, this only matches at the start of the
-string.
-\item[\code{\e \e '}] Like \code{\$}, this only matches at the end of
-the string.
-% end of buffer
-\end{itemize}
-
-\subsection{Module Contents}
-\nodename{Contents of Module regex}
-
-The module defines these functions, and an exception:
-
-
-\begin{funcdesc}{match}{pattern, string}
-  Return how many characters at the beginning of \var{string} match
-  the regular expression \var{pattern}.  Return \code{-1} if the
-  string does not match the pattern (this is different from a
-  zero-length match!).
-\end{funcdesc}
-
-\begin{funcdesc}{search}{pattern, string}
-  Return the first position in \var{string} that matches the regular
-  expression \var{pattern}.  Return \code{-1} if no position in the string
-  matches the pattern (this is different from a zero-length match
-  anywhere!).
-\end{funcdesc}
-
-\begin{funcdesc}{compile}{pattern\optional{, translate}}
-  Compile a regular expression pattern into a regular expression
-  object, which can be used for matching using its \code{match()} and
-  \code{search()} methods, described below.  The optional argument
-  \var{translate}, if present, must be a 256-character string
-  indicating how characters (both of the pattern and of the strings to
-  be matched) are translated before comparing them; the \var{i}-th
-  element of the string gives the translation for the character with
-  \ASCII{} code \var{i}.  This can be used to implement
-  case-insensitive matching; see the \code{casefold} data item below.
-
-  The sequence
-
-\begin{verbatim}
-prog = regex.compile(pat)
-result = prog.match(str)
-\end{verbatim}
-%
-is equivalent to
-
-\begin{verbatim}
-result = regex.match(pat, str)
-\end{verbatim}
-
-but the version using \code{compile()} is more efficient when multiple
-regular expressions are used concurrently in a single program.  (The
-compiled version of the last pattern passed to \code{regex.match()} or
-\code{regex.search()} is cached, so programs that use only a single
-regular expression at a time needn't worry about compiling regular
-expressions.)
-\end{funcdesc}
-
-\begin{funcdesc}{set_syntax}{flags}
-  Set the syntax to be used by future calls to \code{compile()},
-  \code{match()} and \code{search()}.  (Already compiled expression
-  objects are not affected.)  The argument is an integer which is the
-  OR of several flag bits.  The return value is the previous value of
-  the syntax flags.  Names for the flags are defined in the standard
-  module \code{regex_syntax}\refstmodindex{regex_syntax}; read the
-  file \file{regex_syntax.py} for more information.
-\end{funcdesc}
-
-\begin{funcdesc}{get_syntax}{}
-  Returns the current value of the syntax flags as an integer.
-\end{funcdesc}
-
-\begin{funcdesc}{symcomp}{pattern\optional{, translate}}
-This is like \code{compile()}, but supports symbolic group names: if a
-parenthesis-enclosed group begins with a group name in angular
-brackets, e.g. \code{'\e(<id>[a-z][a-z0-9]*\e)'}, the group can
-be referenced by its name in arguments to the \code{group()} method of
-the resulting compiled regular expression object, like this:
-\code{p.group('id')}.  Group names may contain alphanumeric characters
-and \code{'_'} only.
-\end{funcdesc}
-
-\begin{excdesc}{error}
-  Exception raised when a string passed to one of the functions here
-  is not a valid regular expression (e.g., unmatched parentheses) or
-  when some other error occurs during compilation or matching.  (It is
-  never an error if a string contains no match for a pattern.)
-\end{excdesc}
-
-\begin{datadesc}{casefold}
-A string suitable to pass as the \var{translate} argument to
-\code{compile()} to map all upper case characters to their lowercase
-equivalents.
-\end{datadesc}
-
-\noindent
-Compiled regular expression objects support these methods:
-
-\setindexsubitem{(regex method)}
-\begin{funcdesc}{match}{string\optional{, pos}}
-  Return how many characters at the beginning of \var{string} match
-  the compiled regular expression.  Return \code{-1} if the string
-  does not match the pattern (this is different from a zero-length
-  match!).
-  
-  The optional second parameter, \var{pos}, gives an index in the string
-  where the search is to start; it defaults to \code{0}.  This is not
-  completely equivalent to slicing the string; the \code{'\^'} pattern
-  character matches at the real beginning of the string and at positions
-  just after a newline, not necessarily at the index where the search
-  is to start.
-\end{funcdesc}
-
-\begin{funcdesc}{search}{string\optional{, pos}}
-  Return the first position in \var{string} that matches the regular
-  expression \code{pattern}.  Return \code{-1} if no position in the
-  string matches the pattern (this is different from a zero-length
-  match anywhere!).
-  
-  The optional second parameter has the same meaning as for the
-  \code{match()} method.
-\end{funcdesc}
-
-\begin{funcdesc}{group}{index, index, ...}
-This method is only valid when the last call to the \code{match()}
-or \code{search()} method found a match.  It returns one or more
-groups of the match.  If there is a single \var{index} argument,
-the result is a single string; if there are multiple arguments, the
-result is a tuple with one item per argument.  If the \var{index} is
-zero, the corresponding return value is the entire matching string; if
-it is in the inclusive range [1..99], it is the string matching the
-corresponding parenthesized group (using the default syntax,
-groups are parenthesized using \code{{\e}(} and \code{{\e})}).  If no
-such group exists, the corresponding result is \code{None}.
-
-If the regular expression was compiled by \code{symcomp()} instead of
-\code{compile()}, the \var{index} arguments may also be strings
-identifying groups by their group name.
-\end{funcdesc}
-
-\noindent
-Compiled regular expressions support these data attributes:
-
-\setindexsubitem{(regex attribute)}
-
-\begin{datadesc}{regs}
-When the last call to the \code{match()} or \code{search()} method found a
-match, this is a tuple of pairs of indexes corresponding to the
-beginning and end of all parenthesized groups in the pattern.  Indices
-are relative to the string argument passed to \code{match()} or
-\code{search()}.  The 0-th tuple gives the beginning and end or the
-whole pattern.  When the last match or search failed, this is
-\code{None}.
-\end{datadesc}
-
-\begin{datadesc}{last}
-When the last call to the \code{match()} or \code{search()} method found a
-match, this is the string argument passed to that method.  When the
-last match or search failed, this is \code{None}.
-\end{datadesc}
-
-\begin{datadesc}{translate}
-This is the value of the \var{translate} argument to
-\code{regex.compile()} that created this regular expression object.  If
-the \var{translate} argument was omitted in the \code{regex.compile()}
-call, this is \code{None}.
-\end{datadesc}
-
-\begin{datadesc}{givenpat}
-The regular expression pattern as passed to \code{compile()} or
-\code{symcomp()}.
-\end{datadesc}
-
-\begin{datadesc}{realpat}
-The regular expression after stripping the group names for regular
-expressions compiled with \code{symcomp()}.  Same as \code{givenpat}
-otherwise.
-\end{datadesc}
-
-\begin{datadesc}{groupindex}
-A dictionary giving the mapping from symbolic group names to numerical
-group indexes for regular expressions compiled with \code{symcomp()}.
-\code{None} otherwise.
-\end{datadesc}
diff --git a/Doc/lib/libregsub.tex b/Doc/lib/libregsub.tex
deleted file mode 100644
index b41b700..0000000
--- a/Doc/lib/libregsub.tex
+++ /dev/null
@@ -1,74 +0,0 @@
-\section{\module{regsub} ---
-         String operations using regular expressions}
-
-\declaremodule{standard}{regsub}
-\modulesynopsis{Substitution and splitting operations that use
-                regular expressions.  \strong{Obsolete!}}
-
-
-This module defines a number of functions useful for working with
-regular expressions (see built-in module \refmodule{regex}).
-
-Warning: these functions are not thread-safe.
-
-\strong{Obsolescence note:}
-This module is obsolete as of Python version 1.5; it is still being
-maintained because much existing code still uses it.  All new code in
-need of regular expressions should use the new \refmodule{re} module, which
-supports the more powerful and regular Perl-style regular expressions.
-Existing code should be converted.  The standard library module
-\module{reconvert} helps in converting \refmodule{regex} style regular
-expressions to \refmodule{re} style regular expressions.  (For more
-conversion help, see Andrew Kuchling's\index{Kuchling, Andrew}
-``regex-to-re HOWTO'' at
-\url{http://www.python.org/doc/howto/regex-to-re/}.)
-
-
-\begin{funcdesc}{sub}{pat, repl, str}
-Replace the first occurrence of pattern \var{pat} in string
-\var{str} by replacement \var{repl}.  If the pattern isn't found,
-the string is returned unchanged.  The pattern may be a string or an
-already compiled pattern.  The replacement may contain references
-\samp{\e \var{digit}} to subpatterns and escaped backslashes.
-\end{funcdesc}
-
-\begin{funcdesc}{gsub}{pat, repl, str}
-Replace all (non-overlapping) occurrences of pattern \var{pat} in
-string \var{str} by replacement \var{repl}.  The same rules as for
-\code{sub()} apply.  Empty matches for the pattern are replaced only
-when not adjacent to a previous match, so e.g.
-\code{gsub('', '-', 'abc')} returns \code{'-a-b-c-'}.
-\end{funcdesc}
-
-\begin{funcdesc}{split}{str, pat\optional{, maxsplit}}
-Split the string \var{str} in fields separated by delimiters matching
-the pattern \var{pat}, and return a list containing the fields.  Only
-non-empty matches for the pattern are considered, so e.g.
-\code{split('a:b', ':*')} returns \code{['a', 'b']} and
-\code{split('abc', '')} returns \code{['abc']}.  The \var{maxsplit}
-defaults to 0. If it is nonzero, only \var{maxsplit} number of splits
-occur, and the remainder of the string is returned as the final
-element of the list.
-\end{funcdesc}
-
-\begin{funcdesc}{splitx}{str, pat\optional{, maxsplit}}
-Split the string \var{str} in fields separated by delimiters matching
-the pattern \var{pat}, and return a list containing the fields as well
-as the separators.  For example, \code{splitx('a:::b', ':*')} returns
-\code{['a', ':::', 'b']}.  Otherwise, this function behaves the same
-as \code{split}.
-\end{funcdesc}
-
-\begin{funcdesc}{capwords}{s\optional{, pat}}
-Capitalize words separated by optional pattern \var{pat}.  The default
-pattern uses any characters except letters, digits and underscores as
-word delimiters.  Capitalization is done by changing the first
-character of each word to upper case.
-\end{funcdesc}
-
-\begin{funcdesc}{clear_cache}{}
-The regsub module maintains a cache of compiled regular expressions,
-keyed on the regular expression string and the syntax of the regex
-module at the time the expression was compiled.  This function clears
-that cache.
-\end{funcdesc}
diff --git a/Doc/lib/librunpy.tex b/Doc/lib/librunpy.tex
new file mode 100644
index 0000000..4be9901
--- /dev/null
+++ b/Doc/lib/librunpy.tex
@@ -0,0 +1,74 @@
+\section{\module{runpy} ---
+         Locating and executing Python modules.}
+
+\declaremodule{standard}{runpy}		% standard library, in Python
+
+\moduleauthor{Nick Coghlan}{ncoghlan@gmail.com}
+
+\modulesynopsis{Locate and execute Python modules as scripts}
+
+\versionadded{2.5}
+
+The \module{runpy} module is used to locate and run Python modules
+without importing them first. It's main use is to implement the
+\programopt{-m} command line switch that allows scripts to be located
+using the Python module namespace rather than the filesystem.
+
+When executed as a script, the module effectively operates as follows:
+\begin{verbatim}
+    del sys.argv[0]  # Remove the runpy module from the arguments
+    run_module(sys.argv[0], run_name="__main__", alter_sys=True)
+\end{verbatim}
+
+The \module{runpy} module provides a single function:
+
+\begin{funcdesc}{run_module}{mod_name\optional{, init_globals}
+\optional{, run_name}\optional{, alter_sys}}
+Execute the code of the specified module and return the resulting
+module globals dictionary. The module's code is first located using
+the standard import mechanism (refer to PEP 302 for details) and
+then executed in a fresh module namespace.
+
+The optional dictionary argument \var{init_globals} may be used to
+pre-populate the globals dictionary before the code is executed.
+The supplied dictionary will not be modified. If any of the special
+global variables below are defined in the supplied dictionary, those
+definitions are overridden by the \code{run_module} function.
+
+The special global variables \code{__name__}, \code{__file__},
+\code{__loader__} and \code{__builtins__} are set in the globals
+dictionary before the module code is executed.
+
+\code{__name__} is set to \var{run_name} if this optional argument is
+supplied, and the \var{mod_name} argument otherwise.
+
+\code{__loader__} is set to the PEP 302 module loader used to retrieve
+the code for the module (This loader may be a wrapper around the
+standard import mechanism).
+
+\code{__file__} is set to the name provided by the module loader. If
+the loader does not make filename information available, this
+variable is set to \code{None}.
+
+\code{__builtins__} is automatically initialised with a reference to
+the top level namespace of the \module{__builtin__} module.
+
+If the argument \var{alter_sys} is supplied and evaluates to
+\code{True}, then \code{sys.argv[0]} is updated with the value of
+\code{__file__} and \code{sys.modules[__name__]} is updated with a
+temporary module object for the module being executed. Both
+\code{sys.argv[0]} and \code{sys.modules[__name__]} are restored to
+their original values before the function returns.
+
+Note that this manipulation of \module{sys} is not thread-safe. Other
+threads may see the partially initialised module, as well as the
+altered list of arguments. It is recommended that the \module{sys}
+module be left alone when invoking this function from threaded code.
+\end{funcdesc}
+
+\begin{seealso}
+
+\seepep{338}{Executing modules as scripts}{PEP written and 
+implemented by Nick Coghlan.}
+
+\end{seealso}
diff --git a/Doc/lib/libsets.tex b/Doc/lib/libsets.tex
index dd85ec7..22bf34b 100644
--- a/Doc/lib/libsets.tex
+++ b/Doc/lib/libsets.tex
@@ -151,12 +151,13 @@
   \lineiii{\var{s}.add(\var{x})}{}
          {add element \var{x} to set \var{s}}
   \lineiii{\var{s}.remove(\var{x})}{}
-         {remove \var{x} from set \var{s}; raises KeyError if not present}
+         {remove \var{x} from set \var{s}; raises \exception{KeyError}
+	  if not present}
   \lineiii{\var{s}.discard(\var{x})}{}
          {removes \var{x} from set \var{s} if present}
   \lineiii{\var{s}.pop()}{}
          {remove and return an arbitrary element from \var{s}; raises
-	  KeyError if empty}
+	  \exception{KeyError} if empty}
   \lineiii{\var{s}.clear()}{}
          {remove all elements from set \var{s}}
 \end{tableiii}
diff --git a/Doc/lib/libsgmllib.tex b/Doc/lib/libsgmllib.tex
index 27bf0b0..1578313 100644
--- a/Doc/lib/libsgmllib.tex
+++ b/Doc/lib/libsgmllib.tex
@@ -95,12 +95,22 @@
 should be used to support semantic interpretation of the start tag.
 The \var{attributes} argument is a list of \code{(\var{name},
 \var{value})} pairs containing the attributes found inside the tag's
-\code{<>} brackets.  The \var{name} has been translated to lower case
-and double quotes and backslashes in the \var{value} have been interpreted.
+\code{<>} brackets.
+
+The \var{name} has been translated to lower case.
+Double quotes and backslashes in the \var{value} have been interpreted,
+as well as known character references and known entity references
+terminated by a semicolon (normally, entity references can be terminated
+by any non-alphanumerical character, but this would break the very
+common case of \code{<A HREF="url?spam=1\&eggs=2">} when \code{eggs}
+is a valid entity name).
+
 For instance, for the tag \code{<A HREF="http://www.cwi.nl/">}, this
 method would be called as \samp{unknown_starttag('a', [('href',
 'http://www.cwi.nl/')])}.  The base implementation simply calls
 \var{method} with \var{attributes} as the only argument.
+\versionadded[Handling of entity and character references within
+              attribute values]{2.5}
 \end{methoddesc}
 
 \begin{methoddesc}{handle_endtag}{tag, method}
diff --git a/Doc/lib/libshutil.tex b/Doc/lib/libshutil.tex
index a217150..449d741 100644
--- a/Doc/lib/libshutil.tex
+++ b/Doc/lib/libshutil.tex
@@ -73,18 +73,18 @@
   If \var{symlinks} is true, symbolic links in
   the source tree are represented as symbolic links in the new tree;
   if false or omitted, the contents of the linked files are copied to
-  the new tree.  If exception(s) occur, an Error is raised
+  the new tree.  If exception(s) occur, an \exception{Error} is raised
   with a list of reasons.
 
   The source code for this should be considered an example rather than 
   a tool.
 
-  \versionchanged[Error is raised if any exceptions occur during copying,
-  rather than printing a message]{2.3}
+  \versionchanged[\exception{Error} is raised if any exceptions occur during
+                  copying, rather than printing a message]{2.3}
 
   \versionchanged[Create intermediate directories needed to create \var{dst},
-  rather than raising an error. Copy permissions and times of directories using
-  \function{copystat()}]{2.5}
+                  rather than raising an error. Copy permissions and times of
+		  directories using \function{copystat()}]{2.5}
 
 \end{funcdesc}
 
diff --git a/Doc/lib/libsignal.tex b/Doc/lib/libsignal.tex
index f168b6d..cfdb4dd 100644
--- a/Doc/lib/libsignal.tex
+++ b/Doc/lib/libsignal.tex
@@ -100,7 +100,7 @@
   Any previously scheduled alarm is canceled (only one alarm can
   be scheduled at any time).  The returned value is then the number of
   seconds before any previously set alarm was to have been delivered.
-  If \var{time} is zero, no alarm id scheduled, and any scheduled
+  If \var{time} is zero, no alarm is scheduled, and any scheduled
   alarm is canceled.  The return value is the number of seconds
   remaining before a previously scheduled alarm.  If the return value
   is zero, no alarm is currently scheduled.  (See the \UNIX{} man page
diff --git a/Doc/lib/libsocket.tex b/Doc/lib/libsocket.tex
index cc7bd75..c7b656d 100644
--- a/Doc/lib/libsocket.tex
+++ b/Doc/lib/libsocket.tex
@@ -317,10 +317,11 @@
 \end{funcdesc}
 
 \begin{funcdesc}{fromfd}{fd, family, type\optional{, proto}}
-Build a socket object from an existing file descriptor (an integer as
-returned by a file object's \method{fileno()} method).  Address family,
-socket type and protocol number are as for the \function{socket()} function
-above.  The file descriptor should refer to a socket, but this is not
+Duplicate the file descriptor \var{fd} (an integer as returned by a file
+object's \method{fileno()} method) and build a socket object from the
+result.  Address family, socket type and protocol number are as for the
+\function{socket()} function above.
+The file descriptor should refer to a socket, but this is not
 checked --- subsequent operations on the object may fail if the file
 descriptor is invalid.  This function is rarely needed, but can be
 used to get or set socket options on a socket passed to a program as
@@ -626,7 +627,7 @@
 \end{methoddesc}
 
 \begin{methoddesc}[socket]{gettimeout}{}
-Returns the timeout in floating seconds associated with socket
+Return the timeout in floating seconds associated with socket
 operations, or \code{None} if no timeout is set.  This reflects
 the last call to \method{setblocking()} or \method{settimeout()}.
 \versionadded{2.3}
@@ -677,6 +678,25 @@
 instead.
 
 
+Socket objects also have these (read-only) attributes that correspond
+to the values given to the \class{socket} constructor.
+
+\begin{memberdesc}[socket]{family}
+The socket family.
+\versionadded{2.5}
+\end{memberdesc}
+
+\begin{memberdesc}[socket]{type}
+The socket type.
+\versionadded{2.5}
+\end{memberdesc}
+
+\begin{memberdesc}[socket]{proto}
+The socket protocol.
+\versionadded{2.5}
+\end{memberdesc}
+
+
 \subsection{SSL Objects \label{ssl-objects}}
 
 SSL objects have the following methods.
diff --git a/Doc/lib/libstdtypes.tex b/Doc/lib/libstdtypes.tex
index d2a0425..8d011fd 100644
--- a/Doc/lib/libstdtypes.tex
+++ b/Doc/lib/libstdtypes.tex
@@ -185,10 +185,12 @@
 In addition, Booleans are a subtype of plain integers.
 Plain integers (also just called \dfn{integers})
 are implemented using \ctype{long} in C, which gives them at least 32
-bits of precision.  Long integers have unlimited precision.  Floating
-point numbers are implemented using \ctype{double} in C.  All bets on
-their precision are off unless you happen to know the machine you are
-working with.
+bits of precision (\code{sys.maxint} is always set to the maximum
+plain integer value for the current platform, the minimum value is 
+\code{-sys.maxint - 1}).  Long integers have unlimited precision.
+Floating point numbers are implemented using \ctype{double} in C.
+All bets on their precision are off unless you happen to know the
+machine you are working with.
 \obindex{numeric}
 \obindex{Boolean}
 \obindex{integer}
@@ -249,6 +251,7 @@
   \hline
   \lineiii{\var{x} * \var{y}}{product of \var{x} and \var{y}}{}
   \lineiii{\var{x} / \var{y}}{quotient of \var{x} and \var{y}}{(1)}
+  \lineiii{\var{x} // \var{y}}{(floored) quotient of \var{x} and \var{y}}{(5)}
   \lineiii{\var{x} \%{} \var{y}}{remainder of \code{\var{x} / \var{y}}}{(4)}
   \hline
   \lineiii{-\var{x}}{\var{x} negated}{}
@@ -299,6 +302,9 @@
 \deprecated{2.3}{Instead convert to float using \function{abs()}
 if appropriate.}
 
+\item[(5)]
+Also referred to as integer division.  The resultant value is a whole integer,
+though the result's type is not necessarily int.
 \end{description}
 % XXXJH exceptions: overflow (when? what operations?) zerodivision
 
@@ -1278,7 +1284,8 @@
   \lineiii{\var{s}.add(\var{x})}{}
          {add element \var{x} to set \var{s}}
   \lineiii{\var{s}.remove(\var{x})}{}
-         {remove \var{x} from set \var{s}; raises KeyError if not present}
+         {remove \var{x} from set \var{s}; raises \exception{KeyError}
+	  if not present}
   \lineiii{\var{s}.discard(\var{x})}{}
          {removes \var{x} from set \var{s} if present}
   \lineiii{\var{s}.pop()}{}
@@ -1495,6 +1502,38 @@
   Any operation which requires that the file be open will raise a
   \exception{ValueError} after the file has been closed.  Calling
   \method{close()} more than once is allowed.
+
+  As of Python 2.5, you can avoid having to call this method explicitly
+  if you use the \keyword{with} statement.  For example, the following
+  code will automatically close \code{f} when the \keyword{with} block
+  is exited:
+
+\begin{verbatim}
+from __future__ import with_statement
+
+with open("hello.txt") as f:
+    for line in f:
+        print line
+\end{verbatim}
+
+  In older versions of Python, you would have needed to do this to get
+  the same effect:
+
+\begin{verbatim}
+f = open("hello.txt")
+try:
+    for line in f:
+        print line
+finally:
+    f.close()
+\end{verbatim}
+
+  \note{Not all ``file-like'' types in Python support use as a context
+  manager for the \keyword{with} statement.  If your code is intended to
+  work with any file-like object, you can use the \function{closing()}
+  function in the \module{contextlib} module instead of using the object
+  directly.  See section~\ref{context-closing} for details.}
+  
 \end{methoddesc}
 
 \begin{methoddesc}[file]{flush}{}
@@ -1783,14 +1822,14 @@
 attribute will be \code{None} and if called, an explicit \code{self}
 object must be passed as the first argument.  In this case,
 \code{self} must be an instance of the unbound method's class (or a
-subclass of that class), otherwise a \code{TypeError} is raised.
+subclass of that class), otherwise a \exception{TypeError} is raised.
 
 Like function objects, methods objects support getting
 arbitrary attributes.  However, since method attributes are actually
 stored on the underlying function object (\code{meth.im_func}),
 setting method attributes on either bound or unbound methods is
 disallowed.  Attempting to set a method attribute results in a
-\code{TypeError} being raised.  In order to set a method attribute,
+\exception{TypeError} being raised.  In order to set a method attribute,
 you need to explicitly set it on the underlying function object:
 
 \begin{verbatim}
diff --git a/Doc/lib/libsubprocess.tex b/Doc/lib/libsubprocess.tex
index f48b29b..4417797 100644
--- a/Doc/lib/libsubprocess.tex
+++ b/Doc/lib/libsubprocess.tex
@@ -135,8 +135,8 @@
 
 \begin{funcdesc}{check_call}{*popenargs, **kwargs}
 Run command with arguments.  Wait for command to complete. If the exit
-code was zero then return, otherwise raise CalledProcessError.  The
-CalledProcessError object will have the return code in the
+code was zero then return, otherwise raise \exception{CalledProcessError.}
+The \exception{CalledProcessError} object will have the return code in the
 \member{errno} attribute.
 
 The arguments are the same as for the Popen constructor.  Example:
diff --git a/Doc/lib/libthread.tex b/Doc/lib/libthread.tex
index 4914948..9e0c202 100644
--- a/Doc/lib/libthread.tex
+++ b/Doc/lib/libthread.tex
@@ -100,6 +100,19 @@
 some thread, \code{False} if not.
 \end{methoddesc}
 
+In addition to these methods, lock objects can also be used via the
+\keyword{with} statement, e.g.:
+
+\begin{verbatim}
+from __future__ import with_statement
+import thread
+
+a_lock = thread.allocate_lock()
+
+with a_lock:
+    print "a_lock is locked while this executes"
+\end{verbatim}
+
 \strong{Caveats:}
 
 \begin{itemize}
diff --git a/Doc/lib/libthreading.tex b/Doc/lib/libthreading.tex
index 33839a4..8fb3137 100644
--- a/Doc/lib/libthreading.tex
+++ b/Doc/lib/libthreading.tex
@@ -675,3 +675,26 @@
 Stop the timer, and cancel the execution of the timer's action.  This
 will only work if the timer is still in its waiting stage.
 \end{methoddesc}
+
+\subsection{Using locks, conditions, and semaphores in the \keyword{with}
+statement \label{with-locks}}
+
+All of the objects provided by this module that have \method{acquire()} and
+\method{release()} methods can be used as context managers for a \keyword{with}
+statement.  The \method{acquire()} method will be called when the block is
+entered, and \method{release()} will be called when the block is exited.
+
+Currently, \class{Lock}, \class{RLock}, \class{Condition}, \class{Semaphore},
+and \class{BoundedSemaphore} objects may be used as \keyword{with}
+statement context managers.  For example:
+
+\begin{verbatim}
+from __future__ import with_statement
+import threading
+
+some_rlock = threading.RLock()
+
+with some_rlock:
+    print "some_rlock is locked while this executes"
+\end{verbatim}
+
diff --git a/Doc/lib/libundoc.tex b/Doc/lib/libundoc.tex
index 6cef183..df78152 100644
--- a/Doc/lib/libundoc.tex
+++ b/Doc/lib/libundoc.tex
@@ -42,15 +42,15 @@
 \begin{description}
 \item[\module{ntpath}]
 --- Implementation of \module{os.path} on Win32, Win64, WinCE, and
-OS/2 platforms.
+    OS/2 platforms.
 
 \item[\module{posixpath}]
 --- Implementation of \module{os.path} on \POSIX.
 
 \item[\module{bsddb185}]
 --- Backwards compatibility module for systems which still use the Berkeley
-DB 1.85 module.  It is normally only available on certain BSD Unix-based
-systems.  It should never be used directly.
+    DB 1.85 module.  It is normally only available on certain BSD Unix-based
+    systems.  It should never be used directly.
 \end{description}
 
 
@@ -62,14 +62,14 @@
 
 \item[\module{linuxaudiodev}]
 --- Play audio data on the Linux audio device.  Replaced in Python 2.3
-by the \module{ossaudiodev} module.
+    by the \module{ossaudiodev} module.
 
 \item[\module{sunaudio}]
 --- Interpret Sun audio headers (may become obsolete or a tool/demo).
 
 \item[\module{toaiff}]
 --- Convert "arbitrary" sound files to AIFF files; should probably
-become a tool or demo.  Requires the external program \program{sox}.
+    become a tool or demo.  Requires the external program \program{sox}.
 \end{description}
 
 
@@ -78,12 +78,13 @@
 These modules are not normally available for import; additional work
 must be done to make them available.
 
-Those which are written in Python will be installed into the directory 
-\file{lib-old/} installed as part of the standard library.  To use
-these, the directory must be added to \code{sys.path}, possibly using
-\envvar{PYTHONPATH}.
+%%% lib-old is empty as of Python 2.5
+% Those which are written in Python will be installed into the directory 
+% \file{lib-old/} installed as part of the standard library.  To use
+% these, the directory must be added to \code{sys.path}, possibly using
+% \envvar{PYTHONPATH}.
 
-Obsolete extension modules written in C are not built by default.
+These extension modules written in C are not built by default.
 Under \UNIX, these must be enabled by uncommenting the appropriate
 lines in \file{Modules/Setup} in the build tree and either rebuilding
 Python if the modules are statically linked, or building and
@@ -92,122 +93,11 @@
 % XXX need Windows instructions!
 
 \begin{description}
-\item[\module{addpack}]
---- Alternate approach to packages.  Use the built-in package support
-instead.
-
-\item[\module{cmp}]
---- File comparison function.  Use the newer \refmodule{filecmp} instead.
-
-\item[\module{cmpcache}]
---- Caching version of the obsolete \module{cmp} module.  Use the
-newer \refmodule{filecmp} instead.
-
-\item[\module{codehack}]
---- Extract function name or line number from a function
-code object (these are now accessible as attributes:
-\member{co.co_name}, \member{func.func_name},
-\member{co.co_firstlineno}).
-
-\item[\module{dircmp}]
---- Class to build directory diff tools on (may become a demo or tool).
-\deprecated{2.0}{The \refmodule{filecmp} module replaces
-\module{dircmp}.}
-
-\item[\module{dump}]
---- Print python code that reconstructs a variable.
-
-\item[\module{fmt}]
---- Text formatting abstractions (too slow).
-
-\item[\module{lockfile}]
---- Wrapper around FCNTL file locking (use
-\function{fcntl.lockf()}/\function{flock()} instead; see \refmodule{fcntl}).
-
-\item[\module{newdir}]
---- New \function{dir()} function (the standard \function{dir()} is
-now just as good).
-
-\item[\module{Para}]
---- Helper for \module{fmt}.
-
-\item[\module{poly}]
---- Polynomials.
-
-\item[\module{rand}]
---- Old interface to the random number generator.
-
-\item[\module{regex}]
---- Emacs-style regular expression support; may still be used in some
-old code (extension module).  Refer to the
-\citetitle[http://www.python.org/doc/1.6/lib/module-regex.html]{Python
-1.6 Documentation} for documentation.
-
-\item[\module{regsub}]
---- Regular expression based string replacement utilities, for use
-with \module{regex} (extension module).  Refer to the
-\citetitle[http://www.python.org/doc/1.6/lib/module-regsub.html]{Python
-1.6 Documentation} for documentation.
-
-\item[\module{statcache}]
---- Caches the results of os.stat().  Using the cache can be fragile
-and error-prone, just use \code{os.stat()} directly.
-
-\item[\module{tb}]
---- Print tracebacks, with a dump of local variables (use
-\function{pdb.pm()} or \refmodule{traceback} instead).
-
 \item[\module{timing}]
---- Measure time intervals to high resolution (use
-\function{time.clock()} instead).  (This is an extension module.)
-
-\item[\module{tzparse}]
---- Parse a timezone specification (unfinished; may disappear in the
-future, and does not work when the \envvar{TZ} environment variable is
-not set).
-
-\item[\module{util}]
---- Useful functions that don't fit elsewhere.
-
-\item[\module{whatsound}]
---- Recognize sound files; use \refmodule{sndhdr} instead.
-
-\item[\module{whrandom}]
---- Old random number generator.  Use \module{random} instead.
-
-\item[\module{zmod}]
---- Compute properties of mathematical ``fields.''
+--- Measure time intervals to high resolution (use \function{time.clock()}
+    instead).
 \end{description}
 
-
-The following modules are obsolete, but are likely to re-surface as
-tools or scripts:
-
-\begin{description}
-\item[\module{find}]
---- Find files matching pattern in directory tree.
-
-\item[\module{grep}]
---- \program{grep} implementation in Python.
-
-\item[\module{packmail}]
---- Create a self-unpacking \UNIX{} shell archive.
-\end{description}
-
-
-The following modules were documented in previous versions of this
-manual, but are now considered obsolete.  The source for the
-documentation is still available as part of the documentation source
-archive.
-
-\begin{description}
-\item[\module{ni}]
---- Import modules in ``packages.''  Basic package support is now
-built in.  The built-in support is very similar to what is provided in
-this module.
-\end{description}
-
-
 \section{SGI-specific Extension modules}
 
 The following are SGI specific, and may be out of touch with the
@@ -219,5 +109,5 @@
 
 \item[\module{sv}]
 --- Interface to the ``simple video'' board on SGI Indigo
-(obsolete hardware).
+    (obsolete hardware).
 \end{description}
diff --git a/Doc/lib/liburllib2.tex b/Doc/lib/liburllib2.tex
index 706c54b..e0c4568 100644
--- a/Doc/lib/liburllib2.tex
+++ b/Doc/lib/liburllib2.tex
@@ -384,7 +384,7 @@
     \method{\var{protocol}_open()} are called to handle the request.
     This stage ends when a handler either returns a
     non-\constant{None} value (ie. a response), or raises an exception
-    (usually URLError).  Exceptions are allowed to propagate.
+    (usually \exception{URLError}).  Exceptions are allowed to propagate.
 
     In fact, the above algorithm is first tried for methods named
     \method{default_open}.  If all such methods return
diff --git a/Doc/lib/liburlparse.tex b/Doc/lib/liburlparse.tex
index 8f80d6b..f18efe9 100644
--- a/Doc/lib/liburlparse.tex
+++ b/Doc/lib/liburlparse.tex
@@ -23,50 +23,76 @@
 \code{file}, \code{ftp}, \code{gopher}, \code{hdl}, \code{http}, 
 \code{https}, \code{imap}, \code{mailto}, \code{mms}, \code{news}, 
 \code{nntp}, \code{prospero}, \code{rsync}, \code{rtsp}, \code{rtspu}, 
-\code{sftp}, \code{shttp}, \code{sip}, \code{snews}, \code{svn}, 
+\code{sftp}, \code{shttp}, \code{sip}, \code{sips}, \code{snews}, \code{svn}, 
 \code{svn+ssh}, \code{telnet}, \code{wais}.
-\versionadded[Support for the \code{sftp} scheme]{2.5}
+
+\versionadded[Support for the \code{sftp} and \code{sips} schemes]{2.5}
 
 The \module{urlparse} module defines the following functions:
 
-\begin{funcdesc}{urlparse}{urlstring\optional{, default_scheme\optional{, allow_fragments}}}
-Parse a URL into 6 components, returning a 6-tuple: (addressing
-scheme, network location, path, parameters, query, fragment
-identifier).  This corresponds to the general structure of a URL:
+\begin{funcdesc}{urlparse}{urlstring\optional{,
+                           default_scheme\optional{, allow_fragments}}}
+Parse a URL into six components, returning a 6-tuple.  This
+corresponds to the general structure of a URL:
 \code{\var{scheme}://\var{netloc}/\var{path};\var{parameters}?\var{query}\#\var{fragment}}.
 Each tuple item is a string, possibly empty.
-The components are not broken up in smaller parts (e.g. the network
+The components are not broken up in smaller parts (for example, the network
 location is a single string), and \% escapes are not expanded.
-The delimiters as shown above are not part of the tuple items,
+The delimiters as shown above are not part of the result,
 except for a leading slash in the \var{path} component, which is
-retained if present.
-
-Example:
+retained if present.  For example:
 
 \begin{verbatim}
-urlparse('http://www.cwi.nl:80/%7Eguido/Python.html')
-\end{verbatim}
-
-yields the tuple
-
-\begin{verbatim}
+>>> from urlparse import urlparse
+>>> o = urlparse('http://www.cwi.nl:80/%7Eguido/Python.html')
+>>> o
 ('http', 'www.cwi.nl:80', '/%7Eguido/Python.html', '', '', '')
+>>> o.scheme
+'http'
+>>> o.port
+80
+>>> o.geturl()
+'http://www.cwi.nl:80/%7Eguido/Python.html'
 \end{verbatim}
 
 If the \var{default_scheme} argument is specified, it gives the
-default addressing scheme, to be used only if the URL string does not
+default addressing scheme, to be used only if the URL does not
 specify one.  The default value for this argument is the empty string.
 
-If the \var{allow_fragments} argument is zero, fragment identifiers
+If the \var{allow_fragments} argument is false, fragment identifiers
 are not allowed, even if the URL's addressing scheme normally does
-support them.  The default value for this argument is \code{1}.
+support them.  The default value for this argument is \constant{True}.
+
+The return value is actually an instance of a subclass of
+\pytype{tuple}.  This class has the following additional read-only
+convenience attributes:
+
+\begin{tableiv}{l|c|l|c}{member}{Attribute}{Index}{Value}{Value if not present}
+  \lineiv{scheme}  {0} {URL scheme specifier}             {empty string}
+  \lineiv{netloc}  {1} {Network location part}            {empty string}
+  \lineiv{path}    {2} {Hierarchical path}                {empty string}
+  \lineiv{params}  {3} {Parameters for last path element} {empty string}
+  \lineiv{query}   {4} {Query component}                  {empty string}
+  \lineiv{fragment}{5} {Fragment identifier}              {empty string}
+  \lineiv{username}{ } {User name}                        {\constant{None}}
+  \lineiv{password}{ } {Password}                         {\constant{None}}
+  \lineiv{hostname}{ } {Host name (lower case)}           {\constant{None}}
+  \lineiv{port}    { } {Port number as integer, if present} {\constant{None}}
+\end{tableiv}
+
+See section~\ref{urlparse-result-object}, ``Results of
+\function{urlparse()} and \function{urlsplit()},'' for more
+information on the result object.
+
+\versionchanged[Added attributes to return value]{2.5}
 \end{funcdesc}
 
-\begin{funcdesc}{urlunparse}{tuple}
-Construct a URL string from a tuple as returned by \code{urlparse()}.
+\begin{funcdesc}{urlunparse}{parts}
+Construct a URL from a tuple as returned by \code{urlparse()}.
+The \var{parts} argument be any six-item iterable.
 This may result in a slightly different, but equivalent URL, if the
-URL that was parsed originally had redundant delimiters, e.g. a ? with
-an empty query (the draft states that these are equivalent).
+URL that was parsed originally had unnecessary delimiters (for example,
+a ? with an empty query; the RFC states that these are equivalent).
 \end{funcdesc}
 
 \begin{funcdesc}{urlsplit}{urlstring\optional{,
@@ -79,12 +105,38 @@
 separate the path segments and parameters.  This function returns a
 5-tuple: (addressing scheme, network location, path, query, fragment
 identifier).
+
+The return value is actually an instance of a subclass of
+\pytype{tuple}.  This class has the following additional read-only
+convenience attributes:
+
+\begin{tableiv}{l|c|l|c}{member}{Attribute}{Index}{Value}{Value if not present}
+  \lineiv{scheme}   {0} {URL scheme specifier}   {empty string}
+  \lineiv{netloc}   {1} {Network location part}  {empty string}
+  \lineiv{path}     {2} {Hierarchical path}      {empty string}
+  \lineiv{query}    {3} {Query component}        {empty string}
+  \lineiv{fragment} {4} {Fragment identifier}    {empty string}
+  \lineiv{username} { } {User name}              {\constant{None}}
+  \lineiv{password} { } {Password}               {\constant{None}}
+  \lineiv{hostname} { } {Host name (lower case)} {\constant{None}}
+  \lineiv{port}     { } {Port number as integer, if present} {\constant{None}}
+\end{tableiv}
+
+See section~\ref{urlparse-result-object}, ``Results of
+\function{urlparse()} and \function{urlsplit()},'' for more
+information on the result object.
+
 \versionadded{2.2}
+\versionchanged[Added attributes to return value]{2.5}
 \end{funcdesc}
 
-\begin{funcdesc}{urlunsplit}{tuple}
+\begin{funcdesc}{urlunsplit}{parts}
 Combine the elements of a tuple as returned by \function{urlsplit()}
 into a complete URL as a string.
+The \var{parts} argument be any five-item iterable.
+This may result in a slightly different, but equivalent URL, if the
+URL that was parsed originally had unnecessary delimiters (for example,
+a ? with an empty query; the RFC states that these are equivalent).
 \versionadded{2.2}
 \end{funcdesc}
 
@@ -93,22 +145,16 @@
 (\var{base}) with a ``relative URL'' (\var{url}).  Informally, this
 uses components of the base URL, in particular the addressing scheme,
 the network location and (part of) the path, to provide missing
-components in the relative URL.
-
-Example:
+components in the relative URL.  For example:
 
 \begin{verbatim}
-urljoin('http://www.cwi.nl/%7Eguido/Python.html', 'FAQ.html')
-\end{verbatim}
-
-yields the string
-
-\begin{verbatim}
+>>> from urlparse import urljoin
+>>> urljoin('http://www.cwi.nl/%7Eguido/Python.html', 'FAQ.html')
 'http://www.cwi.nl/%7Eguido/FAQ.html'
 \end{verbatim}
 
-The \var{allow_fragments} argument has the same meaning as for
-\code{urlparse()}.
+The \var{allow_fragments} argument has the same meaning and default as
+for \function{urlparse()}.
 \end{funcdesc}
 
 \begin{funcdesc}{urldefrag}{url}
@@ -133,3 +179,61 @@
         both Uniform Resource Names (URNs) and Uniform Resource
         Locators (URLs).}
 \end{seealso}
+
+
+\subsection{Results of \function{urlparse()} and \function{urlsplit()}
+            \label{urlparse-result-object}}
+
+The result objects from the \function{urlparse()} and
+\function{urlsplit()} functions are subclasses of the \pytype{tuple}
+type.  These subclasses add the attributes described in those
+functions, as well as provide an additional method:
+
+\begin{methoddesc}[ParseResult]{geturl}{}
+  Return the re-combined version of the original URL as a string.
+  This may differ from the original URL in that the scheme will always
+  be normalized to lower case and empty components may be dropped.
+  Specifically, empty parameters, queries, and fragment identifiers
+  will be removed.
+
+  The result of this method is a fixpoint if passed back through the
+  original parsing function:
+
+\begin{verbatim}
+>>> import urlparse
+>>> url = 'HTTP://www.Python.org/doc/#'
+
+>>> r1 = urlparse.urlsplit(url)
+>>> r1.geturl()
+'http://www.Python.org/doc/'
+
+>>> r2 = urlparse.urlsplit(r1.geturl())
+>>> r2.geturl()
+'http://www.Python.org/doc/'
+\end{verbatim}
+
+\versionadded{2.5}
+\end{methoddesc}
+
+The following classes provide the implementations of the parse results::
+
+\begin{classdesc*}{BaseResult}
+  Base class for the concrete result classes.  This provides most of
+  the attribute definitions.  It does not provide a \method{geturl()}
+  method.  It is derived from \class{tuple}, but does not override the
+  \method{__init__()} or \method{__new__()} methods.
+\end{classdesc*}
+
+
+\begin{classdesc}{ParseResult}{scheme, netloc, path, params, query, fragment}
+  Concrete class for \function{urlparse()} results.  The
+  \method{__new__()} method is overridden to support checking that the
+  right number of arguments are passed.
+\end{classdesc}
+
+
+\begin{classdesc}{SplitResult}{scheme, netloc, path, query, fragment}
+  Concrete class for \function{urlsplit()} results.  The
+  \method{__new__()} method is overridden to support checking that the
+  right number of arguments are passed.
+\end{classdesc}
diff --git a/Doc/lib/libwarnings.tex b/Doc/lib/libwarnings.tex
index 8655451..7b829a0 100644
--- a/Doc/lib/libwarnings.tex
+++ b/Doc/lib/libwarnings.tex
@@ -169,7 +169,8 @@
 \end{funcdesc}
 
 \begin{funcdesc}{warn_explicit}{message, category, filename,
- lineno\optional{, module\optional{, registry}}}
+ lineno\optional{, module\optional{, registry\optional{,
+ module_globals}}}}
 This is a low-level interface to the functionality of
 \function{warn()}, passing in explicitly the message, category,
 filename and line number, and optionally the module name and the
@@ -179,6 +180,11 @@
 \var{message} must be a string and \var{category} a subclass of
 \exception{Warning} or \var{message} may be a \exception{Warning} instance,
 in which case \var{category} will be ignored.
+
+\var{module_globals}, if supplied, should be the global namespace in use
+by the code for which the warning is issued.  (This argument is used to
+support displaying source for modules found in zipfiles or other
+non-filesystem import sources, and was added in Python 2.5.)
 \end{funcdesc}
 
 \begin{funcdesc}{showwarning}{message, category, filename,
diff --git a/Doc/lib/libxmlrpclib.tex b/Doc/lib/libxmlrpclib.tex
index 0fb88c5..1c36f99 100644
--- a/Doc/lib/libxmlrpclib.tex
+++ b/Doc/lib/libxmlrpclib.tex
@@ -203,7 +203,7 @@
 
 \subsection{Binary Objects \label{binary-objects}}
 
-This class may initialized from string data (which may include NULs).
+This class may be initialized from string data (which may include NULs).
 The primary access to the content of a \class{Binary} object is
 provided by an attribute:
 
@@ -303,10 +303,6 @@
 \code{True} or \code{False}.
 \end{funcdesc}
 
-\begin{funcdesc}{binary}{data}
-Trivially convert any Python string to a \class{Binary} object.
-\end{funcdesc}
-
 \begin{funcdesc}{dumps}{params\optional{, methodname\optional{, 
 	                methodresponse\optional{, encoding\optional{,
 	                allow_none}}}}}
diff --git a/Doc/lib/libzipfile.tex b/Doc/lib/libzipfile.tex
index 32ca3e0..4e06ef6 100644
--- a/Doc/lib/libzipfile.tex
+++ b/Doc/lib/libzipfile.tex
@@ -141,10 +141,17 @@
   Write the file named \var{filename} to the archive, giving it the
   archive name \var{arcname} (by default, this will be the same as
   \var{filename}, but without a drive letter and with leading path
-  separators removed).  If given, \var{compress_type} overrides the value
-  given for the \var{compression} parameter to the constructor for
-  the new entry.  The archive must be open with mode \code{'w'} or
-  \code{'a'}.
+  separators removed).  If given, \var{compress_type} overrides the
+  value given for the \var{compression} parameter to the constructor
+  for the new entry.  The archive must be open with mode \code{'w'}
+  or \code{'a'}.
+  
+  \note{There is no official file name encoding for ZIP files.
+  If you have unicode file names, please convert them to byte strings
+  in your desired encoding before passing them to \method{write()}.
+  WinZip interprets all file names as encoded in CP437, also known
+  as DOS Latin.}
+
   \note{Archive names should be relative to the archive root, that is,
         they should not start with a path separator.}
 \end{methoddesc}
diff --git a/Doc/lib/libzipimport.tex b/Doc/lib/libzipimport.tex
index 0a60b29..770ea21 100644
--- a/Doc/lib/libzipimport.tex
+++ b/Doc/lib/libzipimport.tex
@@ -69,8 +69,8 @@
 
 \begin{classdesc}{zipimporter}{archivepath} 
   Create a new zipimporter instance. \var{archivepath} must be a path to
-  a zipfile.  \class{ZipImportError} is raised if \var{archivepath} doesn't
-  point to a valid ZIP archive.
+  a zipfile.  \exception{ZipImportError} is raised if \var{archivepath}
+  doesn't point to a valid ZIP archive.
 \end{classdesc}
 
 \begin{methoddesc}{find_module}{fullname\optional{, path}}
@@ -83,7 +83,7 @@
 
 \begin{methoddesc}{get_code}{fullname}
   Return the code object for the specified module. Raise
-  \class{ZipImportError} if the module couldn't be found.
+  \exception{ZipImportError} if the module couldn't be found.
 \end{methoddesc}
 
 \begin{methoddesc}{get_data}{pathname}
@@ -93,20 +93,20 @@
 
 \begin{methoddesc}{get_source}{fullname}
   Return the source code for the specified module. Raise
-  \class{ZipImportError} if the module couldn't be found, return
+  \exception{ZipImportError} if the module couldn't be found, return
   \constant{None} if the archive does contain the module, but has
   no source for it.
 \end{methoddesc}
 
 \begin{methoddesc}{is_package}{fullname}
   Return True if the module specified by \var{fullname} is a package.
-  Raise \class{ZipImportError} if the module couldn't be found.
+  Raise \exception{ZipImportError} if the module couldn't be found.
 \end{methoddesc}
 
 \begin{methoddesc}{load_module}{fullname}
   Load the module specified by \var{fullname}. \var{fullname} must be the
   fully qualified (dotted) module name. It returns the imported
-  module, or raises \class{ZipImportError} if it wasn't found.
+  module, or raises \exception{ZipImportError} if it wasn't found.
 \end{methoddesc}
 
 \subsection{Examples}
diff --git a/Doc/lib/libzlib.tex b/Doc/lib/libzlib.tex
index 2df8b85..dfbb43d 100644
--- a/Doc/lib/libzlib.tex
+++ b/Doc/lib/libzlib.tex
@@ -166,11 +166,14 @@
 decompressed, and \member{unconsumed_tail} is an empty string.
 \end{methoddesc}
 
-\begin{methoddesc}[Decompress]{flush}{}
+\begin{methoddesc}[Decompress]{flush}{\optional{length}}
 All pending input is processed, and a string containing the remaining
 uncompressed output is returned.  After calling \method{flush()}, the
 \method{decompress()} method cannot be called again; the only realistic
 action is to delete the object.
+
+The optional parameter \var{length} sets the initial size of the
+output buffer.
 \end{methoddesc}
 
 \begin{seealso}
diff --git a/Doc/lib/mimelib.tex b/Doc/lib/mimelib.tex
index 67de597..491d844 100644
--- a/Doc/lib/mimelib.tex
+++ b/Doc/lib/mimelib.tex
@@ -12,9 +12,9 @@
 \authoraddress{\email{barry@python.org}}
 
 \date{\today}
-\release{3.0}			% software release, not documentation
+\release{4.0}			% software release, not documentation
 \setreleaseinfo{}		% empty for final release
-\setshortversion{3.0}		% major.minor only for software
+\setshortversion{4.0}		% major.minor only for software
 
 \begin{document}
 
@@ -38,11 +38,11 @@
 parse, generate, and modify email messages, conforming to all the
 relevant email and MIME related RFCs.
 
-This document describes version 3.0 of the \module{email} package, which is
-distributed with Python 2.4 and is available as a standalone distutils-based
-package for use with Python 2.3.  \module{email} 3.0 is not compatible with
-Python versions earlier than 2.3.  For more information about the
-\module{email} package, including download links and mailing lists, see
+This document describes version 4.0 of the \module{email} package, which is
+distributed with Python 2.5 and is available as a standalone distutils-based
+package for use with earlier Python versions.  \module{email} 4.0 is not
+compatible with Python versions earlier than 2.3.  For more information about
+the \module{email} package, including download links and mailing lists, see
 \ulink{Python's email SIG}{http://www.python.org/sigs/email-sig}.
 
 The documentation that follows was written for the Python project, so
@@ -51,7 +51,8 @@
 
 \begin{itemize}
 \item Deprecation and ``version added'' notes are relative to the
-      Python version a feature was added or deprecated.
+      Python version a feature was added or deprecated.  See
+      the package history in section \ref{email-pkg-history} for details.
 
 \item If you're reading this documentation as part of the
       standalone \module{email} package, some of the internal links to
diff --git a/Doc/lib/xmldomminidom.tex b/Doc/lib/xmldomminidom.tex
index f7657eb..093915f 100644
--- a/Doc/lib/xmldomminidom.tex
+++ b/Doc/lib/xmldomminidom.tex
@@ -165,7 +165,7 @@
 
 With an explicit \var{encoding} argument, the result is a byte string
 in the specified encoding. It is recommended that this argument is
-always specified. To avoid UnicodeError exceptions in case of
+always specified. To avoid \exception{UnicodeError} exceptions in case of
 unrepresentable text data, the encoding argument should be specified
 as "utf-8".
 
diff --git a/Doc/python-docs.txt b/Doc/python-docs.txt
index 017fece..bf475b6 100644
--- a/Doc/python-docs.txt
+++ b/Doc/python-docs.txt
@@ -180,4 +180,4 @@
       whether it's safe to remove, see the "Why is Python Installed on
       my Computer?" FAQ, found at:
 
-          http://www.python.org/doc/faq/installed.html
+          http://www.python.org/doc/faq/installed/
diff --git a/Doc/ref/ref3.tex b/Doc/ref/ref3.tex
index 737b861..964013f 100644
--- a/Doc/ref/ref3.tex
+++ b/Doc/ref/ref3.tex
@@ -1035,7 +1035,7 @@
 %=========================================================================
 \section{New-style and classic classes}
 
-Classes and instances come in two flavours: old-style or classic, and new-style.  
+Classes and instances come in two flavors: old-style or classic, and new-style.  
 
 Up to Python 2.1, old-style classes were the only flavour available to the
 user.  The concept of (old-style) class is unrelated to the concept of type: if
@@ -1065,10 +1065,14 @@
 implemented before for compatibility concerns, like the method resolution order
 in case of multiple inheritance.
 
-This manuel is not up-to-date with respect to new-style classes.  For now,
+This manual is not up-to-date with respect to new-style classes.  For now,
 please see \url{http://www.python.org/doc/newstyle.html} for more information.
 
-The plan is to eventually drop old-style classes, leaving only the semantics of new-style classes.  This change will probably only be feasible in Python 3.0.
+The plan is to eventually drop old-style classes, leaving only the semantics of
+new-style classes.  This change will probably only be feasible in Python 3.0.
+\index{class}{new-style}
+\index{class}{classic}
+\index{class}{old-style}
 
 %=========================================================================
 \section{Special method names\label{specialnames}}
@@ -2053,14 +2057,15 @@
 \item
 
 Exception to the previous item: if the left operand is an instance of
-a built-in type or a new-style class, and the right operand is an
-instance of a proper subclass of that type or class, the right
-operand's \method{__rop__()} method is tried \emph{before} the left
-operand's \method{__op__()} method.  This is done so that a subclass can
-completely override binary operators.  Otherwise, the left operand's
-__op__ method would always accept the right operand: when an instance
-of a given class is expected, an instance of a subclass of that class
-is always acceptable.
+a built-in type or a new-style class, and the right operand is an instance
+of a proper subclass of that type or class and overrides the base's
+\method{__rop__()} method, the right operand's \method{__rop__()} method
+is tried \emph{before} the left operand's \method{__op__()} method.
+
+This is done so that a subclass can completely override binary operators.
+Otherwise, the left operand's \method{__op__()} method would always
+accept the right operand: when an instance of a given class is expected,
+an instance of a subclass of that class is always acceptable.
 
 \item
 
@@ -2106,3 +2111,63 @@
 \function{coerce()} function.
 
 \end{itemize}
+
+\subsection{Context Managers and Contexts\label{context-managers}}
+
+\versionadded{2.5}
+
+A \dfn{context manager} is an object that manages the entry to, and exit
+from, a \dfn{context} surrounding a block of code.  Context managers are
+normally invoked using the \keyword{with} statement (described in
+section~\ref{with}), but can also be used by directly invoking their
+methods.
+\stindex{with}
+\index{context manager}
+\index{context}
+
+Typical uses of context managers include saving and restoring various
+kinds of global state, locking and unlocking resources, closing opened
+files, etc.
+
+\begin{methoddesc}[context manager]{__context__}{self}
+Invoked when the object is used as the context expression of a
+\keyword{with} statement.  The return value must implement
+\method{__enter__()} and \method{__exit__()} methods.  Simple context
+managers that wish to directly
+implement \method{__enter__()} and \method{__exit__()} should just
+return \var{self}.
+
+Context managers written in Python can also implement this method using
+a generator function decorated with the
+\function{contextlib.contextmanager} decorator, as this can be simpler
+than writing individual \method{__enter__()} and \method{__exit__()}
+methods when the state to be managed is complex.
+\end{methoddesc}
+
+\begin{methoddesc}[context]{__enter__}{self}
+Enter the context defined by this object. The \keyword{with} statement
+will bind this method's return value to the target(s) specified in the
+\keyword{as} clause of the statement, if any.
+\end{methoddesc}
+
+\begin{methoddesc}[context]{__exit__}{exc_type, exc_value, traceback}
+Exit the context defined by this object. The parameters describe the
+exception that caused the context to be exited. If the context was
+exited without an exception, all three arguments will be
+\constant{None}.
+
+If an exception is supplied, and the method wishes to suppress the
+exception (i.e., prevent it from being propagated), it should return a
+true value. Otherwise, the exception will be processed normally upon
+exit from this method.
+
+Note that \method{__exit__} methods should not reraise the passed-in
+exception; this is the caller's responsibility.
+\end{methoddesc}
+
+\begin{seealso}
+  \seepep{0343}{The "with" statement}
+         {The specification, background, and examples for the
+          Python \keyword{with} statement.}
+\end{seealso}
+
diff --git a/Doc/ref/ref6.tex b/Doc/ref/ref6.tex
index d1d23ac..1eb1258 100644
--- a/Doc/ref/ref6.tex
+++ b/Doc/ref/ref6.tex
@@ -488,11 +488,12 @@
 invoked, the function can proceed exactly as if the \keyword{yield}
 statement were just another external call.
 
-The \keyword{yield} statement is not allowed in the \keyword{try}
-clause of a \keyword{try} ...\ \keyword{finally} construct.  The
-difficulty is that there's no guarantee the generator will ever be
-resumed, hence no guarantee that the \keyword{finally} block will ever
-get executed.
+As of Python version 2.5, the \keyword{yield} statement is now
+allowed in the \keyword{try} clause of a \keyword{try} ...\ 
+\keyword{finally} construct.  If the generator is not resumed before
+it is finalized (by reaching a zero reference count or by being garbage
+collected), the generator-iterator's \method{close()} method will be
+called, allowing any pending \keyword{finally} clauses to execute.
 
 \begin{notice}
 In Python 2.2, the \keyword{yield} statement is only allowed
@@ -510,6 +511,11 @@
   \seepep{0255}{Simple Generators}
          {The proposal for adding generators and the \keyword{yield}
           statement to Python.}
+
+  \seepep{0342}{Coroutines via Enhanced Generators}
+         {The proposal that, among other generator enhancements,
+          proposed allowing \keyword{yield} to appear inside a
+          \keyword{try} ... \keyword{finally} block.}
 \end{seealso}
 
 
diff --git a/Doc/ref/ref7.tex b/Doc/ref/ref7.tex
index 90627a4..a2d46a8 100644
--- a/Doc/ref/ref7.tex
+++ b/Doc/ref/ref7.tex
@@ -46,6 +46,7 @@
   \productioncont{| \token{while_stmt}}
   \productioncont{| \token{for_stmt}}
   \productioncont{| \token{try_stmt}}
+  \productioncont{| \token{with_stmt}}
   \productioncont{| \token{funcdef}}
   \productioncont{| \token{classdef}}
   \production{suite}
@@ -305,8 +306,75 @@
 \section{The \keyword{with} statement\label{with}}
 \stindex{with}
 
-The \keyword{with} statement specifies 
+\versionadded{2.5}
 
+The \keyword{with} statement is used to wrap the execution of a block
+with methods defined by a context manager (see
+section~\ref{context-managers}). This allows common
+\keyword{try}...\keyword{except}...\keyword{finally} usage patterns to
+be encapsulated as context managers for convenient reuse.
+
+\begin{productionlist}
+  \production{with_stmt}
+  {"with" \token{expression} ["as" target_list] ":" \token{suite}}
+\end{productionlist}
+
+The execution of the \keyword{with} statement proceeds as follows:
+
+\begin{enumerate}
+
+\item The expression is evaluated, to obtain a context manager
+object.
+
+\item The context manager's \method{__context__()} method is invoked to
+obtain a context object.
+
+\item The context object's \method{__enter__()} method is invoked.
+
+\item If a target list was included in the \keyword{with}
+statement, the return value from \method{__enter__()} is assigned to it.
+
+\note{The \keyword{with} statement guarantees that if the
+\method{__enter__()} method returns without an error, then
+\method{__exit__()} will always be called. Thus, if an error occurs
+during the assignment to the target list, it will be treated the same as
+an error occurring within the suite would be. See step 6 below.}
+
+\item The suite is executed.
+
+\item The context object's \method{__exit__()} method is invoked. If an
+exception caused the suite to be exited, its type, value, and
+traceback are passed as arguments to \method{__exit__()}. Otherwise,
+three \constant{None} arguments are supplied.
+
+If the suite was exited due to an exception, and the return
+value from the \method{__exit__()} method was false, the exception is
+reraised. If the return value was true, the exception is suppressed, and
+execution continues with the statement following the \keyword{with}
+statement.
+
+If the suite was exited for any reason other than an exception, the
+return value from \method{__exit__()} is ignored, and execution proceeds
+at the normal location for the kind of exit that was taken.
+
+\end{enumerate}
+
+\begin{notice}
+In Python 2.5, the \keyword{with} statement is only allowed
+when the \code{with_statement} feature has been enabled.  It will always
+be enabled in Python 2.6.  This \code{__future__} import statement can
+be used to enable the feature:
+
+\begin{verbatim}
+from __future__ import with_statement
+\end{verbatim}
+\end{notice}
+
+\begin{seealso}
+  \seepep{0343}{The "with" statement}
+         {The specification, background, and examples for the
+          Python \keyword{with} statement.}
+\end{seealso}
 
 \section{Function definitions\label{function}}
 \indexii{function}{definition}
diff --git a/Doc/tools/prechm.py b/Doc/tools/prechm.py
index 7b2f393..57a43fd 100644
--- a/Doc/tools/prechm.py
+++ b/Doc/tools/prechm.py
@@ -150,6 +150,22 @@
 # Library Doc list of books:
 # each 'book' : (Dir, Title, First page, Content page, Index page)
 supported_libraries = {
+    '2.5':
+    [
+        Book('.', 'Main page', 'index'),
+        Book('.', 'Global Module Index', 'modindex'),
+        Book('whatsnew', "What's New", 'index', 'contents'),
+        Book('tut','Tutorial','tut','node2'),
+        Book('lib','Library Reference','lib','contents','genindex'),
+        Book('ref','Language Reference','ref','contents','genindex'),
+        Book('mac','Macintosh Reference','mac','contents','genindex'),
+        Book('ext','Extending and Embedding','ext','contents'),
+        Book('api','Python/C API','api','contents','genindex'),
+        Book('doc','Documenting Python','doc','contents'),
+        Book('inst','Installing Python Modules', 'inst', 'index'),
+        Book('dist','Distributing Python Modules', 'dist', 'index', 'genindex'),
+    ],
+
     '2.4':
     [
         Book('.', 'Main page', 'index'),
diff --git a/Doc/tools/toc2bkm.py b/Doc/tools/toc2bkm.py
index 636459a..ab669ba 100755
--- a/Doc/tools/toc2bkm.py
+++ b/Doc/tools/toc2bkm.py
@@ -44,6 +44,20 @@
 INCLUDED_LEVELS = ("chapter", "section", "subsection", "subsubsection")
 
 
+class BadSectionNesting(Exception):
+    """Raised for unsupported section level transitions."""
+
+    def __init__(self, level, newsection, path, lineno):
+        self.level = level
+        self.newsection = newsection
+        self.path = path
+        self.lineno = lineno
+
+    def __str__(self):
+        return ("illegal transition from %s to %s at %s (line %s)"
+                % (self.level, self.newsection, self.path, self.lineno))
+
+
 def parse_toc(fp, bigpart=None):
     toc = top = []
     stack = [toc]
@@ -65,7 +79,10 @@
                 if stype not in INCLUDED_LEVELS:
                     # we don't want paragraphs & subparagraphs
                     continue
-                direction = _transition_map[(level, stype)]
+                try:
+                    direction = _transition_map[(level, stype)]
+                except KeyError:
+                    raise BadSectionNesting(level, stype, fp.name, lineno)
                 if direction == OUTER_TO_INNER:
                     toc = toc[-1][-1]
                     stack.insert(0, toc)
diff --git a/Doc/tut/tut.tex b/Doc/tut/tut.tex
index 054985b..7f9a7ee 100644
--- a/Doc/tut/tut.tex
+++ b/Doc/tut/tut.tex
@@ -1012,7 +1012,7 @@
 \end{verbatim}
 
 Assignment to slices is also possible, and this can even change the size
-of the list:
+of the list or clear it entirely:
 
 \begin{verbatim}
 >>> # Replace some items:
@@ -1027,9 +1027,14 @@
 ... a[1:1] = ['bletch', 'xyzzy']
 >>> a
 [123, 'bletch', 'xyzzy', 1234]
->>> a[:0] = a     # Insert (a copy of) itself at the beginning
+>>> # Insert (a copy of) itself at the beginning
+>>> a[:0] = a
 >>> a
 [123, 'bletch', 'xyzzy', 1234, 123, 'bletch', 'xyzzy', 1234]
+>>> # Clear the list: replace all items with an empty list
+>>> a[:] = []
+>>> a
+[]
 \end{verbatim}
 
 The built-in function \function{len()} also applies to lists:
@@ -2023,9 +2028,9 @@
 There is a way to remove an item from a list given its index instead
 of its value: the \keyword{del} statement.  This differs from the
 \method{pop()}) method which returns a value.  The \keyword{del}
-statement can also be used to
-remove slices from a list (which we did earlier by assignment of an
-empty list to the slice).  For example:
+statement can also be used to remove slices from a list or clear the
+entire list (which we did earlier by assignment of an empty list to
+the slice).  For example:
 
 \begin{verbatim}
 >>> a = [-1, 1, 66.25, 333, 333, 1234.5]
@@ -2035,6 +2040,9 @@
 >>> del a[2:4]
 >>> a
 [1, 66.25, 1234.5]
+>>> del a[:]
+>>> a
+[]
 \end{verbatim}
 
 \keyword{del} can also be used to delete entire variables:
@@ -3710,19 +3718,49 @@
 KeyboardInterrupt
 \end{verbatim}
 
-A \emph{finally clause} is executed whether or not an exception has
-occurred in the try clause.  When an exception has occurred, it is
-re-raised after the finally clause is executed.  The finally clause is
-also executed ``on the way out'' when the \keyword{try} statement is
-left via a \keyword{break} or \keyword{return} statement.
+A \emph{finally clause} is always executed before leaving the
+\keyword{try} statement, whether an exception has occurred or not.
+When an exception has occurred in the \keyword{try} clause and has not
+been handled by an \keyword{except} clause (or it has occurred in a
+\keyword{except} or \keyword{else} clause), it is re-raised after the
+\keyword{finally} clause has been executed.  The \keyword{finally} clause
+is also executed ``on the way out'' when any other clause of the
+\keyword{try} statement is left via a \keyword{break}, \keyword{continue}
+or \keyword{return} statement.  A more complicated example:
 
-The code in the finally clause is useful for releasing external
-resources (such as files or network connections), regardless of
-whether the use of the resource was successful.
+\begin{verbatim}
+>>> def divide(x, y):
+...     try:
+...         result = x / y
+...     except ZeroDivisionError:
+...         print "division by zero!"
+...     else:
+...         print "result is", result
+...     finally:
+...         print "executing finally clause"
+...
+>>> divide(2, 1)
+result is 2
+executing finally clause
+>>> divide(2, 0)
+division by zero!
+executing finally clause
+>>> divide("2", "1")
+executing finally clause
+Traceback (most recent call last):
+  File "<stdin>", line 1, in ?
+  File "<stdin>", line 3, in divide
+TypeError: unsupported operand type(s) for /: 'str' and 'str'
+\end{verbatim}
 
-A \keyword{try} statement must either have one or more except clauses
-or one finally clause, but not both (because it would be unclear which
-clause should be executed first).
+As you can see, the \keyword{finally} clause is executed in any
+event.  The \exception{TypeError} raised by dividing two strings
+is not handled by the \keyword{except} clause and therefore
+re-raised after the \keyword{finally} clauses has been executed.
+
+In real world applications, the \keyword{finally} clause is useful
+for releasing external resources (such as files or network connections),
+regardless of whether the use of the resource was successful.
 
 
 \chapter{Classes \label{classes}}
@@ -5340,7 +5378,7 @@
 
 \item \citetitle[../ref/ref.html]{Language Reference}: A detailed 
 explanation of Python's syntax and semantics.  It's heavy reading, 
-but is useful as a
+but is useful as a complete guide to the language itself.
 
 \end{itemize}
 
diff --git a/Doc/whatsnew/whatsnew20.tex b/Doc/whatsnew/whatsnew20.tex
index b89ac19..bf458fa 100644
--- a/Doc/whatsnew/whatsnew20.tex
+++ b/Doc/whatsnew/whatsnew20.tex
@@ -1214,8 +1214,8 @@
 \item{\module{gettext}:} This module provides internationalization
 (I18N) and localization (L10N) support for Python programs by
 providing an interface to the GNU gettext message catalog library.
-(Integrated by Barry Warsaw, from separate contributions by Martin von
-Loewis, Peter Funk, and James Henstridge.)
+(Integrated by Barry Warsaw, from separate contributions by Martin 
+von~L\"owis, Peter Funk, and James Henstridge.)
 
 \item{\module{linuxaudiodev}:} Support for the \file{/dev/audio}
 device on Linux, a twin to the existing \module{sunaudiodev} module.
diff --git a/Doc/whatsnew/whatsnew21.tex b/Doc/whatsnew/whatsnew21.tex
index b7ea3f2..f3d0245 100644
--- a/Doc/whatsnew/whatsnew21.tex
+++ b/Doc/whatsnew/whatsnew21.tex
@@ -5,7 +5,7 @@
 % $Id$
 
 \title{What's New in Python 2.1}
-\release{1.00}
+\release{1.01}
 \author{A.M. Kuchling}
 \authoraddress{
 	\strong{Python Software Foundation}\\
@@ -16,14 +16,7 @@
 
 \section{Introduction}
 
-It's that time again... time for a new Python release, Python 2.1.
-One recent goal of the Python development team has been to accelerate
-the pace of new releases, with a new release coming every 6 to 9
-months. 2.1 is the first release to come out at this faster pace, with
-the first alpha appearing in January, 3 months after the final version
-of 2.0 was released.
-
-This article explains the new features in 2.1.  While there aren't as
+This article explains the new features in Python 2.1.  While there aren't as
 many changes in 2.1 as there were in Python 2.0, there are still some
 pleasant surprises in store.  2.1 is the first release to be steered
 through the use of Python Enhancement Proposals, or PEPs, so most of
@@ -34,6 +27,12 @@
 Refer to the Python 2.1 documentation, or to the specific PEP, for
 more details about any new feature that particularly interests you.
 
+One recent goal of the Python development team has been to accelerate
+the pace of new releases, with a new release coming every 6 to 9
+months. 2.1 is the first release to come out at this faster pace, with
+the first alpha appearing in January, 3 months after the final version
+of 2.0 was released.
+
 The final release of Python 2.1 was made on April 17, 2001.
 
 %======================================================================
diff --git a/Doc/whatsnew/whatsnew24.tex b/Doc/whatsnew/whatsnew24.tex
index e9ff4d9..51baece 100644
--- a/Doc/whatsnew/whatsnew24.tex
+++ b/Doc/whatsnew/whatsnew24.tex
@@ -803,8 +803,8 @@
 
 The PEP also proposes that all \keyword{import} statements be absolute
 imports, with a leading \samp{.} character to indicate a relative
-import.  This part of the PEP is not yet implemented, and will have to
-wait for Python 2.5 or some other future version.
+import.  This part of the PEP was not implemented for Python 2.4,
+but was completed for Python 2.5.
 
 \begin{seealso}
 \seepep{328}{Imports: Multi-Line and Absolute/Relative}
@@ -1336,7 +1336,7 @@
 \item The \module{marshal} module now shares interned strings on unpacking a 
 data structure.  This may shrink the size of certain pickle strings,
 but the primary effect is to make \file{.pyc} files significantly smaller.
-(Contributed by Martin von Loewis.)
+(Contributed by Martin von~L\"owis.)
 
 \item The \module{nntplib} module's \class{NNTP} class gained
 \method{description()} and \method{descriptions()} methods to retrieve 
@@ -1688,7 +1688,7 @@
 \begin{itemize}
 
 \item The Windows port now builds under MSVC++ 7.1 as well as version 6.
-  (Contributed by Martin von Loewis.)
+  (Contributed by Martin von~L\"owis.)
 
 \end{itemize}
 
diff --git a/Doc/whatsnew/whatsnew25.tex b/Doc/whatsnew/whatsnew25.tex
index 5743285..db6c25a 100644
--- a/Doc/whatsnew/whatsnew25.tex
+++ b/Doc/whatsnew/whatsnew25.tex
@@ -2,9 +2,14 @@
 \usepackage{distutils}
 % $Id$
 
+% The easy_install stuff
+% Describe the pkgutil module
+% Stateful codec changes
+% Fix XXX comments
+% Count up the patches and bugs
 
 \title{What's New in Python 2.5}
-\release{0.0}
+\release{0.1}
 \author{A.M. Kuchling}
 \authoraddress{\email{amk@amk.ca}}
 
@@ -14,7 +19,10 @@
 
 This article explains the new features in Python 2.5.  No release date
 for Python 2.5 has been set; it will probably be released in the
-autumn of 2006.
+autumn of 2006.  \pep{356} describes the planned release schedule.
+
+Comments, suggestions, and error reports are welcome; please e-mail them 
+to the author or open a bug in the Python bug tracker.
 
 % XXX Compare with previous release in 2 - 3 sentences here.
 
@@ -27,6 +35,32 @@
 
 
 %======================================================================
+\section{PEP 243: Uploading Modules to PyPI}
+
+PEP 243 describes an HTTP-based protocol for submitting software
+packages to a central archive.  The Python package index at
+\url{http://cheeseshop.python.org} now supports package uploads, and
+the new \command{upload} Distutils command will upload a package to the
+repository.
+
+Before a package can be uploaded, you must be able to build a
+distribution using the \command{sdist} Distutils command.  Once that
+works, you can run \code{python setup.py upload} to add your package
+to the PyPI archive.  Optionally you can GPG-sign the package by
+supplying the \longprogramopt{sign} and
+\longprogramopt{identity} options.
+
+\begin{seealso}
+
+\seepep{243}{Module Repository Upload Mechanism}{PEP written by
+Sean Reifschneider; implemented by Martin von~L\"owis
+and Richard Jones.  Note that the PEP doesn't exactly 
+describe what's implemented in PyPI.}
+
+\end{seealso}
+
+
+%======================================================================
 \section{PEP 308: Conditional Expressions}
 
 For a long time, people have been requesting a way to write
@@ -43,10 +77,10 @@
 \end{verbatim}
 
 There have been endless tedious discussions of syntax on both
-python-dev and comp.lang.python, and even a vote that found the
-majority of voters wanted some way to write conditional expressions,
-but there was no syntax that was clearly preferred by a majority.
-Candidates include C's \code{cond ? true_v : false_v},
+python-dev and comp.lang.python.  A vote was even held that found the
+majority of voters wanted conditional expressions in some form,
+but there was no syntax that was preferred by a clear majority.
+Candidates included C's \code{cond ? true_v : false_v},
 \code{if cond then true_v else false_v}, and 16 other variations.
 
 GvR eventually chose a surprising syntax:
@@ -55,11 +89,12 @@
 x = true_value if condition else false_value
 \end{verbatim}
 
-Evaluation is still lazy as in existing Boolean expression, so the
-evaluation jumps around a bit.  The \var{condition} expression is
-evaluated first, and the \var{true_value} expression is evaluated only
-if the condition was true.  Similarly, the \var{false_value}
-expression is only evaluated when the condition is false.
+Evaluation is still lazy as in existing Boolean expressions, so the
+order of evaluation jumps around a bit.  The \var{condition}
+expression in the middle is evaluated first, and the \var{true_value}
+expression is evaluated only if the condition was true.  Similarly,
+the \var{false_value} expression is only evaluated when the condition
+is false.
 
 This syntax may seem strange and backwards; why does the condition go
 in the \emph{middle} of the expression, and not in the front as in C's
@@ -110,7 +145,7 @@
 \begin{seealso}
 
 \seepep{308}{Conditional Expressions}{PEP written by
-Guido van Rossum and Raymond D. Hettinger; implemented by Thomas
+Guido van~Rossum and Raymond D. Hettinger; implemented by Thomas
 Wouters.}
 
 \end{seealso}
@@ -192,7 +227,16 @@
 possible to look up an entry in the package index, determine the
 dependencies for a package, and download the required packages.
 
-% XXX put example here
+\begin{verbatim}
+VERSION = '1.0'
+setup(name='PyPackage', 
+      version=VERSION,
+      requires=['numarray', 'zlib (>=1.1.4)'],
+      obsoletes=['OldPackage']
+      download_url=('http://www.example.com/pypackage/dist/pkg-%s.tar.gz'
+                    % VERSION),
+     )
+\end{verbatim}
  
 \begin{seealso}
 
@@ -206,31 +250,178 @@
 %======================================================================
 \section{PEP 328: Absolute and Relative Imports}
 
-% XXX write this
+The simpler part of PEP 328 was implemented in Python 2.4: parentheses
+could now be used to enclose the names imported from a module using
+the \code{from ... import ...} statement, making it easier to import
+many different names.
+
+The more complicated part has been implemented in Python 2.5:
+importing a module can be specified to use absolute or
+package-relative imports.  The plan is to move toward making absolute
+imports the default in future versions of Python.
+
+Let's say you have a package directory like this:
+\begin{verbatim}
+pkg/
+pkg/__init__.py
+pkg/main.py
+pkg/string.py
+\end{verbatim}
+
+This defines a package named \module{pkg} containing the
+\module{pkg.main} and \module{pkg.string} submodules.  
+
+Consider the code in the \file{main.py} module.  What happens if it
+executes the statement \code{import string}?  In Python 2.4 and
+earlier, it will first look in the package's directory to perform a
+relative import, finds \file{pkg/string.py}, imports the contents of
+that file as the \module{pkg.string} module, and that module is bound
+to the name \samp{string} in the \module{pkg.main} module's namespace.
+
+That's fine if \module{pkg.string} was what you wanted.  But what if
+you wanted Python's standard \module{string} module?  There's no clean
+way to ignore \module{pkg.string} and look for the standard module;
+generally you had to look at the contents of \code{sys.modules}, which
+is slightly unclean.   
+Holger Krekel's \module{py.std} package provides a tidier way to perform
+imports from the standard library, \code{import py ; py.std.string.join()},
+but that package isn't available on all Python installations.
+
+Reading code which relies on relative imports is also less clear,
+because a reader may be confused about which module, \module{string}
+or \module{pkg.string}, is intended to be used.  Python users soon
+learned not to duplicate the names of standard library modules in the
+names of their packages' submodules, but you can't protect against
+having your submodule's name being used for a new module added in a
+future version of Python.
+
+In Python 2.5, you can switch \keyword{import}'s behaviour to 
+absolute imports using a \code{from __future__ import absolute_import}
+directive.  This absolute-import behaviour will become the default in
+a future version (probably Python 2.7).  Once absolute imports 
+are the default, \code{import string} will
+always find the standard library's version.
+It's suggested that users should begin using absolute imports as much
+as possible, so it's preferable to begin writing \code{from pkg import
+string} in your code.  
+
+Relative imports are still possible by adding a leading period 
+to the module name when using the \code{from ... import} form:
+
+\begin{verbatim}
+# Import names from pkg.string
+from .string import name1, name2
+# Import pkg.string
+from . import string
+\end{verbatim}
+
+This imports the \module{string} module relative to the current
+package, so in \module{pkg.main} this will import \var{name1} and
+\var{name2} from \module{pkg.string}.  Additional leading periods
+perform the relative import starting from the parent of the current
+package.  For example, code in the \module{A.B.C} module can do:
+
+\begin{verbatim}
+from . import D                 # Imports A.B.D
+from .. import E                # Imports A.E
+from ..F import G               # Imports A.F.G
+\end{verbatim}
+
+Leading periods cannot be used with the \code{import \var{modname}} 
+form of the import statement, only the \code{from ... import} form.
+
+\begin{seealso}
+
+\seepep{328}{Imports: Multi-Line and Absolute/Relative}
+{PEP written by Aahz; implemented by Thomas Wouters.}
+
+\seeurl{http://codespeak.net/py/current/doc/index.html}
+{The py library by Holger Krekel, which contains the \module{py.std} package.}
+
+\end{seealso}
 
 
 %======================================================================
 \section{PEP 338: Executing Modules as Scripts}
 
-% XXX write this
+The \programopt{-m} switch added in Python 2.4 to execute a module as
+a script gained a few more abilities.  Instead of being implemented in
+C code inside the Python interpreter, the switch now uses an
+implementation in a new module, \module{runpy}.
+
+The \module{runpy} module implements a more sophisticated import
+mechanism so that it's now possible to run modules in a package such
+as \module{pychecker.checker}.  The module also supports alternative
+import mechanisms such as the \module{zipimport} module.  This means
+you can add a .zip archive's path to \code{sys.path} and then use the
+\programopt{-m} switch to execute code from the archive.
+
+
+\begin{seealso}
+
+\seepep{338}{Executing modules as scripts}{PEP written and 
+implemented by Nick Coghlan.}
+
+\end{seealso}
 
 
 %======================================================================
 \section{PEP 341: Unified try/except/finally}
 
-% XXX write this
+Until Python 2.5, the \keyword{try} statement came in two
+flavours. You could use a \keyword{finally} block to ensure that code
+is always executed, or one or more \keyword{except} blocks to catch 
+specific exceptions.  You couldn't combine both \keyword{except} blocks and a
+\keyword{finally} block, because generating the right bytecode for the
+combined version was complicated and it wasn't clear what the
+semantics of the combined should be.  
+
+GvR spent some time working with Java, which does support the
+equivalent of combining \keyword{except} blocks and a
+\keyword{finally} block, and this clarified what the statement should
+mean.  In Python 2.5, you can now write:
+
+\begin{verbatim}
+try:
+    block-1 ...
+except Exception1:
+    handler-1 ...
+except Exception2:
+    handler-2 ...
+else:
+    else-block
+finally:
+    final-block 
+\end{verbatim}
+
+The code in \var{block-1} is executed.  If the code raises an
+exception, the handlers are tried in order: \var{handler-1},
+\var{handler-2}, ...  If no exception is raised, the \var{else-block}
+is executed.  No matter what happened previously, the
+\var{final-block} is executed once the code block is complete and any
+raised exceptions handled.  Even if there's an error in an exception
+handler or the \var{else-block} and a new exception is raised, the
+\var{final-block} is still executed.
+
+\begin{seealso}
+
+\seepep{341}{Unifying try-except and try-finally}{PEP written by Georg Brandl; 
+implementation by Thomas Lee.}
+
+\end{seealso}
 
 
 %======================================================================
-\section{PEP 342: New Generator Features}
+\section{PEP 342: New Generator Features\label{section-generators}}
 
+Python 2.5 adds a simple way to pass values \emph{into} a generator.
 As introduced in Python 2.3, generators only produce output; once a
 generator's code is invoked to create an iterator, there's no way to
 pass any new information into the function when its execution is
-resumed.  Hackish solutions to this include making the generator's
-code look at a global variable and then changing the global variable's
+resumed.  Sometimes the ability to pass in some information would be
+useful.  Hackish solutions to this include making the generator's code
+look at a global variable and then changing the global variable's
 value, or passing in some mutable object that callers then modify.
-Python 2.5 adds the ability to pass values \emph{into} a generator.
 
 To refresh your memory of basic generators, here's a simple example:
 
@@ -239,7 +430,7 @@
     i = 0
     while i < maximum:
         yield i
-	i += 1
+        i += 1
 \end{verbatim}
 
 When you call \code{counter(10)}, the result is an iterator that
@@ -261,11 +452,14 @@
 expression when you're doing something with the returned value, as in
 the above example.  The parentheses aren't always necessary, but it's
 easier to always add them instead of having to remember when they're
-needed.\footnote{The exact rules are that a \keyword{yield}-expression must
-always be parenthesized except when it occurs at the top-level
-expression on the right-hand side of an assignment, meaning you can
-write \code{val = yield i} but have to use parentheses when there's an
-operation, as in \code{val = (yield i) + 12}.}
+needed.
+
+(\pep{342} explains the exact rules, which are that a
+\keyword{yield}-expression must always be parenthesized except when it
+occurs at the top-level expression on the right-hand side of an
+assignment.  This means you can write \code{val = yield i} but have to
+use parentheses when there's an operation, as in \code{val = (yield i)
++ 12}.)
 
 Values are sent into a generator by calling its
 \method{send(\var{value})} method.  The generator's code is then
@@ -281,11 +475,11 @@
     i = 0
     while i < maximum:
         val = (yield i)
-	# If value provided, change counter
+        # If value provided, change counter
         if val is not None:
             i = val
-	else:
-  	    i += 1
+        else:
+            i += 1
 \end{verbatim}
 
 And here's an example of changing the counter:
@@ -344,13 +538,32 @@
 subroutines.  Subroutines are entered at one point and exited at
 another point (the top of the function, and a \keyword{return
 statement}), but coroutines can be entered, exited, and resumed at
-many different points (the \keyword{yield} statements).
+many different points (the \keyword{yield} statements).  We'll have to
+figure out patterns for using coroutines effectively in Python.
 
+The addition of the \method{close()} method has one side effect that
+isn't obvious.  \method{close()} is called when a generator is
+garbage-collected, so this means the generator's code gets one last
+chance to run before the generator is destroyed.  This last chance
+means that \code{try...finally} statements in generators can now be
+guaranteed to work; the \keyword{finally} clause will now always get a
+chance to run.  The syntactic restriction that you couldn't mix
+\keyword{yield} statements with a \code{try...finally} suite has
+therefore been removed.  This seems like a minor bit of language
+trivia, but using generators and \code{try...finally} is actually
+necessary in order to implement the  \keyword{with} statement
+described by PEP 343.  I'll look at this new statement in the following 
+section.
+
+Another even more esoteric effect of this change: previously, the
+\member{gi_frame} attribute of a generator was always a frame object.
+It's now possible for \member{gi_frame} to be \code{None}
+once the generator has been exhausted.
 
 \begin{seealso}
 
 \seepep{342}{Coroutines via Enhanced Generators}{PEP written by 
-Guido van Rossum and Phillip J. Eby;
+Guido van~Rossum and Phillip J. Eby;
 implemented by Phillip J. Eby.  Includes examples of 
 some fancier uses of generators as coroutines.}
 
@@ -367,14 +580,323 @@
 %======================================================================
 \section{PEP 343: The 'with' statement}
 
-% XXX write this
+The \keyword{with} statement allows a clearer version of code that
+uses \code{try...finally} blocks to ensure that clean-up code is
+executed.
+
+In this section, I'll discuss the statement as it will commonly be
+used.  In the next section, I'll examine the implementation details
+and show how to write objects called ``context managers'' and
+``contexts'' for use with this statement.
+
+The \keyword{with} statement is a new control-flow structure whose
+basic structure is:
+
+\begin{verbatim}
+with expression [as variable]:
+    with-block
+\end{verbatim}
+
+The expression is evaluated, and it should result in a type of object
+that's called a context manager.  The context manager can return a
+value that can optionally be bound to the name \var{variable}.  (Note
+carefully: \var{variable} is \emph{not} assigned the result of
+\var{expression}.)  One method of the context manager is run before
+\var{with-block} is executed, and another method is run after the
+block is done, even if the block raised an exception.
+
+To enable the statement in Python 2.5, you need 
+to add the following directive to your module:
+
+\begin{verbatim}
+from __future__ import with_statement
+\end{verbatim}
+
+The statement will always be enabled in Python 2.6.
+
+Some standard Python objects can now behave as context managers. File
+objects are one example:
+
+\begin{verbatim}
+with open('/etc/passwd', 'r') as f:
+    for line in f:
+        print line
+        ... more processing code ...
+\end{verbatim}
+
+After this statement has executed, the file object in \var{f} will
+have been automatically closed at this point, even if the 'for' loop
+raised an exception part-way through the block.
+
+The \module{threading} module's locks and condition variables 
+also support the \keyword{with} statement:
+
+\begin{verbatim}
+lock = threading.Lock()
+with lock:
+    # Critical section of code
+    ...
+\end{verbatim}
+
+The lock is acquired before the block is executed, and always released once 
+the block is complete.
+
+The \module{decimal} module's contexts, which encapsulate the desired
+precision and rounding characteristics for computations, can also be
+used as context managers.
+
+\begin{verbatim}
+import decimal
+
+# Displays with default precision of 28 digits
+v1 = decimal.Decimal('578')
+print v1.sqrt()
+
+with decimal.Context(prec=16):
+    # All code in this block uses a precision of 16 digits.
+    # The original context is restored on exiting the block.
+    print v1.sqrt()
+\end{verbatim}
+
+\subsection{Writing Context Managers}
+
+Under the hood, the \keyword{with} statement is fairly complicated.
+Most people will only use \keyword{with} in company with
+existing objects that are documented to work as context managers, and
+don't need to know these details, so you can skip the following section if
+you like.  Authors of new context managers will need to understand the
+details of the underlying implementation.
+
+A high-level explanation of the context management protocol is:
+
+\begin{itemize}
+\item The expression is evaluated and should result in an object
+that's a context manager, meaning that it has a 
+\method{__context__()} method.
+
+\item This object's \method{__context__()} method is called, and must
+return a context object.  
+
+\item The context's \method{__enter__()} method is called. 
+The value returned is assigned to \var{VAR}.  If no \code{as \var{VAR}} 
+clause is present, the value is simply discarded.
+
+\item The code in \var{BLOCK} is executed.
+
+\item If \var{BLOCK} raises an exception, the context object's
+\method{__exit__(\var{type}, \var{value}, \var{traceback})} is called
+with the exception's information, the same values returned by
+\function{sys.exc_info()}.  The method's return value
+controls whether the exception is re-raised: any false value 
+re-raises the exception, and \code{True} will result in suppressing it.
+You'll only rarely want to suppress the exception; the 
+author of the code containing the \keyword{with} statement will 
+never realize anything went wrong.
+
+\item If \var{BLOCK} didn't raise an exception, 
+the context object's \method{__exit__()} is still called,
+but \var{type}, \var{value}, and \var{traceback} are all \code{None}.
+
+\end{itemize}
+
+Let's think through an example.  I won't present detailed code but
+will only sketch the necessary code.  The example will be writing a
+context manager for a database that supports transactions.
+
+(For people unfamiliar with database terminology: a set of changes to
+the database are grouped into a transaction.  Transactions can be
+either committed, meaning that all the changes are written into the
+database, or rolled back, meaning that the changes are all discarded
+and the database is unchanged.  See any database textbook for more
+information.)
+% XXX find a shorter reference?
+
+Let's assume there's an object representing a database connection.
+Our goal will be to let the user write code like this:
+
+\begin{verbatim}
+db_connection = DatabaseConnection()
+with db_connection as cursor:
+    cursor.execute('insert into ...')
+    cursor.execute('delete from ...')
+    # ... more operations ...
+\end{verbatim}
+
+The transaction should either be committed if the code in the block
+runs flawlessly, or rolled back if there's an exception.
+
+First, the \class{DatabaseConnection} needs a \method{__context__()}
+method.  Sometimes an object can be its own context manager and can
+simply return \code{self}; the \module{threading} module's lock objects 
+can do this.  For our database example, though, we need to 
+create a new object; I'll call this class \class{DatabaseContext}.
+Our \method{__context__()} must therefore look like this:
+
+\begin{verbatim}
+class DatabaseConnection:
+    ...
+    def __context__ (self):
+        return DatabaseContext(self)
+
+    # Database interface
+    def cursor (self):
+        "Returns a cursor object and starts a new transaction"
+    def commit (self):
+        "Commits current transaction"
+    def rollback (self):
+        "Rolls back current transaction"
+\end{verbatim}
+
+The context needs the connection object so that the connection
+object's \method{commit()} or \method{rollback()} methods can be
+called:
+
+\begin{verbatim}
+class DatabaseContext:
+    def __init__ (self, connection):
+        self.connection = connection
+\end{verbatim}
+
+The \method {__enter__()} method is pretty easy, having only
+to start a new transaction.  In this example,
+the resulting cursor object would be a useful result,
+so the method will return it.  The user can 
+then add \code{as cursor} to their \keyword{with} statement
+to bind the cursor to a variable name.
+
+\begin{verbatim}
+class DatabaseContext:
+    ...
+    def __enter__ (self):
+        # Code to start a new transaction
+        cursor = self.connection.cursor()
+        return cursor
+\end{verbatim}
+
+The \method{__exit__()} method is the most complicated because it's
+where most of the work has to be done.  The method has to check if an
+exception occurred.  If there was no exception, the transaction is
+committed.  The transaction is rolled back if there was an exception.
+Here the code will just fall off the end of the function, returning 
+the default value of \code{None}.  \code{None} is false, so the exception
+will be re-raised automatically.  If you wished, you could be more explicit
+and add a \keyword{return} at the marked location.
+
+\begin{verbatim}
+class DatabaseContext:
+    ...
+    def __exit__ (self, type, value, tb):
+        if tb is None:
+            # No exception, so commit
+            self.connection.commit()
+        else:
+            # Exception occurred, so rollback.
+            self.connection.rollback()
+            # return False
+\end{verbatim}
+
+
+\subsection{The contextlib module\label{module-contextlib}}
+
+The new \module{contextlib} module provides some functions and a
+decorator that are useful for writing context managers.
+
+The decorator is called \function{contextmanager}, and lets you write
+a simple context manager as a generator.  The generator should yield
+exactly one value.  The code up to the \keyword{yield} will be
+executed as the \method{__enter__()} method, and the value yielded
+will be the method's return value that will get bound to the variable
+in the \keyword{with} statement's \keyword{as} clause, if any.  The
+code after the \keyword{yield} will be executed in the
+\method{__exit__()} method.  Any exception raised in the block 
+will be raised by the \keyword{yield} statement.
+
+Our database example from the previous section could be written 
+using this decorator as:
+
+\begin{verbatim}
+from contextlib import contextmanager
+
+@contextmanager
+def db_transaction (connection):
+    cursor = connection.cursor()
+    try:
+        yield cursor
+    except:
+        connection.rollback()
+        raise
+    else:
+        connection.commit()
+
+db = DatabaseConnection()
+with db_transaction(db) as cursor:
+    ...
+\end{verbatim}
+
+You can also use this decorator to write the \method{__context__()} method
+for a class without creating a new class for the context:
+
+\begin{verbatim}
+class DatabaseConnection:
+
+    @contextmanager
+    def __context__ (self):
+	cursor = self.cursor()
+	try:
+	    yield cursor
+	except:
+	    self.rollback()
+	    raise
+	else:
+	    self.commit()
+\end{verbatim}
+
+
+There's a \function{nested(\var{mgr1}, \var{mgr2}, ...)} manager that
+combines a number of context managers so you don't need to write
+nested \keyword{with} statements.  This example statement does two
+things, starting a database transaction and acquiring a thread lock:
+
+\begin{verbatim}
+lock = threading.Lock()
+with nested (db_transaction(db), lock) as (cursor, locked):
+    ...
+\end{verbatim}
+
+Finally, the \function{closing(\var{object})} context manager 
+returns \var{object} so that it can be bound to a variable,
+and calls \code{\var{object}.close()} at the end of the block.
+
+\begin{verbatim}
+import urllib, sys
+from contextlib import closing
+
+with closing(urllib.urlopen('http://www.yahoo.com')) as f:
+    for line in f:
+        sys.stdout.write(line)
+\end{verbatim}
+
+\begin{seealso}
+
+\seepep{343}{The ``with'' statement}{PEP written by Guido van~Rossum
+and Nick Coghlan; implemented by Mike Bland, Guido van~Rossum, and
+Neal Norwitz.  The PEP shows the code generated for a \keyword{with}
+statement, which can be helpful in learning how context managers
+work.}
+
+\seeurl{../lib/module-contextlib.html}{The documentation 
+for the \module{contextlib} module.}
+
+\end{seealso}
 
 
 %======================================================================
 \section{PEP 352: Exceptions as New-Style Classes}
 
-Exception classes can now be new-style classes, not just classic classes,
-and the built-in \exception{Exception} class and all
+Exception classes can now be new-style classes, not just classic
+classes, and the built-in \exception{Exception} class and all the
+standard built-in exceptions (\exception{NameError},
+\exception{ValueError}, etc.) are now new-style classes.
 
 The inheritance hierarchy for exceptions has been rearranged a bit.
 In 2.5, the inheritance relationships are:
@@ -428,8 +950,70 @@
 
 \begin{seealso}
 
-\seepep{352}{}{PEP written by 
-Brett Cannon and Guido van Rossum; implemented by Brett Cannon.}
+\seepep{352}{Required Superclass for Exceptions}{PEP written by 
+Brett Cannon and Guido van~Rossum; implemented by Brett Cannon.}
+
+\end{seealso}
+
+
+%======================================================================
+\section{PEP 353: Using ssize_t as the index type\label{section-353}}
+
+A wide-ranging change to Python's C API, using a new 
+\ctype{Py_ssize_t} type definition instead of \ctype{int}, 
+will permit the interpreter to handle more data on 64-bit platforms.
+This change doesn't affect Python's capacity on 32-bit platforms.
+
+Various pieces of the Python interpreter used C's \ctype{int} type to
+store sizes or counts; for example, the number of items in a list or
+tuple were stored in an \ctype{int}.  The C compilers for most 64-bit
+platforms still define \ctype{int} as a 32-bit type, so that meant
+that lists could only hold up to \code{2**31 - 1} = 2147483647 items.
+(There are actually a few different programming models that 64-bit C
+compilers can use -- see
+\url{http://www.unix.org/version2/whatsnew/lp64_wp.html} for a
+discussion -- but the most commonly available model leaves \ctype{int}
+as 32 bits.)
+
+A limit of 2147483647 items doesn't really matter on a 32-bit platform
+because you'll run out of memory before hitting the length limit.
+Each list item requires space for a pointer, which is 4 bytes, plus
+space for a \ctype{PyObject} representing the item.  2147483647*4 is
+already more bytes than a 32-bit address space can contain.
+
+It's possible to address that much memory on a 64-bit platform,
+however.  The pointers for a list that size would only require 16GiB
+of space, so it's not unreasonable that Python programmers might
+construct lists that large.  Therefore, the Python interpreter had to
+be changed to use some type other than \ctype{int}, and this will be a
+64-bit type on 64-bit platforms.  The change will cause
+incompatibilities on 64-bit machines, so it was deemed worth making
+the transition now, while the number of 64-bit users is still
+relatively small.  (In 5 or 10 years, we may \emph{all} be on 64-bit
+machines, and the transition would be more painful then.)
+
+This change most strongly affects authors of C extension modules.  
+Python strings and container types such as lists and tuples 
+now use \ctype{Py_ssize_t} to store their size.  
+Functions such as \cfunction{PyList_Size()} 
+now return \ctype{Py_ssize_t}.  Code in extension modules
+may therefore need to have some variables changed to
+\ctype{Py_ssize_t}.  
+
+The \cfunction{PyArg_ParseTuple()} and \cfunction{Py_BuildValue()} functions
+have a new conversion code, \samp{n}, for \ctype{Py_ssize_t}.  
+\cfunction{PyArg_ParseTuple()}'s \samp{s\#} and \samp{t\#} still output
+\ctype{int} by default, but you can define the macro 
+\csimplemacro{PY_SSIZE_T_CLEAN} before including \file{Python.h} 
+to make them return \ctype{Py_ssize_t}.
+
+\pep{353} has a section on conversion guidelines that 
+extension authors should read to learn about supporting 64-bit
+platforms.
+
+\begin{seealso}
+
+\seepep{353}{Using ssize_t as the index type}{PEP written and implemented by Martin von~L\"owis.}
 
 \end{seealso}
 
@@ -437,7 +1021,47 @@
 %======================================================================
 \section{PEP 357: The '__index__' method}
 
-% XXX write this
+The NumPy developers had a problem that could only be solved by adding
+a new special method, \method{__index__}.  When using slice notation,
+as in \code{[\var{start}:\var{stop}:\var{step}]}, the values of the
+\var{start}, \var{stop}, and \var{step} indexes must all be either
+integers or long integers.  NumPy defines a variety of specialized
+integer types corresponding to unsigned and signed integers of 8, 16,
+32, and 64 bits, but there was no way to signal that these types could
+be used as slice indexes.
+
+Slicing can't just use the existing \method{__int__} method because
+that method is also used to implement coercion to integers.  If
+slicing used \method{__int__}, floating-point numbers would also
+become legal slice indexes and that's clearly an undesirable
+behaviour.
+
+Instead, a new special method called \method{__index__} was added.  It
+takes no arguments and returns an integer giving the slice index to
+use.  For example:
+
+\begin{verbatim}
+class C:
+    def __index__ (self):
+        return self.value  
+\end{verbatim}
+
+The return value must be either a Python integer or long integer.
+The interpreter will check that the type returned is correct, and
+raises a \exception{TypeError} if this requirement isn't met.
+
+A corresponding \member{nb_index} slot was added to the C-level
+\ctype{PyNumberMethods} structure to let C extensions implement this
+protocol.  \cfunction{PyNumber_Index(\var{obj})} can be used in
+extension code to call the \method{__index__} function and retrieve
+its result.
+
+\begin{seealso}
+
+\seepep{357}{Allowing Any Object to be Used for Slicing}{PEP written 
+and implemented by Travis Oliphant.}
+
+\end{seealso}
 
 
 %======================================================================
@@ -448,10 +1072,29 @@
 
 \begin{itemize}
 
+\item The \class{dict} type has a new hook for letting subclasses
+provide a default value when a key isn't contained in the dictionary.
+When a key isn't found, the dictionary's
+\method{__missing__(\var{key})}
+method will be called.  This hook is used to implement
+the new \class{defaultdict} class in the \module{collections}
+module.  The following example defines a dictionary 
+that returns zero for any missing key:
+
+\begin{verbatim}
+class zerodict (dict):
+    def __missing__ (self, key):
+        return 0
+
+d = zerodict({1:1, 2:2})
+print d[1], d[2]   # Prints 1, 2
+print d[3], d[4]   # Prints 0, 0
+\end{verbatim}
+
 \item The \function{min()} and \function{max()} built-in functions
 gained a \code{key} keyword argument analogous to the \code{key}
-argument for \method{sort()}.  This argument supplies a function
-that takes a single argument and is called for every value in the list; 
+argument for \method{sort()}.  This argument supplies a function that
+takes a single argument and is called for every value in the list;
 \function{min()}/\function{max()} will return the element with the 
 smallest/largest return value from this function.
 For example, to find the longest string in a list, you can do:
@@ -459,7 +1102,7 @@
 \begin{verbatim}
 L = ['medium', 'longest', 'short']
 # Prints 'longest'
-print max(L, key=len)		   
+print max(L, key=len)              
 # Prints 'short', because lexicographically 'short' has the largest value
 print max(L)         
 \end{verbatim}
@@ -472,9 +1115,18 @@
 returned by the iterator is true; otherwise it will return
 \constant{False}.  \function{all()} returns \constant{True} only if
 all of the values returned by the iterator evaluate as being true.
+(Suggested by GvR, and implemented by Raymond Hettinger.)
 
-% XXX who added?
+\item ASCII is now the default encoding for modules.  It's now 
+a syntax error if a module contains string literals with 8-bit
+characters but doesn't have an encoding declaration.  In Python 2.4
+this triggered a warning, not a syntax error.  See \pep{263} 
+for how to declare a module's encoding; for example, you might add 
+a line like this near the top of the source file:
 
+\begin{verbatim}
+# -*- coding: latin1 -*-
+\end{verbatim}
 
 \item The list of base classes in a class definition can now be empty.  
 As an example, this is now legal:
@@ -489,6 +1141,24 @@
 
 
 %======================================================================
+\subsection{Interactive Interpreter Changes}
+
+In the interactive interpreter, \code{quit} and \code{exit} 
+have long been strings so that new users get a somewhat helpful message
+when they try to quit:
+
+\begin{verbatim}
+>>> quit
+'Use Ctrl-D (i.e. EOF) to exit.'
+\end{verbatim}
+
+In Python 2.5, \code{quit} and \code{exit} are now objects that still
+produce string representations of themselves, but are also callable.
+Newbies who try \code{quit()} or \code{exit()} will now exit the
+interpreter as they expect.  (Implemented by Georg Brandl.)
+
+
+%======================================================================
 \subsection{Optimizations}
 
 \begin{itemize}
@@ -500,43 +1170,135 @@
 and as a result sets will use a third less memory and are somewhat faster.
 (Implemented by Raymond Hettinger.)
 
+\item The performance of some Unicode operations, such as 
+character map decoding, has been improved.
+% Patch 1313939
+
+\item The code generator's peephole optimizer now performs
+simple constant folding in expressions.  If you write something like
+\code{a = 2+3}, the code generator will do the arithmetic and produce
+code corresponding to \code{a = 5}.
+
 \end{itemize}
 
 The net result of the 2.5 optimizations is that Python 2.5 runs the
-pystone benchmark around XX\% faster than Python 2.4.
+pystone benchmark around XXX\% faster than Python 2.4.
 
 
 %======================================================================
 \section{New, Improved, and Deprecated Modules}
 
-As usual, Python's standard library received a number of enhancements and
-bug fixes.  Here's a partial list of the most notable changes, sorted
-alphabetically by module name. Consult the
-\file{Misc/NEWS} file in the source tree for a more
-complete list of changes, or look through the SVN logs for all the
-details.
+The standard library received many enhancements and bug fixes in
+Python 2.5.  Here's a partial list of the most notable changes, sorted
+alphabetically by module name. Consult the \file{Misc/NEWS} file in
+the source tree for a more complete list of changes, or look through
+the SVN logs for all the details.
 
 \begin{itemize}
 
-% ctypes added
-
-% collections.deque now has .remove()
-
 % the cPickle module no longer accepts the deprecated None option in the
 % args tuple returned by __reduce__().
 
-% csv module improvements
+\item The \module{audioop} module now supports the a-LAW encoding,
+and the code for u-LAW encoding has been improved.  (Contributed by
+Lars Immisch.)
 
-% datetime.datetime() now has a strptime class method which can be used to
-% create datetime object using a string and format.
+\item The \module{collections} module gained a new type,
+\class{defaultdict}, that subclasses the standard \class{dict}
+type.  The new type mostly behaves like a dictionary but constructs a
+default value when a key isn't present, automatically adding it to the
+dictionary for the requested key value.
 
-\item A new \module{hashlib} module has been added to replace the
-\module{md5} and \module{sha} modules.  \module{hashlib} adds support
-for additional secure hashes (SHA-224, SHA-256, SHA-384, and SHA-512).
-When available, the module uses OpenSSL for fast platform optimized
-implementations of algorithms.  The old \module{md5} and \module{sha}
-modules still exist as wrappers around hashlib to preserve backwards
-compatibility.  (Contributed by Gregory P. Smith.)
+The first argument to \class{defaultdict}'s constructor is a factory
+function that gets called whenever a key is requested but not found.
+This factory function receives no arguments, so you can use built-in
+type constructors such as \function{list()} or \function{int()}.  For
+example, 
+you can make an index of words based on their initial letter like this:
+
+\begin{verbatim}
+words = """Nel mezzo del cammin di nostra vita
+mi ritrovai per una selva oscura
+che la diritta via era smarrita""".lower().split()
+
+index = defaultdict(list)
+
+for w in words:
+    init_letter = w[0]
+    index[init_letter].append(w)
+\end{verbatim}
+
+Printing \code{index} results in the following output:
+
+\begin{verbatim}
+defaultdict(<type 'list'>, {'c': ['cammin', 'che'], 'e': ['era'], 
+        'd': ['del', 'di', 'diritta'], 'm': ['mezzo', 'mi'], 
+        'l': ['la'], 'o': ['oscura'], 'n': ['nel', 'nostra'], 
+        'p': ['per'], 's': ['selva', 'smarrita'], 
+        'r': ['ritrovai'], 'u': ['una'], 'v': ['vita', 'via']}
+\end{verbatim}
+
+The \class{deque} double-ended queue type supplied by the
+\module{collections} module now has a \method{remove(\var{value})}
+method that removes the first occurrence of \var{value} in the queue,
+raising \exception{ValueError} if the value isn't found.
+
+\item New module: The \module{contextlib} module contains helper functions for use 
+with the new \keyword{with} statement.  See
+section~\ref{module-contextlib} for more about this module.
+(Contributed by Phillip J. Eby.)
+
+\item New module: The \module{cProfile} module is a C implementation of 
+the existing \module{profile} module that has much lower overhead.
+The module's interface is the same as \module{profile}: you run
+\code{cProfile.run('main()')} to profile a function, can save profile
+data to a file, etc.  It's not yet known if the Hotshot profiler,
+which is also written in C but doesn't match the \module{profile}
+module's interface, will continue to be maintained in future versions
+of Python.  (Contributed by Armin Rigo.)
+
+\item The \module{csv} module, which parses files in
+comma-separated value format, received several enhancements and a
+number of bugfixes.  You can now set the maximum size in bytes of a
+field by calling the \method{csv.field_size_limit(\var{new_limit})}
+function; omitting the \var{new_limit} argument will return the
+currently-set limit.  The \class{reader} class now has a
+\member{line_num} attribute that counts the number of physical lines
+read from the source; records can span multiple physical lines, so
+\member{line_num} is not the same as the number of records read.
+(Contributed by Skip Montanaro and Andrew McNamara.)
+
+\item The \class{datetime} class in the \module{datetime} 
+module now has a \method{strptime(\var{string}, \var{format})} 
+method for parsing date strings, contributed by Josh Spoerri.
+It uses the same format characters as \function{time.strptime()} and
+\function{time.strftime()}:
+
+\begin{verbatim}
+from datetime import datetime
+
+ts = datetime.strptime('10:13:15 2006-03-07',
+                       '%H:%M:%S %Y-%m-%d')
+\end{verbatim}
+
+\item The \module{fileinput} module was made more flexible.
+Unicode filenames are now supported, and a \var{mode} parameter that
+defaults to \code{"r"} was added to the
+\function{input()} function to allow opening files in binary or
+universal-newline mode.  Another new parameter, \var{openhook},
+lets you use a function other than \function{open()} 
+to open the input files.  Once you're iterating over 
+the set of files, the \class{FileInput} object's new
+\method{fileno()} returns the file descriptor for the currently opened file.
+(Contributed by Georg Brandl.)
+
+\item In the \module{gc} module, the new \function{get_count()} function
+returns a 3-tuple containing the current collection counts for the
+three GC generations.  This is accounting information for the garbage
+collector; when these counts reach a specified threshold, a garbage
+collection sweep will be made.  The existing \function{gc.collect()}
+function now takes an optional \var{generation} argument of 0, 1, or 2
+to specify which generation to collect.
 
 \item The \function{nsmallest()} and 
 \function{nlargest()} functions in the \module{heapq} module 
@@ -568,6 +1330,11 @@
 
 (Contributed by Raymond Hettinger.)
 
+\item The \module{nis} module now supports accessing domains other
+than the system default domain by supplying a \var{domain} argument to
+the \function{nis.match()} and \function{nis.maps()} functions.
+(Contributed by Ben Bell.)
+
 \item The \module{operator} module's \function{itemgetter()} 
 and \function{attrgetter()} functions now support multiple fields.  
 A call such as \code{operator.attrgetter('a', 'b')}
@@ -575,11 +1342,10 @@
 that retrieves the \member{a} and \member{b} attributes.  Combining 
 this new feature with the \method{sort()} method's \code{key} parameter 
 lets you easily sort lists using multiple fields.
-
-% XXX who added?
+(Contributed by Raymond Hettinger.)
 
 
-\item The \module{os} module underwent a number of changes.  The
+\item The \module{os} module underwent several changes.  The
 \member{stat_float_times} variable now defaults to true, meaning that
 \function{os.stat()} will now return time values as floats.  (This
 doesn't necessarily mean that \function{os.stat()} will return times
@@ -591,11 +1357,34 @@
 \function{os.lseek()} function.  Two new constants for locking are
 \member{os.O_SHLOCK} and \member{os.O_EXLOCK}.
 
+Two new functions, \function{wait3()} and \function{wait4()}, were
+added.  They're similar the \function{waitpid()} function which waits
+for a child process to exit and returns a tuple of the process ID and
+its exit status, but \function{wait3()} and \function{wait4()} return
+additional information.  \function{wait3()} doesn't take a process ID
+as input, so it waits for any child process to exit and returns a
+3-tuple of \var{process-id}, \var{exit-status}, \var{resource-usage}
+as returned from the \function{resource.getrusage()} function.
+\function{wait4(\var{pid})} does take a process ID.
+(Contributed by Chad J. Schroeder.)
+
 On FreeBSD, the \function{os.stat()} function now returns 
 times with nanosecond resolution, and the returned object
 now has \member{st_gen} and \member{st_birthtime}.
 The \member{st_flags} member is also available, if the platform supports it.
-% XXX patch 1180695, 1212117
+(Contributed by Antti Louko and  Diego Petten\`o.)
+% (Patch 1180695, 1212117)
+
+\item The old \module{regex} and \module{regsub} modules, which have been 
+deprecated ever since Python 2.0, have finally been deleted.  
+Other deleted modules: \module{statcache}, \module{tzparse},
+\module{whrandom}.
+
+\item The \file{lib-old} directory,
+which includes ancient modules such as \module{dircmp} and
+\module{ni}, was also deleted.  \file{lib-old} wasn't on the default
+\code{sys.path}, so unless your programs explicitly added the directory to 
+\code{sys.path}, this removal shouldn't affect your code.
 
 \item The \module{socket} module now supports \constant{AF_NETLINK}
 sockets on Linux, thanks to a patch from Philippe Biondi.  
@@ -605,9 +1394,26 @@
 In Python code, netlink addresses are represented as a tuple of 2 integers, 
 \code{(\var{pid}, \var{group_mask})}.
 
-\item New module: \module{spwd} provides functions for accessing the
-shadow password database on systems that support it.  
-% XXX give example
+Socket objects also gained accessor methods \method{getfamily()}, 
+\method{gettype()}, and \method{getproto()} methods to retrieve the
+family, type, and protocol values for the socket.
+
+\item New module: the \module{spwd} module provides functions for
+accessing the shadow password database on systems that support 
+shadow passwords.
+
+\item The Python developers switched from CVS to Subversion during the 2.5
+development process.  Information about the exact build version is 
+available as the \code{sys.subversion} variable, a 3-tuple
+of \code{(\var{interpreter-name}, \var{branch-name}, \var{revision-range})}.
+For example, at the time of writing 
+my copy of 2.5 was reporting \code{('CPython', 'trunk', '45313:45315')}.
+
+This information is also available to C extensions via the 
+\cfunction{Py_GetBuildInfo()} function that returns a 
+string of build information like this:
+\code{"trunk:45355:45356M, Apr 13 2006, 07:42:19"}.  
+(Contributed by Barry Warsaw.)
 
 \item The \class{TarFile} class in the \module{tarfile} module now has
 an \method{extractall()} method that extracts all members from the
@@ -625,17 +1431,28 @@
 by some specifications, so it's still available as 
 \member{unicodedata.db_3_2_0}.
 
-\item A new package \module{xml.etree} has been added, which contains
-a subset of the ElementTree XML library.  Available modules are
-\module{ElementTree}, \module{ElementPath}, and
-\module{ElementInclude}, from ElementTree 1.2.6. (Contributed by
-Fredrik Lundh.)
+\item The \module{webbrowser} module received a number of
+enhancements.
+It's now usable as a script with \code{python -m webbrowser}, taking a
+URL as the argument; there are a number of switches 
+to control the behaviour (\programopt{-n} for a new browser window, 
+\programopt{-t} for a new tab).  New module-level functions,
+\function{open_new()} and \function{open_new_tab()}, were added 
+to support this.  The module's \function{open()} function supports an
+additional feature, an \var{autoraise} parameter that signals whether
+to raise the open window when possible. A number of additional
+browsers were added to the supported list such as Firefox, Opera,
+Konqueror, and elinks.  (Contributed by Oleg Broytmann and George
+Brandl.)
+% Patch #754022
+
 
 \item The \module{xmlrpclib} module now supports returning 
       \class{datetime} objects for the XML-RPC date type.  Supply 
       \code{use_datetime=True} to the \function{loads()} function
       or the \class{Unmarshaller} class to enable this feature.
-% XXX patch 1120353
+      (Contributed by Skip Montanaro.)
+% Patch 1120353
 
 
 \end{itemize}
@@ -643,13 +1460,358 @@
 
 
 %======================================================================
-% whole new modules get described in \subsections here
+\subsection{The ctypes package}
 
-% XXX new distutils features: upload
+The \module{ctypes} package, written by Thomas Heller, has been added 
+to the standard library.  \module{ctypes} lets you call arbitrary functions 
+in shared libraries or DLLs.  Long-time users may remember the \module{dl} module, which 
+provides functions for loading shared libraries and calling functions in them.  The \module{ctypes} package is much fancier.
 
-% XXX should hashlib perhaps be described here instead?
-% XXX should xml.etree perhaps be described here instead?
+To load a shared library or DLL, you must create an instance of the 
+\class{CDLL} class and provide the name or path of the shared library
+or DLL.  Once that's done, you can call arbitrary functions
+by accessing them as attributes of the \class{CDLL} object.  
 
+\begin{verbatim}
+import ctypes
+
+libc = ctypes.CDLL('libc.so.6')
+result = libc.printf("Line of output\n")
+\end{verbatim}
+
+Type constructors for the various C types are provided: \function{c_int},
+\function{c_float}, \function{c_double}, \function{c_char_p} (equivalent to \ctype{char *}), and so forth.  Unlike Python's types, the C versions are all mutable; you can assign to their \member{value} attribute
+to change the wrapped value.  Python integers and strings will be automatically
+converted to the corresponding C types, but for other types you 
+must call the correct type constructor.  (And I mean \emph{must}; 
+getting it wrong will often result in the interpreter crashing 
+with a segmentation fault.)
+
+You shouldn't use \function{c_char_p} with a Python string when the C function will be modifying the memory area, because Python strings are 
+supposed to be immutable; breaking this rule will cause puzzling bugs.  When you need a modifiable memory area,
+use \function{create_string_buffer()}:
+
+\begin{verbatim}
+s = "this is a string"
+buf = ctypes.create_string_buffer(s)
+libc.strfry(buf)
+\end{verbatim}
+
+C functions are assumed to return integers, but you can set
+the \member{restype} attribute of the function object to 
+change this:
+
+\begin{verbatim}
+>>> libc.atof('2.71828')
+-1783957616
+>>> libc.atof.restype = ctypes.c_double
+>>> libc.atof('2.71828')
+2.71828
+\end{verbatim}
+
+\module{ctypes} also provides a wrapper for Python's C API 
+as the \code{ctypes.pythonapi} object.  This object does \emph{not} 
+release the global interpreter lock before calling a function, because the lock must be held when calling into the interpreter's code.  
+There's a \class{py_object()} type constructor that will create a 
+\ctype{PyObject *} pointer.  A simple usage:
+
+\begin{verbatim}
+import ctypes
+
+d = {}
+ctypes.pythonapi.PyObject_SetItem(ctypes.py_object(d),
+          ctypes.py_object("abc"),  ctypes.py_object(1))
+# d is now {'abc', 1}.
+\end{verbatim}
+
+Don't forget to use \class{py_object()}; if it's omitted you end 
+up with a segmentation fault.
+
+\module{ctypes} has been around for a while, but people still write 
+and distribution hand-coded extension modules because you can't rely on \module{ctypes} being present.
+Perhaps developers will begin to write 
+Python wrappers atop a library accessed through \module{ctypes} instead
+of extension modules, now that \module{ctypes} is included with core Python.
+
+\begin{seealso}
+
+\seeurl{http://starship.python.net/crew/theller/ctypes/}
+{The ctypes web page, with a tutorial, reference, and FAQ.}
+
+\end{seealso}
+
+
+%======================================================================
+\subsection{The ElementTree package}
+
+A subset of Fredrik Lundh's ElementTree library for processing XML has
+been added to the standard library as \module{xmlcore.etree}.  The
+available modules are
+\module{ElementTree}, \module{ElementPath}, and
+\module{ElementInclude} from ElementTree 1.2.6.   
+The \module{cElementTree} accelerator module is also included. 
+
+The rest of this section will provide a brief overview of using
+ElementTree.  Full documentation for ElementTree is available at
+\url{http://effbot.org/zone/element-index.htm}.
+
+ElementTree represents an XML document as a tree of element nodes.
+The text content of the document is stored as the \member{.text}
+and \member{.tail} attributes of 
+(This is one of the major differences between ElementTree and 
+the Document Object Model; in the DOM there are many different
+types of node, including \class{TextNode}.)
+
+The most commonly used parsing function is \function{parse()}, that
+takes either a string (assumed to contain a filename) or a file-like
+object and returns an \class{ElementTree} instance:
+
+\begin{verbatim}
+from xmlcore.etree import ElementTree as ET
+
+tree = ET.parse('ex-1.xml')
+
+feed = urllib.urlopen(
+          'http://planet.python.org/rss10.xml')
+tree = ET.parse(feed)
+\end{verbatim}
+
+Once you have an \class{ElementTree} instance, you
+can call its \method{getroot()} method to get the root \class{Element} node.
+
+There's also an \function{XML()} function that takes a string literal
+and returns an \class{Element} node (not an \class{ElementTree}).  
+This function provides a tidy way to incorporate XML fragments,
+approaching the convenience of an XML literal:
+
+\begin{verbatim}
+svg = et.XML("""<svg width="10px" version="1.0">
+             </svg>""")
+svg.set('height', '320px')
+svg.append(elem1)
+\end{verbatim}
+
+Each XML element supports some dictionary-like and some list-like
+access methods.  Dictionary-like operations are used to access attribute
+values, and list-like operations are used to access child nodes.
+
+\begin{tableii}{c|l}{code}{Operation}{Result}
+  \lineii{elem[n]}{Returns n'th child element.}
+  \lineii{elem[m:n]}{Returns list of m'th through n'th child elements.}
+  \lineii{len(elem)}{Returns number of child elements.}
+  \lineii{elem.getchildren()}{Returns list of child elements.}
+  \lineii{elem.append(elem2)}{Adds \var{elem2} as a child.}
+  \lineii{elem.insert(index, elem2)}{Inserts \var{elem2} at the specified location.}
+  \lineii{del elem[n]}{Deletes n'th child element.}
+  \lineii{elem.keys()}{Returns list of attribute names.}
+  \lineii{elem.get(name)}{Returns value of attribute \var{name}.}
+  \lineii{elem.set(name, value)}{Sets new value for attribute \var{name}.}
+  \lineii{elem.attrib}{Retrieves the dictionary containing attributes.}
+  \lineii{del elem.attrib[name]}{Deletes attribute \var{name}.}
+\end{tableii}
+
+Comments and processing instructions are also represented as
+\class{Element} nodes.  To check if a node is a comment or processing
+instructions:
+
+\begin{verbatim}
+if elem.tag is ET.Comment:
+    ...
+elif elem.tag is ET.ProcessingInstruction:
+    ...
+\end{verbatim}
+
+To generate XML output, you should call the
+\method{ElementTree.write()} method.  Like \function{parse()},
+it can take either a string or a file-like object:
+
+\begin{verbatim}
+# Encoding is US-ASCII
+tree.write('output.xml')
+
+# Encoding is UTF-8
+f = open('output.xml', 'w')
+tree.write(f, 'utf-8')
+\end{verbatim}
+
+(Caution: the default encoding used for output is ASCII, which isn't
+very useful for general XML work, raising an exception if there are
+any characters with values greater than 127.  You should always
+specify a different encoding such as UTF-8 that can handle any Unicode
+character.)
+
+This section is only a partial description of the ElementTree interfaces.
+Please read the package's official documentation for more details.
+
+\begin{seealso}
+
+\seeurl{http://effbot.org/zone/element-index.htm}
+{Official documentation for ElementTree.}
+
+
+\end{seealso}
+
+
+%======================================================================
+\subsection{The hashlib package}
+
+A new \module{hashlib} module, written by Gregory P. Smith, 
+has been added to replace the
+\module{md5} and \module{sha} modules.  \module{hashlib} adds support
+for additional secure hashes (SHA-224, SHA-256, SHA-384, and SHA-512).
+When available, the module uses OpenSSL for fast platform optimized
+implementations of algorithms.  
+
+The old \module{md5} and \module{sha} modules still exist as wrappers
+around hashlib to preserve backwards compatibility.  The new module's
+interface is very close to that of the old modules, but not identical.
+The most significant difference is that the constructor functions
+for creating new hashing objects are named differently.
+
+\begin{verbatim}
+# Old versions
+h = md5.md5()   
+h = md5.new()   
+
+# New version 
+h = hashlib.md5()
+
+# Old versions
+h = sha.sha()   
+h = sha.new()   
+
+# New version 
+h = hashlib.sha1()
+
+# Hash that weren't previously available
+h = hashlib.sha224()
+h = hashlib.sha256()
+h = hashlib.sha384()
+h = hashlib.sha512()
+
+# Alternative form
+h = hashlib.new('md5')          # Provide algorithm as a string
+\end{verbatim}
+
+Once a hash object has been created, its methods are the same as before:
+\method{update(\var{string})} hashes the specified string into the 
+current digest state, \method{digest()} and \method{hexdigest()}
+return the digest value as a binary string or a string of hex digits,
+and \method{copy()} returns a new hashing object with the same digest state.
+
+
+%======================================================================
+\subsection{The sqlite3 package}
+
+The pysqlite module (\url{http://www.pysqlite.org}), a wrapper for the
+SQLite embedded database, has been added to the standard library under
+the package name \module{sqlite3}.  
+
+SQLite is a C library that provides a SQL-language database that
+stores data in disk files without requiring a separate server process.
+pysqlite was written by Gerhard H\"aring and provides a SQL interface
+compliant with the DB-API 2.0 specification described by
+\pep{249}. This means that it should be possible to write the first
+version of your applications using SQLite for data storage.  If
+switching to a larger database such as PostgreSQL or Oracle is
+later necessary, the switch should be relatively easy.
+
+If you're compiling the Python source yourself, note that the source
+tree doesn't include the SQLite code, only the wrapper module.
+You'll need to have the SQLite libraries and headers installed before
+compiling Python, and the build process will compile the module when
+the necessary headers are available.
+
+To use the module, you must first create a \class{Connection} object
+that represents the database.  Here the data will be stored in the 
+\file{/tmp/example} file:
+
+\begin{verbatim}
+conn = sqlite3.connect('/tmp/example')
+\end{verbatim}
+
+You can also supply the special name \samp{:memory:} to create
+a database in RAM.
+
+Once you have a \class{Connection}, you can create a \class{Cursor} 
+object and call its \method{execute()} method to perform SQL commands:
+
+\begin{verbatim}
+c = conn.cursor()
+
+# Create table
+c.execute('''create table stocks
+(date timestamp, trans varchar, symbol varchar,
+ qty decimal, price decimal)''')
+
+# Insert a row of data
+c.execute("""insert into stocks
+          values ('2006-01-05','BUY','RHAT',100,35.14)""")
+\end{verbatim}    
+
+Usually your SQL operations will need to use values from Python
+variables.  You shouldn't assemble your query using Python's string
+operations because doing so is insecure; it makes your program
+vulnerable to an SQL injection attack.  
+
+Instead, use SQLite's parameter substitution.  Put \samp{?} as a
+placeholder wherever you want to use a value, and then provide a tuple
+of values as the second argument to the cursor's \method{execute()}
+method.  For example:
+
+\begin{verbatim}    
+# Never do this -- insecure!
+symbol = 'IBM'
+c.execute("... where symbol = '%s'" % symbol)
+
+# Do this instead
+t = (symbol,)
+c.execute('select * from stocks where symbol=?', ('IBM',))
+
+# Larger example
+for t in (('2006-03-28', 'BUY', 'IBM', 1000, 45.00),
+          ('2006-04-05', 'BUY', 'MSOFT', 1000, 72.00),
+          ('2006-04-06', 'SELL', 'IBM', 500, 53.00),
+         ):
+    c.execute('insert into stocks values (?,?,?,?,?)', t)
+\end{verbatim}
+
+To retrieve data after executing a SELECT statement, you can either 
+treat the cursor as an iterator, call the cursor's \method{fetchone()}
+method to retrieve a single matching row, 
+or call \method{fetchall()} to get a list of the matching rows.
+
+This example uses the iterator form:
+
+\begin{verbatim}
+>>> c = conn.cursor()
+>>> c.execute('select * from stocks order by price')
+>>> for row in c:
+...    print row
+...
+(u'2006-01-05', u'BUY', u'RHAT', 100, 35.140000000000001)
+(u'2006-03-28', u'BUY', u'IBM', 1000, 45.0)
+(u'2006-04-06', u'SELL', u'IBM', 500, 53.0)
+(u'2006-04-05', u'BUY', u'MSOFT', 1000, 72.0)
+>>>
+\end{verbatim}
+
+For more information about the SQL dialect supported by SQLite, see 
+\url{http://www.sqlite.org}.
+
+\begin{seealso}
+
+\seeurl{http://www.pysqlite.org}
+{The pysqlite web page.}
+
+\seeurl{http://www.sqlite.org}
+{The SQLite web page; the documentation describes the syntax and the
+available data types for the supported SQL dialect.}
+
+\seepep{249}{Database API Specification 2.0}{PEP written by
+Marc-Andr\'e Lemburg.}
+
+\end{seealso}
 
 
 % ======================================================================
@@ -659,11 +1821,32 @@
 
 \begin{itemize}
 
-\item The design of the bytecode compiler has changed a great deal, no
-longer generating bytecode by traversing the parse tree.  Instead
+\item The largest change to the C API came from \pep{353},
+which modifies the interpreter to use a \ctype{Py_ssize_t} type
+definition instead of \ctype{int}.  See the earlier
+section~\ref{section-353} for a discussion of this change.
+
+\item The design of the bytecode compiler has changed a great deal, to
+no longer generate bytecode by traversing the parse tree.  Instead
 the parse tree is converted to an abstract syntax tree (or AST), and it is 
 the abstract syntax tree that's traversed to produce the bytecode.
 
+It's possible for Python code to obtain AST objects by using the 
+\function{compile()} built-in and specifying \code{_ast.PyCF_ONLY_AST}
+as the value of the 
+\var{flags} parameter:
+
+\begin{verbatim}
+from _ast import PyCF_ONLY_AST
+ast = compile("""a=0
+for i in range(10):
+    a += i
+""", "<string>", 'exec', PyCF_ONLY_AST)
+
+assignment = ast.body[0]
+for_loop = ast.body[1]
+\end{verbatim}
+
 No documentation has been written for the AST code yet.  To start
 learning about it, read the definition of the various AST nodes in
 \file{Parser/Python.asdl}.  A Python script reads this file and
@@ -689,6 +1872,18 @@
 new set, \cfunction{PySet_Add()} and \cfunction{PySet_Discard()} to
 add and remove elements, and \cfunction{PySet_Contains} and
 \cfunction{PySet_Size} to examine the set's state.
+(Contributed by Raymond Hettinger.)
+
+\item C code can now obtain information about the exact revision
+of the Python interpreter by calling the 
+\cfunction{Py_GetBuildInfo()} function that returns a 
+string of build information like this:
+\code{"trunk:45355:45356M, Apr 13 2006, 07:42:19"}.  
+(Contributed by Barry Warsaw.)
+
+\item The CPython interpreter is still written in C, but 
+the code can now be compiled with a {\Cpp} compiler without errors.  
+(Implemented by Anthony Baxter, Martin von~L\"owis, Skip Montanaro.)
 
 \item The \cfunction{PyRange_New()} function was removed.  It was
 never documented, never used in the core code, and had dangerously lax
@@ -700,7 +1895,17 @@
 %======================================================================
 \subsection{Port-Specific Changes}
 
-Platform-specific changes go here.
+\begin{itemize}
+
+\item MacOS X (10.3 and higher): dynamic loading of modules
+now uses the \cfunction{dlopen()} function instead of MacOS-specific
+functions.
+
+\item Windows: \file{.dll} is no longer supported as a filename extension for 
+extension modules.  \file{.pyd} is now the only filename extension that will
+be searched for.
+
+\end{itemize}
 
 
 %======================================================================
@@ -715,7 +1920,38 @@
 
 \begin{itemize}
 
-\item Details go here.
+\item Evan Jones's patch to obmalloc, first described in a talk
+at PyCon DC 2005, was applied.  Python 2.4 allocated small objects in
+256K-sized arenas, but never freed arenas.  With this patch, Python
+will free arenas when they're empty.  The net effect is that on some
+platforms, when you allocate many objects, Python's memory usage may
+actually drop when you delete them, and the memory may be returned to
+the operating system.  (Implemented by Evan Jones, and reworked by Tim
+Peters.)
+
+Note that this change means extension modules need to be more careful
+with how they allocate memory.  Python's API has many different
+functions for allocating memory that are grouped into families.  For
+example, \cfunction{PyMem_Malloc()}, \cfunction{PyMem_Realloc()}, and
+\cfunction{PyMem_Free()} are one family that allocates raw memory,
+while \cfunction{PyObject_Malloc()}, \cfunction{PyObject_Realloc()},
+and \cfunction{PyObject_Free()} are another family that's supposed to
+be used for creating Python objects.  
+
+Previously these different families all reduced to the platform's
+\cfunction{malloc()} and \cfunction{free()} functions.  This meant 
+it didn't matter if you got things wrong and allocated memory with the
+\cfunction{PyMem} function but freed it with the \cfunction{PyObject}
+function.  With the obmalloc change, these families now do different
+things, and mismatches will probably result in a segfault.  You should
+carefully test your C extension modules with Python 2.5.
+
+\item Coverity, a company that markets a source code analysis tool
+  called Prevent, provided the results of their examination of the Python
+  source code.  The analysis found about 60 bugs that 
+  were quickly fixed.  Many of the bugs were refcounting problems, often
+  occurring in error-handling code.  See
+  \url{http://scan.coverity.com} for the statistics.
 
 \end{itemize}
 
@@ -728,24 +1964,32 @@
 
 \begin{itemize}
 
-\item Some old deprecated modules (\module{statcache}, \module{tzparse},
-      \module{whrandom})  have been moved to \file{Lib/lib-old}.
-You can get access to these modules  again by adding the directory 
-to your \code{sys.path}:
+\item ASCII is now the default encoding for modules.  It's now 
+a syntax error if a module contains string literals with 8-bit
+characters but doesn't have an encoding declaration.  In Python 2.4
+this triggered a warning, not a syntax error.
 
-\begin{verbatim}
-import os
-from distutils import sysconfig
+\item The \module{pickle} module no longer uses the deprecated \var{bin} parameter.
 
-lib_dir = sysconfig.get_python_lib(standard_lib=True)
-old_dir = os.path.join(lib_dir, 'lib-old')
-sys.path.append(old_dir)
-\end{verbatim}
+\item Previously, the \member{gi_frame} attribute of a generator
+was always a frame object.  Because of the \pep{342} changes
+described in section~\ref{section-generators}, it's now possible
+for \member{gi_frame} to be \code{None}.
 
-Doing so is discouraged, however; it's better to update any code that
-still uses these modules.
+\item C API: Many functions now use \ctype{Py_ssize_t} 
+instead of \ctype{int} to allow processing more data 
+on 64-bit machines.  Extension code may need to make 
+the same change to avoid warnings and to support 64-bit machines.
+See the earlier
+section~\ref{section-353} for a discussion of this change.
 
-% the pickle module no longer uses the deprecated bin parameter.
+\item C API: 
+The obmalloc changes mean that 
+you must be careful to not mix usage 
+of the \cfunction{PyMem_*()} and \cfunction{PyObject_*()}
+families of functions. Memory allocated with 
+one family's \cfunction{*_Malloc()} must be 
+freed with the corresponding family's \cfunction{*_Free()} function.
 
 \end{itemize}
 
@@ -755,6 +1999,7 @@
 
 The author would like to thank the following people for offering
 suggestions, corrections and assistance with various drafts of this
-article: .
+article: Phillip J. Eby, Kent Johnson, Martin von~L\"owis, Gustavo
+Niemeyer, Mike Rovner, Thomas Wouters.
 
 \end{document}
diff --git a/Grammar/Grammar b/Grammar/Grammar
index 33c37d2..a613de6 100644
--- a/Grammar/Grammar
+++ b/Grammar/Grammar
@@ -7,6 +7,9 @@
 #        with someone who can; ask around on python-dev for help.  Fred
 #        Drake <fdrake@acm.org> will probably be listening there.
 
+# NOTE WELL: You should also follow all the steps listed in PEP 306,
+# "How to Change Python's Grammar"
+
 # Start symbols for the grammar:
 #	single_input is a single interactive statement;
 #	file_input is a module or sequence of commands read from an input file;
@@ -119,11 +122,11 @@
 
 list_iter: list_for | list_if
 list_for: 'for' exprlist 'in' testlist_safe [list_iter]
-list_if: 'if' test [list_iter]
+list_if: 'if' old_test [list_iter]
 
 gen_iter: gen_for | gen_if
 gen_for: 'for' exprlist 'in' or_test [gen_iter]
-gen_if: 'if' test [gen_iter]
+gen_if: 'if' old_test [gen_iter]
 
 testlist1: test (',' test)*
 
diff --git a/Include/Python-ast.h b/Include/Python-ast.h
index b3bc063..3e21030 100644
--- a/Include/Python-ast.h
+++ b/Include/Python-ast.h
@@ -35,9 +35,10 @@
 typedef struct _alias *alias_ty;
 
 
+enum _mod_kind {Module_kind=1, Interactive_kind=2, Expression_kind=3,
+                 Suite_kind=4};
 struct _mod {
-        enum { Module_kind=1, Interactive_kind=2, Expression_kind=3,
-               Suite_kind=4 } kind;
+        enum _mod_kind kind;
         union {
                 struct {
                         asdl_seq *body;
@@ -58,14 +59,15 @@
         } v;
 };
 
+enum _stmt_kind {FunctionDef_kind=1, ClassDef_kind=2, Return_kind=3,
+                  Delete_kind=4, Assign_kind=5, AugAssign_kind=6, Print_kind=7,
+                  For_kind=8, While_kind=9, If_kind=10, With_kind=11,
+                  Raise_kind=12, TryExcept_kind=13, TryFinally_kind=14,
+                  Assert_kind=15, Import_kind=16, ImportFrom_kind=17,
+                  Exec_kind=18, Global_kind=19, Expr_kind=20, Pass_kind=21,
+                  Break_kind=22, Continue_kind=23};
 struct _stmt {
-        enum { FunctionDef_kind=1, ClassDef_kind=2, Return_kind=3,
-               Delete_kind=4, Assign_kind=5, AugAssign_kind=6, Print_kind=7,
-               For_kind=8, While_kind=9, If_kind=10, With_kind=11,
-               Raise_kind=12, TryExcept_kind=13, TryFinally_kind=14,
-               Assert_kind=15, Import_kind=16, ImportFrom_kind=17,
-               Exec_kind=18, Global_kind=19, Expr_kind=20, Pass_kind=21,
-               Break_kind=22, Continue_kind=23 } kind;
+        enum _stmt_kind kind;
         union {
                 struct {
                         identifier name;
@@ -181,12 +183,14 @@
         int col_offset;
 };
 
+enum _expr_kind {BoolOp_kind=1, BinOp_kind=2, UnaryOp_kind=3, Lambda_kind=4,
+                  IfExp_kind=5, Dict_kind=6, ListComp_kind=7,
+                  GeneratorExp_kind=8, Yield_kind=9, Compare_kind=10,
+                  Call_kind=11, Repr_kind=12, Num_kind=13, Str_kind=14,
+                  Attribute_kind=15, Subscript_kind=16, Name_kind=17,
+                  List_kind=18, Tuple_kind=19};
 struct _expr {
-        enum { BoolOp_kind=1, BinOp_kind=2, UnaryOp_kind=3, Lambda_kind=4,
-               IfExp_kind=5, Dict_kind=6, ListComp_kind=7, GeneratorExp_kind=8,
-               Yield_kind=9, Compare_kind=10, Call_kind=11, Repr_kind=12,
-               Num_kind=13, Str_kind=14, Attribute_kind=15, Subscript_kind=16,
-               Name_kind=17, List_kind=18, Tuple_kind=19 } kind;
+        enum _expr_kind kind;
         union {
                 struct {
                         boolop_ty op;
@@ -236,7 +240,7 @@
                 
                 struct {
                         expr_ty left;
-                        asdl_seq *ops;
+                        asdl_int_seq *ops;
                         asdl_seq *comparators;
                 } Compare;
                 
@@ -292,9 +296,9 @@
         int col_offset;
 };
 
+enum _slice_kind {Ellipsis_kind=1, Slice_kind=2, ExtSlice_kind=3, Index_kind=4};
 struct _slice {
-        enum { Ellipsis_kind=1, Slice_kind=2, ExtSlice_kind=3, Index_kind=4 }
-               kind;
+        enum _slice_kind kind;
         union {
                 struct {
                         expr_ty lower;
@@ -323,6 +327,8 @@
         expr_ty type;
         expr_ty name;
         asdl_seq *body;
+        int lineno;
+        int col_offset;
 };
 
 struct _arguments {
@@ -403,7 +409,7 @@
 expr_ty GeneratorExp(expr_ty elt, asdl_seq * generators, int lineno, int
                      col_offset, PyArena *arena);
 expr_ty Yield(expr_ty value, int lineno, int col_offset, PyArena *arena);
-expr_ty Compare(expr_ty left, asdl_seq * ops, asdl_seq * comparators, int
+expr_ty Compare(expr_ty left, asdl_int_seq * ops, asdl_seq * comparators, int
                 lineno, int col_offset, PyArena *arena);
 expr_ty Call(expr_ty func, asdl_seq * args, asdl_seq * keywords, expr_ty
              starargs, expr_ty kwargs, int lineno, int col_offset, PyArena
@@ -427,8 +433,8 @@
 slice_ty Index(expr_ty value, PyArena *arena);
 comprehension_ty comprehension(expr_ty target, expr_ty iter, asdl_seq * ifs,
                                PyArena *arena);
-excepthandler_ty excepthandler(expr_ty type, expr_ty name, asdl_seq * body,
-                               PyArena *arena);
+excepthandler_ty excepthandler(expr_ty type, expr_ty name, asdl_seq * body, int
+                               lineno, int col_offset, PyArena *arena);
 arguments_ty arguments(asdl_seq * args, identifier vararg, identifier kwarg,
                        asdl_seq * defaults, PyArena *arena);
 keyword_ty keyword(identifier arg, expr_ty value, PyArena *arena);
diff --git a/Include/abstract.h b/Include/abstract.h
index b76f257..d4bd588 100644
--- a/Include/abstract.h
+++ b/Include/abstract.h
@@ -4,6 +4,11 @@
 extern "C" {
 #endif
 
+#ifdef PY_SSIZE_T_CLEAN
+#define PyObject_CallFunction _PyObject_CallFunction_SizeT
+#define PyObject_CallMethod _PyObject_CallMethod_SizeT
+#endif
+
 /* Abstract Object Interface (many thanks to Jim Fulton) */
 
 /*
@@ -337,6 +342,11 @@
 	 Python expression: o.method(args).
        */
 
+     PyAPI_FUNC(PyObject *) _PyObject_CallFunction_SizeT(PyObject *callable,
+							 char *format, ...);
+     PyAPI_FUNC(PyObject *) _PyObject_CallMethod_SizeT(PyObject *o,
+						       char *name,
+						       char *format, ...);
 
      PyAPI_FUNC(PyObject *) PyObject_CallFunctionObjArgs(PyObject *callable,
                                                          ...);
diff --git a/Include/asdl.h b/Include/asdl.h
index c1c5603..84e837e 100644
--- a/Include/asdl.h
+++ b/Include/asdl.h
@@ -5,7 +5,9 @@
 typedef PyObject * string;
 typedef PyObject * object;
 
+#ifndef __cplusplus
 typedef enum {false, true} bool;
+#endif
 
 /* It would be nice if the code generated by asdl_c.py was completely
    independent of Python, but it is a goal the requires too much work
@@ -20,7 +22,13 @@
     void *elements[1];
 } asdl_seq;
 
+typedef struct {
+    int size;
+    int elements[1];
+} asdl_int_seq;
+
 asdl_seq *asdl_seq_new(int size, PyArena *arena);
+asdl_int_seq *asdl_int_seq_new(int size, PyArena *arena);
 
 #define asdl_seq_GET(S, I) (S)->elements[(I)]
 #define asdl_seq_LEN(S) ((S) == NULL ? 0 : (S)->size)
diff --git a/Include/code.h b/Include/code.h
index a4509e3..23d9e17 100644
--- a/Include/code.h
+++ b/Include/code.h
@@ -44,7 +44,7 @@
 /* These are no longer used. */
 #define CO_GENERATOR_ALLOWED    0x1000
 #define CO_FUTURE_DIVISION    	0x2000
-#define CO_FUTURE_ABSIMPORT	0x4000 /* absolute import by default */
+#define CO_FUTURE_ABSOLUTE_IMPORT 0x4000 /* do absolute imports by default */
 #define CO_FUTURE_WITH_STATEMENT  0x8000
 #endif
 
@@ -72,6 +72,21 @@
 	((*(co)->co_code->ob_type->tp_as_buffer->bf_getreadbuffer) \
 	 ((co)->co_code, 0, (void **)(pp)))
 
+typedef struct _addr_pair {
+        int ap_lower;
+        int ap_upper;
+} PyAddrPair;
+
+/* Check whether lasti (an instruction offset) falls outside bounds
+   and whether it is a line number that should be traced.  Returns
+   a line number if it should be traced or -1 if the line should not.
+
+   If lasti is not within bounds, updates bounds.
+*/
+
+PyAPI_FUNC(int) PyCode_CheckLineNumber(PyCodeObject* co,
+                                       int lasti, PyAddrPair *bounds);
+
 #ifdef __cplusplus
 }
 #endif
diff --git a/Include/compile.h b/Include/compile.h
index 4ac6982..2bde6fb 100644
--- a/Include/compile.h
+++ b/Include/compile.h
@@ -22,7 +22,7 @@
 #define FUTURE_NESTED_SCOPES "nested_scopes"
 #define FUTURE_GENERATORS "generators"
 #define FUTURE_DIVISION "division"
-#define FUTURE_ABSIMPORT "absolute_import"
+#define FUTURE_ABSOLUTE_IMPORT "absolute_import"
 #define FUTURE_WITH_STATEMENT "with_statement"
 
 struct _mod; /* Declare the existence of this type */
diff --git a/Include/genobject.h b/Include/genobject.h
index f4226ed..ca84432 100644
--- a/Include/genobject.h
+++ b/Include/genobject.h
@@ -13,6 +13,7 @@
 	PyObject_HEAD
 	/* The gi_ prefix is intended to remind of generator-iterator. */
 
+	/* Note: gi_frame can be NULL if the generator is "finished" */
 	struct _frame *gi_frame;
 
 	/* True if generator is being executed. */
@@ -28,6 +29,7 @@
 #define PyGen_CheckExact(op) ((op)->ob_type == &PyGen_Type)
 
 PyAPI_FUNC(PyObject *) PyGen_New(struct _frame *);
+PyAPI_FUNC(int) PyGen_NeedsFinalizing(PyGenObject *);
 
 #ifdef __cplusplus
 }
diff --git a/Include/modsupport.h b/Include/modsupport.h
index c356f03..23d5d3a 100644
--- a/Include/modsupport.h
+++ b/Include/modsupport.h
@@ -17,8 +17,10 @@
 #define PyArg_ParseTupleAndKeywords	_PyArg_ParseTupleAndKeywords_SizeT
 #define PyArg_VaParse			_PyArg_VaParse_SizeT
 #define PyArg_VaParseTupleAndKeywords	_PyArg_VaParseTupleAndKeywords_SizeT
-#define PyArg_BuildValue		_PyArg_BuildValue_SizeT
-#define PyArg_VaBuildValue		_PyArg_VaBuildValue_SizeT
+#define Py_BuildValue			_Py_BuildValue_SizeT
+#define Py_VaBuildValue			_Py_VaBuildValue_SizeT
+#else
+PyAPI_FUNC(PyObject *) _Py_VaBuildValue_SizeT(const char *, va_list);
 #endif
 
 PyAPI_FUNC(int) PyArg_Parse(PyObject *, const char *, ...);
@@ -27,6 +29,7 @@
                                                   const char *, char **, ...);
 PyAPI_FUNC(int) PyArg_UnpackTuple(PyObject *, const char *, Py_ssize_t, Py_ssize_t, ...);
 PyAPI_FUNC(PyObject *) Py_BuildValue(const char *, ...);
+PyAPI_FUNC(PyObject *) _Py_BuildValue_SizeT(const char *, ...);
 PyAPI_FUNC(int) _PyArg_NoKeywords(const char *funcname, PyObject *kw);
 
 PyAPI_FUNC(int) PyArg_VaParse(PyObject *, const char *, va_list);
diff --git a/Include/object.h b/Include/object.h
index 9198007..cdbddfe 100644
--- a/Include/object.h
+++ b/Include/object.h
@@ -327,6 +327,7 @@
 	Py_ssize_t tp_allocs;
 	Py_ssize_t tp_frees;
 	Py_ssize_t tp_maxalloc;
+	struct _typeobject *tp_prev;
 	struct _typeobject *tp_next;
 #endif
 } PyTypeObject;
@@ -566,6 +567,9 @@
 PyAPI_DATA(Py_ssize_t) _Py_RefTotal;
 PyAPI_FUNC(void) _Py_NegativeRefcount(const char *fname,
 					    int lineno, PyObject *op);
+PyAPI_FUNC(PyObject *) _PyDict_Dummy(void);
+PyAPI_FUNC(PyObject *) _PySet_Dummy(void);
+PyAPI_FUNC(Py_ssize_t) _Py_GetRefTotal(void);
 #define _Py_INC_REFTOTAL	_Py_RefTotal++
 #define _Py_DEC_REFTOTAL	_Py_RefTotal--
 #define _Py_REF_DEBUG_COMMA	,
@@ -583,8 +587,9 @@
 
 #ifdef COUNT_ALLOCS
 PyAPI_FUNC(void) inc_count(PyTypeObject *);
+PyAPI_FUNC(void) dec_count(PyTypeObject *);
 #define _Py_INC_TPALLOCS(OP)	inc_count((OP)->ob_type)
-#define _Py_INC_TPFREES(OP)	(OP)->ob_type->tp_frees++
+#define _Py_INC_TPFREES(OP)	dec_count((OP)->ob_type)
 #define _Py_DEC_TPFREES(OP)	(OP)->ob_type->tp_frees--
 #define _Py_COUNT_ALLOCS_COMMA	,
 #else
@@ -630,6 +635,40 @@
 	else						\
 		_Py_Dealloc((PyObject *)(op))
 
+/* Safely decref `op` and set `op` to NULL, especially useful in tp_clear
+ * and tp_dealloc implementatons.
+ *
+ * Note that "the obvious" code can be deadly:
+ *
+ *     Py_XDECREF(op);
+ *     op = NULL;
+ *
+ * Typically, `op` is something like self->containee, and `self` is done
+ * using its `containee` member.  In the code sequence above, suppose
+ * `containee` is non-NULL with a refcount of 1.  Its refcount falls to
+ * 0 on the first line, which can trigger an arbitrary amount of code,
+ * possibly including finalizers (like __del__ methods or weakref callbacks)
+ * coded in Python, which in turn can release the GIL and allow other threads
+ * to run, etc.  Such code may even invoke methods of `self` again, or cause
+ * cyclic gc to trigger, but-- oops! --self->containee still points to the
+ * object being torn down, and it may be in an insane state while being torn
+ * down.  This has in fact been a rich historic source of miserable (rare &
+ * hard-to-diagnose) segfaulting (and other) bugs.
+ *
+ * The safe way is:
+ *
+ *      Py_CLEAR(op);
+ *
+ * That arranges to set `op` to NULL _before_ decref'ing, so that any code
+ * triggered as a side-effect of `op` getting torn down no longer believes
+ * `op` points to a valid object.
+ *
+ * There are cases where it's safe to use the naive code, but they're brittle.
+ * For example, if `op` points to a Python integer, you know that destroying
+ * one of those can't cause problems -- but in part that relies on that
+ * Python integers aren't currently weakly referencable.  Best practice is
+ * to use Py_CLEAR() even if you can't think of a reason for why you need to.
+ */
 #define Py_CLEAR(op)				\
         do {                            	\
                 if (op) {			\
diff --git a/Include/objimpl.h b/Include/objimpl.h
index 7c68194..03b6a8d 100644
--- a/Include/objimpl.h
+++ b/Include/objimpl.h
@@ -101,7 +101,7 @@
 
 /* Macros */
 #ifdef WITH_PYMALLOC
-#ifdef PYMALLOC_DEBUG
+#ifdef PYMALLOC_DEBUG	/* WITH_PYMALLOC && PYMALLOC_DEBUG */
 PyAPI_FUNC(void *) _PyObject_DebugMalloc(size_t nbytes);
 PyAPI_FUNC(void *) _PyObject_DebugRealloc(void *p, size_t nbytes);
 PyAPI_FUNC(void) _PyObject_DebugFree(void *p);
@@ -124,11 +124,7 @@
 #else	/* ! WITH_PYMALLOC */
 #define PyObject_MALLOC		PyMem_MALLOC
 #define PyObject_REALLOC	PyMem_REALLOC
-/* This is an odd one!  For backward compatibility with old extensions, the
-   PyMem "release memory" functions have to invoke the object allocator's
-   free() function.  When pymalloc isn't enabled, that leaves us using
-   the platform free(). */
-#define PyObject_FREE		free
+#define PyObject_FREE		PyMem_FREE
 
 #endif	/* WITH_PYMALLOC */
 
@@ -307,13 +303,13 @@
  * "visit" and "arg".  This is intended to keep tp_traverse functions
  * looking as much alike as possible.
  */
-#define Py_VISIT(op)					\
-        do { 						\
-                if (op) {				\
-                        int vret = visit((op), arg);	\
-                        if (vret)			\
-                                return vret;		\
-                }					\
+#define Py_VISIT(op)							\
+        do { 								\
+                if (op) {						\
+                        int vret = visit((PyObject *)(op), arg);	\
+                        if (vret)					\
+                                return vret;				\
+                }							\
         } while (0)
 
 /* This is here for the sake of backwards compatibility.  Extensions that
diff --git a/Include/pymem.h b/Include/pymem.h
index f8aef29..671f967 100644
--- a/Include/pymem.h
+++ b/Include/pymem.h
@@ -59,6 +59,7 @@
 /* Redirect all memory operations to Python's debugging allocator. */
 #define PyMem_MALLOC		PyObject_MALLOC
 #define PyMem_REALLOC		PyObject_REALLOC
+#define PyMem_FREE		PyObject_FREE
 
 #else	/* ! PYMALLOC_DEBUG */
 
@@ -68,14 +69,10 @@
    pymalloc. To solve these problems, allocate an extra byte. */
 #define PyMem_MALLOC(n)         malloc((n) ? (n) : 1)
 #define PyMem_REALLOC(p, n)     realloc((p), (n) ? (n) : 1)
+#define PyMem_FREE		free
 
 #endif	/* PYMALLOC_DEBUG */
 
-/* In order to avoid breaking old code mixing PyObject_{New, NEW} with
-   PyMem_{Del, DEL} and PyMem_{Free, FREE}, the PyMem "release memory"
-   functions have to be redirected to the object deallocator. */
-#define PyMem_FREE           	PyObject_FREE
-
 /*
  * Type-oriented memory interface
  * ==============================
@@ -95,11 +92,11 @@
 #define PyMem_RESIZE(p, type, n) \
 	( (p) = (type *) PyMem_REALLOC((p), (n) * sizeof(type)) )
 
-/* In order to avoid breaking old code mixing PyObject_{New, NEW} with
-   PyMem_{Del, DEL} and PyMem_{Free, FREE}, the PyMem "release memory"
-   functions have to be redirected to the object deallocator. */
-#define PyMem_Del		PyObject_Free
-#define PyMem_DEL		PyObject_FREE
+/* PyMem{Del,DEL} are left over from ancient days, and shouldn't be used
+ * anymore.  They're just confusing aliases for PyMem_{Free,FREE} now.
+ */
+#define PyMem_Del		PyMem_Free
+#define PyMem_DEL		PyMem_FREE
 
 #ifdef __cplusplus
 }
diff --git a/Include/pyport.h b/Include/pyport.h
index 9111d86..2bce415 100644
--- a/Include/pyport.h
+++ b/Include/pyport.h
@@ -85,6 +85,10 @@
 #   error "Python needs a typedef for Py_uintptr_t in pyport.h."
 #endif /* HAVE_UINTPTR_T */
 
+/* Py_ssize_t is a signed integral type such that sizeof(Py_ssize_t) ==
+ * sizeof(size_t).  C99 doesn't define such a thing directly (size_t is an
+ * unsigned integral type).  See PEP 353 for details.
+ */
 #ifdef HAVE_SSIZE_T
 typedef ssize_t		Py_ssize_t;
 #elif SIZEOF_VOID_P == SIZEOF_SIZE_T
@@ -92,7 +96,46 @@
 #else
 #   error "Python needs a typedef for Py_ssize_t in pyport.h."
 #endif
+
+/* Largest positive value of type Py_ssize_t. */
 #define PY_SSIZE_T_MAX ((Py_ssize_t)(((size_t)-1)>>1))
+/* Smallest negative value of type Py_ssize_t. */
+#define PY_SSIZE_T_MIN (-PY_SSIZE_T_MAX-1)
+
+/* PY_FORMAT_SIZE_T is a platform-specific modifier for use in a printf
+ * format to convert an argument with the width of a size_t or Py_ssize_t.
+ * C99 introduced "z" for this purpose, but not all platforms support that;
+ * e.g., MS compilers use "I" instead.
+ *
+ * These "high level" Python format functions interpret "z" correctly on
+ * all platforms (Python interprets the format string itself, and does whatever
+ * the platform C requires to convert a size_t/Py_ssize_t argument):
+ *
+ *     PyString_FromFormat
+ *     PyErr_Format
+ *     PyString_FromFormatV
+ *
+ * Lower-level uses require that you interpolate the correct format modifier
+ * yourself (e.g., calling printf, fprintf, sprintf, PyOS_snprintf); for
+ * example,
+ *
+ *     Py_ssize_t index;
+ *     fprintf(stderr, "index %" PY_FORMAT_SIZE_T "d sucks\n", index);
+ *
+ * That will expand to %ld, or %Id, or to something else correct for a
+ * Py_ssize_t on the platform.
+ */
+#ifndef PY_FORMAT_SIZE_T
+#   if SIZEOF_SIZE_T == SIZEOF_INT
+#       define PY_FORMAT_SIZE_T ""
+#   elif SIZEOF_SIZE_T == SIZEOF_LONG
+#       define PY_FORMAT_SIZE_T "l"
+#   elif defined(MS_WINDOWS)
+#       define PY_FORMAT_SIZE_T "I"
+#   else
+#       error "This platform's pyconfig.h needs to define PY_FORMAT_SIZE_T"
+#   endif
+#endif
 
 #include <stdlib.h>
 
@@ -367,7 +410,8 @@
  *    typedef int T1 Py_DEPRECATED(2.4);
  *    extern int x() Py_DEPRECATED(2.5);
  */
-#if defined(__GNUC__) && (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1)
+#if defined(__GNUC__) && ((__GNUC__ >= 4) || \
+			  (__GNUC__ == 3) && (__GNUC_MINOR__ >= 1))
 #define Py_DEPRECATED(VERSION_UNUSED) __attribute__((__deprecated__))
 #else
 #define Py_DEPRECATED(VERSION_UNUSED)
@@ -436,14 +480,12 @@
 #endif
 
 
-/*******************************************************************
-On 4.4BSD-descendants, ctype functions serves the whole range of
-wchar_t character set rather than single byte code points only.
-This characteristic can break some operations of string object
-including str.upper() and str.split() on UTF-8 locales.  This
-workaround was provided by Tim Robbins of FreeBSD project.  He said
-the incompatibility will be fixed in FreeBSD 6.
-********************************************************************/
+/* On 4.4BSD-descendants, ctype functions serves the whole range of
+ * wchar_t character set rather than single byte code points only.
+ * This characteristic can break some operations of string object
+ * including str.upper() and str.split() on UTF-8 locales.  This
+ * workaround was provided by Tim Robbins of FreeBSD project.
+ */
 
 #ifdef __FreeBSD__
 #include <osreldate.h>
diff --git a/Include/setobject.h b/Include/setobject.h
index cea95cc..cc93968 100644
--- a/Include/setobject.h
+++ b/Include/setobject.h
@@ -78,10 +78,13 @@
 PyAPI_FUNC(PyObject *) PyFrozenSet_New(PyObject *);
 PyAPI_FUNC(Py_ssize_t) PySet_Size(PyObject *anyset);
 #define PySet_GET_SIZE(so) (((PySetObject *)(so))->used)
+PyAPI_FUNC(int) PySet_Clear(PyObject *set);
 PyAPI_FUNC(int) PySet_Contains(PyObject *anyset, PyObject *key);
 PyAPI_FUNC(int) PySet_Discard(PyObject *set, PyObject *key);
 PyAPI_FUNC(int) PySet_Add(PyObject *set, PyObject *key);
+PyAPI_FUNC(int) _PySet_Next(PyObject *set, Py_ssize_t *pos, PyObject **entry);
 PyAPI_FUNC(PyObject *) PySet_Pop(PyObject *set);
+PyAPI_FUNC(int) _PySet_Update(PyObject *set, PyObject *iterable);
 
 #ifdef __cplusplus
 }
diff --git a/Include/sliceobject.h b/Include/sliceobject.h
index 17f36dc..dbc34b2 100644
--- a/Include/sliceobject.h
+++ b/Include/sliceobject.h
@@ -30,6 +30,7 @@
 
 PyAPI_FUNC(PyObject *) PySlice_New(PyObject* start, PyObject* stop,
                                   PyObject* step);
+PyAPI_FUNC(PyObject *) _PySlice_FromIndices(Py_ssize_t start, Py_ssize_t stop);
 PyAPI_FUNC(int) PySlice_GetIndices(PySliceObject *r, Py_ssize_t length,
                                   Py_ssize_t *start, Py_ssize_t *stop, Py_ssize_t *step);
 PyAPI_FUNC(int) PySlice_GetIndicesEx(PySliceObject *r, Py_ssize_t length,
diff --git a/LICENSE b/LICENSE
index f0fc62a..5affefc 100644
--- a/LICENSE
+++ b/LICENSE
@@ -51,6 +51,8 @@
     2.4             2.3         2004        PSF         yes
     2.4.1           2.4         2005        PSF         yes
     2.4.2           2.4.1       2005        PSF         yes
+    2.4.3           2.4.2       2006        PSF         yes
+    2.5             2.4         2006        PSF         yes
 
 Footnotes:
 
diff --git a/Lib/Queue.py b/Lib/Queue.py
index c6c608b..51ad354 100644
--- a/Lib/Queue.py
+++ b/Lib/Queue.py
@@ -35,6 +35,51 @@
         # Notify not_full whenever an item is removed from the queue;
         # a thread waiting to put is notified then.
         self.not_full = threading.Condition(self.mutex)
+        # Notify all_tasks_done whenever the number of unfinished tasks
+        # drops to zero; thread waiting to join() is notified to resume
+        self.all_tasks_done = threading.Condition(self.mutex)
+        self.unfinished_tasks = 0
+
+    def task_done(self):
+        """Indicate that a formerly enqueued task is complete.
+
+        Used by Queue consumer threads.  For each get() used to fetch a task,
+        a subsequent call to task_done() tells the queue that the processing
+        on the task is complete.
+
+        If a join() is currently blocking, it will resume when all items
+        have been processed (meaning that a task_done() call was received
+        for every item that had been put() into the queue).
+
+        Raises a ValueError if called more times than there were items
+        placed in the queue.
+        """
+        self.all_tasks_done.acquire()
+        try:
+            unfinished = self.unfinished_tasks - 1
+            if unfinished <= 0:
+                if unfinished < 0:
+                    raise ValueError('task_done() called too many times')
+                self.all_tasks_done.notifyAll()
+            self.unfinished_tasks = unfinished
+        finally:
+            self.all_tasks_done.release()
+
+    def join(self):
+        """Blocks until all items in the Queue have been gotten and processed.
+
+        The count of unfinished tasks goes up whenever an item is added to the
+        queue. The count goes down whenever a consumer thread calls task_done()
+        to indicate the item was retrieved and all work on it is complete.
+
+        When the count of unfinished tasks drops to zero, join() unblocks.
+        """
+        self.all_tasks_done.acquire()
+        try:
+            while self.unfinished_tasks:
+                self.all_tasks_done.wait()
+        finally:
+            self.all_tasks_done.release()
 
     def qsize(self):
         """Return the approximate size of the queue (not reliable!)."""
@@ -86,6 +131,7 @@
                         raise Full
                     self.not_full.wait(remaining)
             self._put(item)
+            self.unfinished_tasks += 1
             self.not_empty.notify()
         finally:
             self.not_full.release()
diff --git a/Lib/SimpleXMLRPCServer.py b/Lib/SimpleXMLRPCServer.py
index 156c2ba..a0b44e1 100644
--- a/Lib/SimpleXMLRPCServer.py
+++ b/Lib/SimpleXMLRPCServer.py
@@ -104,7 +104,11 @@
 import SocketServer
 import BaseHTTPServer
 import sys
-import os, fcntl
+import os
+try:
+    import fcntl
+except ImportError:
+    fcntl = None
 
 def resolve_dotted_attribute(obj, attr, allow_dotted_names=True):
     """resolve_dotted_attribute(a, 'b.c.d') => a.b.c.d
@@ -493,7 +497,7 @@
         # [Bug #1222790] If possible, set close-on-exec flag; if a
         # method spawns a subprocess, the subprocess shouldn't have
         # the listening socket open.
-        if hasattr(fcntl, 'FD_CLOEXEC'):
+        if fcntl is not None and hasattr(fcntl, 'FD_CLOEXEC'):
             flags = fcntl.fcntl(self.fileno(), fcntl.F_GETFD)
             flags |= fcntl.FD_CLOEXEC
             fcntl.fcntl(self.fileno(), fcntl.F_SETFD, flags)
diff --git a/Lib/__future__.py b/Lib/__future__.py
index d95ce5f..d8e14d1 100644
--- a/Lib/__future__.py
+++ b/Lib/__future__.py
@@ -64,7 +64,7 @@
 CO_NESTED            = 0x0010   # nested_scopes
 CO_GENERATOR_ALLOWED = 0        # generators (obsolete, was 0x1000)
 CO_FUTURE_DIVISION   = 0x2000   # division
-CO_FUTURE_ABSIMPORT  = 0x4000   # absolute_import
+CO_FUTURE_ABSOLUTE_IMPORT = 0x4000 # perform absolute imports by default
 CO_FUTURE_WITH_STATEMENT  = 0x8000   # with statement
 
 class _Feature:
@@ -109,7 +109,7 @@
 
 absolute_import = _Feature((2, 5, 0, "alpha", 1),
                            (2, 7, 0, "alpha", 0),
-                           CO_FUTURE_ABSIMPORT)
+                           CO_FUTURE_ABSOLUTE_IMPORT)
 
 with_statement = _Feature((2, 5, 0, "alpha", 1),
                           (2, 6, 0, "alpha", 0),
diff --git a/Lib/_threading_local.py b/Lib/_threading_local.py
index 90717a8..f0ce857 100644
--- a/Lib/_threading_local.py
+++ b/Lib/_threading_local.py
@@ -1,9 +1,9 @@
-"""Thread-local objects
+"""Thread-local objects.
 
-(Note that this module provides a Python version of thread
- threading.local class.  Depending on the version of Python you're
- using, there may be a faster one available.  You should always import
- the local class from threading.)
+(Note that this module provides a Python version of the threading.local
+ class.  Depending on the version of Python you're using, there may be a
+ faster one available.  You should always import the `local` class from
+ `threading`.)
 
 Thread-local objects support the management of thread-local data.
 If you have data that you want to be local to a thread, simply create
@@ -133,7 +133,17 @@
 >>> del mydata
 """
 
-# Threading import is at end
+__all__ = ["local"]
+
+# We need to use objects from the threading module, but the threading
+# module may also want to use our `local` class, if support for locals
+# isn't compiled in to the `thread` module.  This creates potential problems
+# with circular imports.  For that reason, we don't import `threading`
+# until the bottom of this file (a hack sufficient to worm around the
+# potential problems).  Note that almost all platforms do have support for
+# locals in the `thread` module, and there is no circular import problem
+# then, so problems introduced by fiddling the order of imports here won't
+# manifest on most boxes.
 
 class _localbase(object):
     __slots__ = '_local__key', '_local__args', '_local__lock'
@@ -202,36 +212,30 @@
         finally:
             lock.release()
 
+    def __del__(self):
+        import threading
 
-    def __del__():
-        threading_enumerate = enumerate
-        __getattribute__ = object.__getattribute__
+        key = object.__getattribute__(self, '_local__key')
 
-        def __del__(self):
-            key = __getattribute__(self, '_local__key')
+        try:
+            threads = list(threading.enumerate())
+        except:
+            # If enumerate fails, as it seems to do during
+            # shutdown, we'll skip cleanup under the assumption
+            # that there is nothing to clean up.
+            return
 
+        for thread in threads:
             try:
-                threads = list(threading_enumerate())
-            except:
-                # if enumerate fails, as it seems to do during
-                # shutdown, we'll skip cleanup under the assumption
-                # that there is nothing to clean up
-                return
+                __dict__ = thread.__dict__
+            except AttributeError:
+                # Thread is dying, rest in peace.
+                continue
 
-            for thread in threads:
+            if key in __dict__:
                 try:
-                    __dict__ = thread.__dict__
-                except AttributeError:
-                    # Thread is dying, rest in peace
-                    continue
+                    del __dict__[key]
+                except KeyError:
+                    pass # didn't have anything in this thread
 
-                if key in __dict__:
-                    try:
-                        del __dict__[key]
-                    except KeyError:
-                        pass # didn't have anything in this thread
-
-        return __del__
-    __del__ = __del__()
-
-from threading import currentThread, enumerate, RLock
+from threading import currentThread, RLock
diff --git a/Lib/bdb.py b/Lib/bdb.py
index 8f808cc..08b48c3 100644
--- a/Lib/bdb.py
+++ b/Lib/bdb.py
@@ -479,10 +479,10 @@
         else:
             disp = 'keep '
         if self.enabled:
-            disp = disp + 'yes'
+            disp = disp + 'yes  '
         else:
-            disp = disp + 'no '
-        print '%-4dbreakpoint    %s at %s:%d' % (self.number, disp,
+            disp = disp + 'no   '
+        print '%-4dbreakpoint   %s at %s:%d' % (self.number, disp,
                              self.file, self.line)
         if self.cond:
             print '\tstop only if %s' % (self.cond,)
diff --git a/Lib/bsddb/__init__.py b/Lib/bsddb/__init__.py
index d3ee773..c004c08 100644
--- a/Lib/bsddb/__init__.py
+++ b/Lib/bsddb/__init__.py
@@ -287,10 +287,9 @@
             cachesize=None, lorder=None, hflags=0):
 
     flags = _checkflag(flag, file)
-    e = _openDBEnv()
+    e = _openDBEnv(cachesize)
     d = db.DB(e)
     d.set_flags(hflags)
-    if cachesize is not None: d.set_cachesize(0, cachesize)
     if pgsize is not None:    d.set_pagesize(pgsize)
     if lorder is not None:    d.set_lorder(lorder)
     if ffactor is not None:   d.set_h_ffactor(ffactor)
@@ -305,9 +304,8 @@
             pgsize=None, lorder=None):
 
     flags = _checkflag(flag, file)
-    e = _openDBEnv()
+    e = _openDBEnv(cachesize)
     d = db.DB(e)
-    if cachesize is not None: d.set_cachesize(0, cachesize)
     if pgsize is not None: d.set_pagesize(pgsize)
     if lorder is not None: d.set_lorder(lorder)
     d.set_flags(btflags)
@@ -324,9 +322,8 @@
             rlen=None, delim=None, source=None, pad=None):
 
     flags = _checkflag(flag, file)
-    e = _openDBEnv()
+    e = _openDBEnv(cachesize)
     d = db.DB(e)
-    if cachesize is not None: d.set_cachesize(0, cachesize)
     if pgsize is not None: d.set_pagesize(pgsize)
     if lorder is not None: d.set_lorder(lorder)
     d.set_flags(rnflags)
@@ -339,8 +336,13 @@
 
 #----------------------------------------------------------------------
 
-def _openDBEnv():
+def _openDBEnv(cachesize):
     e = db.DBEnv()
+    if cachesize is not None:
+        if cachesize >= 20480:
+            e.set_cachesize(0, cachesize)
+        else:
+            raise error, "cachesize must be >= 20480"
     e.open('.', db.DB_PRIVATE | db.DB_CREATE | db.DB_THREAD | db.DB_INIT_LOCK | db.DB_INIT_MPOOL)
     return e
 
@@ -358,7 +360,7 @@
         #flags = db.DB_CREATE | db.DB_TRUNCATE
         # we used db.DB_TRUNCATE flag for this before but BerkeleyDB
         # 4.2.52 changed to disallowed truncate with txn environments.
-        if os.path.isfile(file):
+        if file is not None and os.path.isfile(file):
             os.unlink(file)
     else:
         raise error, "flags should be one of 'r', 'w', 'c' or 'n'"
diff --git a/Lib/bsddb/test/test_all.py b/Lib/bsddb/test/test_all.py
index 972cd06..abfaf47 100644
--- a/Lib/bsddb/test/test_all.py
+++ b/Lib/bsddb/test/test_all.py
@@ -65,6 +65,7 @@
         'test_join',
         'test_lock',
         'test_misc',
+        'test_pickle',
         'test_queue',
         'test_recno',
         'test_thread',
diff --git a/Lib/bsddb/test/test_pickle.py b/Lib/bsddb/test/test_pickle.py
new file mode 100644
index 0000000..3916e5c
--- /dev/null
+++ b/Lib/bsddb/test/test_pickle.py
@@ -0,0 +1,75 @@
+
+import sys, os, string
+import pickle
+try:
+    import cPickle
+except ImportError:
+    cPickle = None
+import unittest
+import glob
+
+try:
+    # For Pythons w/distutils pybsddb
+    from bsddb3 import db
+except ImportError, e:
+    # For Python 2.3
+    from bsddb import db
+
+
+#----------------------------------------------------------------------
+
+class pickleTestCase(unittest.TestCase):
+    """Verify that DBError can be pickled and unpickled"""
+    db_home = 'db_home'
+    db_name = 'test-dbobj.db'
+
+    def setUp(self):
+        homeDir = os.path.join(os.path.dirname(sys.argv[0]), 'db_home')
+        self.homeDir = homeDir
+        try: os.mkdir(homeDir)
+        except os.error: pass
+
+    def tearDown(self):
+        if hasattr(self, 'db'):
+            del self.db
+        if hasattr(self, 'env'):
+            del self.env
+        files = glob.glob(os.path.join(self.homeDir, '*'))
+        for file in files:
+            os.remove(file)
+
+    def _base_test_pickle_DBError(self, pickle):
+        self.env = db.DBEnv()
+        self.env.open(self.homeDir, db.DB_CREATE | db.DB_INIT_MPOOL)
+        self.db = db.DB(self.env)
+        self.db.open(self.db_name, db.DB_HASH, db.DB_CREATE)
+        self.db.put('spam', 'eggs')
+        assert self.db['spam'] == 'eggs'
+        try:
+            self.db.put('spam', 'ham', flags=db.DB_NOOVERWRITE)
+        except db.DBError, egg:
+            pickledEgg = pickle.dumps(egg)
+            #print repr(pickledEgg)
+            rottenEgg = pickle.loads(pickledEgg)
+            if rottenEgg.args != egg.args or type(rottenEgg) != type(egg):
+                raise Exception, (rottenEgg, '!=', egg)
+        else:
+            raise Exception, "where's my DBError exception?!?"
+
+        self.db.close()
+        self.env.close()
+
+    def test01_pickle_DBError(self):
+        self._base_test_pickle_DBError(pickle=pickle)
+
+    if cPickle:
+        def test02_cPickle_DBError(self):
+            self._base_test_pickle_DBError(pickle=cPickle)
+
+#----------------------------------------------------------------------
+
+def test_suite():
+    return unittest.makeSuite(pickleTestCase)
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='test_suite')
diff --git a/Lib/calendar.py b/Lib/calendar.py
index 3ffcff5..7800aae 100644
--- a/Lib/calendar.py
+++ b/Lib/calendar.py
@@ -5,17 +5,32 @@
 Sunday as the last (the European convention). Use setfirstweekday() to
 set the first day of the week (0=Monday, 6=Sunday)."""
 
-import datetime
+from __future__ import with_statement
+import sys, datetime, locale
 
-__all__ = ["error","setfirstweekday","firstweekday","isleap",
-           "leapdays","weekday","monthrange","monthcalendar",
-           "prmonth","month","prcal","calendar","timegm",
-           "month_name", "month_abbr", "day_name", "day_abbr",
-           "weekheader"]
+__all__ = ["IllegalMonthError", "IllegalWeekdayError", "setfirstweekday",
+           "firstweekday", "isleap", "leapdays", "weekday", "monthrange",
+           "monthcalendar", "prmonth", "month", "prcal", "calendar",
+           "timegm", "month_name", "month_abbr", "day_name", "day_abbr"]
 
 # Exception raised for bad input (with string parameter for details)
 error = ValueError
 
+# Exceptions raised for bad input
+class IllegalMonthError(ValueError):
+    def __init__(self, month):
+        self.month = month
+    def __str__(self):
+        return "bad month number %r; must be 1-12" % self.month
+
+
+class IllegalWeekdayError(ValueError):
+    def __init__(self, weekday):
+        self.weekday = weekday
+    def __str__(self):
+        return "bad weekday number %r; must be 0 (Monday) to 6 (Sunday)" % self.weekday
+
+
 # Constants for months referenced later
 January = 1
 February = 2
@@ -30,7 +45,7 @@
 
 class _localized_month:
 
-    _months = [datetime.date(2001, i+1, 1).strftime for i in range(12)]
+    _months = [datetime.date(2001, i+1, 1).strftime for i in xrange(12)]
     _months.insert(0, lambda x: "")
 
     def __init__(self, format):
@@ -46,10 +61,11 @@
     def __len__(self):
         return 13
 
+
 class _localized_day:
 
     # January 1, 2001, was a Monday.
-    _days = [datetime.date(2001, 1, i+1).strftime for i in range(7)]
+    _days = [datetime.date(2001, 1, i+1).strftime for i in xrange(7)]
 
     def __init__(self, format):
         self.format = format
@@ -64,6 +80,7 @@
     def __len__(self):
         return 7
 
+
 # Full and abbreviated names of weekdays
 day_name = _localized_day('%A')
 day_abbr = _localized_day('%a')
@@ -75,23 +92,12 @@
 # Constants for weekdays
 (MONDAY, TUESDAY, WEDNESDAY, THURSDAY, FRIDAY, SATURDAY, SUNDAY) = range(7)
 
-_firstweekday = 0                       # 0 = Monday, 6 = Sunday
-
-def firstweekday():
-    return _firstweekday
-
-def setfirstweekday(weekday):
-    """Set weekday (Monday=0, Sunday=6) to start each week."""
-    global _firstweekday
-    if not MONDAY <= weekday <= SUNDAY:
-        raise ValueError, \
-              'bad weekday number; must be 0 (Monday) to 6 (Sunday)'
-    _firstweekday = weekday
 
 def isleap(year):
     """Return 1 for leap years, 0 for non-leap years."""
     return year % 4 == 0 and (year % 100 != 0 or year % 400 == 0)
 
+
 def leapdays(y1, y2):
     """Return number of leap years in range [y1, y2).
        Assume y1 <= y2."""
@@ -99,128 +105,501 @@
     y2 -= 1
     return (y2//4 - y1//4) - (y2//100 - y1//100) + (y2//400 - y1//400)
 
+
 def weekday(year, month, day):
     """Return weekday (0-6 ~ Mon-Sun) for year (1970-...), month (1-12),
        day (1-31)."""
     return datetime.date(year, month, day).weekday()
 
+
 def monthrange(year, month):
     """Return weekday (0-6 ~ Mon-Sun) and number of days (28-31) for
        year, month."""
     if not 1 <= month <= 12:
-        raise ValueError, 'bad month number'
+        raise IllegalMonthError(month)
     day1 = weekday(year, month, 1)
     ndays = mdays[month] + (month == February and isleap(year))
     return day1, ndays
 
-def monthcalendar(year, month):
-    """Return a matrix representing a month's calendar.
-       Each row represents a week; days outside this month are zero."""
-    day1, ndays = monthrange(year, month)
-    rows = []
-    r7 = range(7)
-    day = (_firstweekday - day1 + 6) % 7 - 5   # for leading 0's in first week
-    while day <= ndays:
-        row = [0, 0, 0, 0, 0, 0, 0]
-        for i in r7:
-            if 1 <= day <= ndays: row[i] = day
-            day = day + 1
-        rows.append(row)
-    return rows
 
-def prweek(theweek, width):
-    """Print a single week (no newline)."""
-    print week(theweek, width),
+class Calendar(object):
+    """
+    Base calendar class. This class doesn't do any formatting. It simply
+    provides data to subclasses.
+    """
 
-def week(theweek, width):
-    """Returns a single week in a string (no newline)."""
-    days = []
-    for day in theweek:
+    def __init__(self, firstweekday=0):
+        self.firstweekday = firstweekday # 0 = Monday, 6 = Sunday
+
+    def getfirstweekday(self):
+        return self._firstweekday % 7
+
+    def setfirstweekday(self, firstweekday):
+        self._firstweekday = firstweekday
+
+    firstweekday = property(getfirstweekday, setfirstweekday)
+
+    def iterweekdays(self):
+        """
+        Return a iterator for one week of weekday numbers starting with the
+        configured first one.
+        """
+        for i in xrange(self.firstweekday, self.firstweekday + 7):
+            yield i%7
+
+    def itermonthdates(self, year, month):
+        """
+        Return an iterator for one month. The iterator will yield datetime.date
+        values and will always iterate through complete weeks, so it will yield
+        dates outside the specified month.
+        """
+        date = datetime.date(year, month, 1)
+        # Go back to the beginning of the week
+        days = (date.weekday() - self.firstweekday) % 7
+        date -= datetime.timedelta(days=days)
+        oneday = datetime.timedelta(days=1)
+        while True:
+            yield date
+            date += oneday
+            if date.month != month and date.weekday() == self.firstweekday:
+                break
+
+    def itermonthdays2(self, year, month):
+        """
+        Like itermonthdates(), but will yield (day number, weekday number)
+        tuples. For days outside the specified month the day number is 0.
+        """
+        for date in self.itermonthdates(year, month):
+            if date.month != month:
+                yield (0, date.weekday())
+            else:
+                yield (date.day, date.weekday())
+
+    def itermonthdays(self, year, month):
+        """
+        Like itermonthdates(), but will yield day numbers tuples. For days
+        outside the specified month the day number is 0.
+        """
+        for date in self.itermonthdates(year, month):
+            if date.month != month:
+                yield 0
+            else:
+                yield date.day
+
+    def monthdatescalendar(self, year, month):
+        """
+        Return a matrix (list of lists) representing a month's calendar.
+        Each row represents a week; week entries are datetime.date values.
+        """
+        dates = list(self.itermonthdates(year, month))
+        return [ dates[i:i+7] for i in xrange(0, len(dates), 7) ]
+
+    def monthdays2calendar(self, year, month):
+        """
+        Return a matrix representing a month's calendar.
+        Each row represents a week; week entries are
+        (day number, weekday number) tuples. Day numbers outside this month
+        are zero.
+        """
+        days = list(self.itermonthdays2(year, month))
+        return [ days[i:i+7] for i in xrange(0, len(days), 7) ]
+
+    def monthdayscalendar(self, year, month):
+        """
+        Return a matrix representing a month's calendar.
+        Each row represents a week; days outside this month are zero.
+        """
+        days = list(self.itermonthdays(year, month))
+        return [ days[i:i+7] for i in xrange(0, len(days), 7) ]
+
+    def yeardatescalendar(self, year, width=3):
+        """
+        Return the data for the specified year ready for formatting. The return
+        value is a list of month rows. Each month row contains upto width months.
+        Each month contains between 4 and 6 weeks and each week contains 1-7
+        days. Days are datetime.date objects.
+        """
+        months = [
+            self.monthdatescalendar(year, i)
+            for i in xrange(January, January+12)
+        ]
+        return [months[i:i+width] for i in xrange(0, len(months), width) ]
+
+    def yeardays2calendar(self, year, width=3):
+        """
+        Return the data for the specified year ready for formatting (similar to
+        yeardatescalendar()). Entries in the week lists are
+        (day number, weekday number) tuples. Day numbers outside this month are
+        zero.
+        """
+        months = [
+            self.monthdays2calendar(year, i)
+            for i in xrange(January, January+12)
+        ]
+        return [months[i:i+width] for i in xrange(0, len(months), width) ]
+
+    def yeardayscalendar(self, year, width=3):
+        """
+        Return the data for the specified year ready for formatting (similar to
+        yeardatescalendar()). Entries in the week lists are day numbers.
+        Day numbers outside this month are zero.
+        """
+        months = [
+            self.monthdayscalendar(year, i)
+            for i in xrange(January, January+12)
+        ]
+        return [months[i:i+width] for i in xrange(0, len(months), width) ]
+
+
+class TextCalendar(Calendar):
+    """
+    Subclass of Calendar that outputs a calendar as a simple plain text
+    similar to the UNIX program cal.
+    """
+
+    def prweek(self, theweek, width):
+        """
+        Print a single week (no newline).
+        """
+        print self.week(theweek, width),
+
+    def formatday(self, day, weekday, width):
+        """
+        Returns a formatted day.
+        """
         if day == 0:
             s = ''
         else:
             s = '%2i' % day             # right-align single-digit days
-        days.append(s.center(width))
-    return ' '.join(days)
+        return s.center(width)
 
-def weekheader(width):
-    """Return a header for a week."""
-    if width >= 9:
-        names = day_name
-    else:
-        names = day_abbr
-    days = []
-    for i in range(_firstweekday, _firstweekday + 7):
-        days.append(names[i%7][:width].center(width))
-    return ' '.join(days)
+    def formatweek(self, theweek, width):
+        """
+        Returns a single week in a string (no newline).
+        """
+        return ' '.join(self.formatday(d, wd, width) for (d, wd) in theweek)
 
-def prmonth(theyear, themonth, w=0, l=0):
-    """Print a month's calendar."""
-    print month(theyear, themonth, w, l),
+    def formatweekday(self, day, width):
+        """
+        Returns a formatted week day name.
+        """
+        if width >= 9:
+            names = day_name
+        else:
+            names = day_abbr
+        return names[day][:width].center(width)
 
-def month(theyear, themonth, w=0, l=0):
-    """Return a month's calendar string (multi-line)."""
-    w = max(2, w)
-    l = max(1, l)
-    s = ("%s %r" % (month_name[themonth], theyear)).center(
-                 7 * (w + 1) - 1).rstrip() + \
-         '\n' * l + weekheader(w).rstrip() + '\n' * l
-    for aweek in monthcalendar(theyear, themonth):
-        s = s + week(aweek, w).rstrip() + '\n' * l
-    return s[:-l] + '\n'
+    def formatweekheader(self, width):
+        """
+        Return a header for a week.
+        """
+        return ' '.join(self.formatweekday(i, width) for i in self.iterweekdays())
 
-# Spacing of month columns for 3-column year calendar
+    def formatmonthname(self, theyear, themonth, width, withyear=True):
+        """
+        Return a formatted month name.
+        """
+        s = month_name[themonth]
+        if withyear:
+            s = "%s %r" % (s, theyear)
+        return s.center(width)
+
+    def prmonth(self, theyear, themonth, w=0, l=0):
+        """
+        Print a month's calendar.
+        """
+        print self.formatmonth(theyear, themonth, w, l),
+
+    def formatmonth(self, theyear, themonth, w=0, l=0):
+        """
+        Return a month's calendar string (multi-line).
+        """
+        w = max(2, w)
+        l = max(1, l)
+        s = self.formatmonthname(theyear, themonth, 7 * (w + 1) - 1)
+        s = s.rstrip()
+        s += '\n' * l
+        s += self.formatweekheader(w).rstrip()
+        s += '\n' * l
+        for week in self.monthdays2calendar(theyear, themonth):
+            s += self.formatweek(week, w).rstrip()
+            s += '\n' * l
+        return s
+
+    def formatyear(self, theyear, w=2, l=1, c=6, m=3):
+        """
+        Returns a year's calendar as a multi-line string.
+        """
+        w = max(2, w)
+        l = max(1, l)
+        c = max(2, c)
+        colwidth = (w + 1) * 7 - 1
+        v = []
+        a = v.append
+        a(repr(theyear).center(colwidth*m+c*(m-1)).rstrip())
+        a('\n'*l)
+        header = self.formatweekheader(w)
+        for (i, row) in enumerate(self.yeardays2calendar(theyear, m)):
+            # months in this row
+            months = xrange(m*i+1, min(m*(i+1)+1, 13))
+            a('\n'*l)
+            names = (self.formatmonthname(theyear, k, colwidth, False)
+                     for k in months)
+            a(formatstring(names, colwidth, c).rstrip())
+            a('\n'*l)
+            headers = (header for k in months)
+            a(formatstring(headers, colwidth, c).rstrip())
+            a('\n'*l)
+            # max number of weeks for this row
+            height = max(len(cal) for cal in row)
+            for j in xrange(height):
+                weeks = []
+                for cal in row:
+                    if j >= len(cal):
+                        weeks.append('')
+                    else:
+                        weeks.append(self.formatweek(cal[j], w))
+                a(formatstring(weeks, colwidth, c).rstrip())
+                a('\n' * l)
+        return ''.join(v)
+
+    def pryear(self, theyear, w=0, l=0, c=6, m=3):
+        """Print a year's calendar."""
+        print self.formatyear(theyear, w, l, c, m)
+
+
+class HTMLCalendar(Calendar):
+    """
+    This calendar returns complete HTML pages.
+    """
+
+    # CSS classes for the day <td>s
+    cssclasses = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
+
+    def formatday(self, day, weekday):
+        """
+        Return a day as a table cell.
+        """
+        if day == 0:
+            return '<td class="noday">&nbsp;</td>' # day outside month
+        else:
+            return '<td class="%s">%d</td>' % (self.cssclasses[weekday], day)
+
+    def formatweek(self, theweek):
+        """
+        Return a complete week as a table row.
+        """
+        s = ''.join(self.formatday(d, wd) for (d, wd) in theweek)
+        return '<tr>%s</tr>' % s
+
+    def formatweekday(self, day):
+        """
+        Return a weekday name as a table header.
+        """
+        return '<th class="%s">%s</th>' % (self.cssclasses[day], day_abbr[day])
+
+    def formatweekheader(self):
+        """
+        Return a header for a week as a table row.
+        """
+        s = ''.join(self.formatweekday(i) for i in self.iterweekdays())
+        return '<tr>%s</tr>' % s
+
+    def formatmonthname(self, theyear, themonth, withyear=True):
+        """
+        Return a month name as a table row.
+        """
+        if withyear:
+            s = '%s %s' % (month_name[themonth], theyear)
+        else:
+            s = '%s' % month_name[themonth]
+        return '<tr><th colspan="7" class="month">%s</th></tr>' % s
+
+    def formatmonth(self, theyear, themonth, withyear=True):
+        """
+        Return a formatted month as a table.
+        """
+        v = []
+        a = v.append
+        a('<table border="0" cellpadding="0" cellspacing="0" class="month">')
+        a('\n')
+        a(self.formatmonthname(theyear, themonth, withyear=withyear))
+        a('\n')
+        a(self.formatweekheader())
+        a('\n')
+        for week in self.monthdays2calendar(theyear, themonth):
+            a(self.formatweek(week))
+            a('\n')
+        a('</table>')
+        a('\n')
+        return ''.join(v)
+
+    def formatyear(self, theyear, width=3):
+        """
+        Return a formatted year as a table of tables.
+        """
+        v = []
+        a = v.append
+        width = max(width, 1)
+        a('<table border="0" cellpadding="0" cellspacing="0" class="year">')
+        a('\n')
+        a('<tr><th colspan="%d" class="year">%s</th></tr>' % (width, theyear))
+        for i in xrange(January, January+12, width):
+            # months in this row
+            months = xrange(i, min(i+width, 13))
+            a('<tr>')
+            for m in months:
+                a('<td>')
+                a(self.formatmonth(theyear, m, withyear=False))
+                a('</td>')
+            a('</tr>')
+        a('</table>')
+        return ''.join(v)
+
+    def formatyearpage(self, theyear, width=3, css='calendar.css', encoding=None):
+        """
+        Return a formatted year as a complete HTML page.
+        """
+        if encoding is None:
+            encoding = sys.getdefaultencoding()
+        v = []
+        a = v.append
+        a('<?xml version="1.0" encoding="%s"?>\n' % encoding)
+        a('<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">\n')
+        a('<html>\n')
+        a('<head>\n')
+        a('<meta http-equiv="Content-Type" content="text/html; charset=%s" />\n' % encoding)
+        if css is not None:
+            a('<link rel="stylesheet" type="text/css" href="%s" />\n' % css)
+        a('<title>Calendar for %d</title\n' % theyear)
+        a('</head>\n')
+        a('<body>\n')
+        a(self.formatyear(theyear, width))
+        a('</body>\n')
+        a('</html>\n')
+        return ''.join(v).encode(encoding, "xmlcharrefreplace")
+
+
+class TimeEncoding:
+    def __init__(self, locale):
+        self.locale = locale
+
+    def __context__(self):
+        return self
+
+    def __enter__(self):
+        self.oldlocale = locale.setlocale(locale.LC_TIME, self.locale)
+        return locale.getlocale(locale.LC_TIME)[1]
+
+    def __exit__(self, *args):
+        locale.setlocale(locale.LC_TIME, self.oldlocale)
+
+
+class LocaleTextCalendar(TextCalendar):
+    """
+    This class can be passed a locale name in the constructor and will return
+    month and weekday names in the specified locale. If this locale includes
+    an encoding all strings containing month and weekday names will be returned
+    as unicode.
+    """
+
+    def __init__(self, firstweekday=0, locale=None):
+        TextCalendar.__init__(self, firstweekday)
+        if locale is None:
+            locale = locale.getdefaultlocale()
+        self.locale = locale
+
+    def formatweekday(self, day, width):
+        with TimeEncoding(self.locale) as encoding:
+            if width >= 9:
+                names = day_name
+            else:
+                names = day_abbr
+            name = names[day]
+            if encoding is not None:
+                name = name.decode(encoding)
+            return name[:width].center(width)
+
+    def formatmonthname(self, theyear, themonth, width, withyear=True):
+        with TimeEncoding(self.locale) as encoding:
+            s = month_name[themonth]
+            if encoding is not None:
+                s = s.decode(encoding)
+            if withyear:
+                s = "%s %r" % (s, theyear)
+            return s.center(width)
+
+
+class LocaleHTMLCalendar(HTMLCalendar):
+    """
+    This class can be passed a locale name in the constructor and will return
+    month and weekday names in the specified locale. If this locale includes
+    an encoding all strings containing month and weekday names will be returned
+    as unicode.
+    """
+    def __init__(self, firstweekday=0, locale=None):
+        HTMLCalendar.__init__(self, firstweekday)
+        if locale is None:
+            locale = locale.getdefaultlocale()
+        self.locale = locale
+
+    def formatweekday(self, day):
+        with TimeEncoding(self.locale) as encoding:
+            s = day_abbr[day]
+            if encoding is not None:
+                s = s.decode(encoding)
+            return '<th class="%s">%s</th>' % (self.cssclasses[day], s)
+
+    def formatmonthname(self, theyear, themonth, withyear=True):
+        with TimeEncoding(self.locale) as encoding:
+            s = month_name[themonth]
+            if encoding is not None:
+                s = s.decode(encoding)
+            if withyear:
+                s = '%s %s' % (s, theyear)
+            return '<tr><th colspan="7" class="month">%s</th></tr>' % s
+
+
+# Support for old module level interface
+c = TextCalendar()
+
+firstweekday = c.getfirstweekday
+
+def setfirstweekday(firstweekday):
+    if not MONDAY <= firstweekday <= SUNDAY:
+        raise IllegalWeekdayError(firstweekday)
+    c.firstweekday = firstweekday
+
+monthcalendar = c.monthdayscalendar
+prweek = c.prweek
+week = c.formatweek
+weekheader = c.formatweekheader
+prmonth = c.prmonth
+month = c.formatmonth
+calendar = c.formatyear
+prcal = c.pryear
+
+
+# Spacing of month columns for multi-column year calendar
 _colwidth = 7*3 - 1         # Amount printed by prweek()
 _spacing = 6                # Number of spaces between columns
 
-def format3c(a, b, c, colwidth=_colwidth, spacing=_spacing):
-    """Prints 3-column formatting for year calendars"""
-    print format3cstring(a, b, c, colwidth, spacing)
 
-def format3cstring(a, b, c, colwidth=_colwidth, spacing=_spacing):
-    """Returns a string formatted from 3 strings, centered within 3 columns."""
-    return (a.center(colwidth) + ' ' * spacing + b.center(colwidth) +
-            ' ' * spacing + c.center(colwidth))
+def format(cols, colwidth=_colwidth, spacing=_spacing):
+    """Prints multi-column formatting for year calendars"""
+    print formatstring(cols, colwidth, spacing)
 
-def prcal(year, w=0, l=0, c=_spacing):
-    """Print a year's calendar."""
-    print calendar(year, w, l, c),
 
-def calendar(year, w=0, l=0, c=_spacing):
-    """Returns a year's calendar as a multi-line string."""
-    w = max(2, w)
-    l = max(1, l)
-    c = max(2, c)
-    colwidth = (w + 1) * 7 - 1
-    s = repr(year).center(colwidth * 3 + c * 2).rstrip() + '\n' * l
-    header = weekheader(w)
-    header = format3cstring(header, header, header, colwidth, c).rstrip()
-    for q in range(January, January+12, 3):
-        s = (s + '\n' * l +
-             format3cstring(month_name[q], month_name[q+1], month_name[q+2],
-                            colwidth, c).rstrip() +
-             '\n' * l + header + '\n' * l)
-        data = []
-        height = 0
-        for amonth in range(q, q + 3):
-            cal = monthcalendar(year, amonth)
-            if len(cal) > height:
-                height = len(cal)
-            data.append(cal)
-        for i in range(height):
-            weeks = []
-            for cal in data:
-                if i >= len(cal):
-                    weeks.append('')
-                else:
-                    weeks.append(week(cal[i], w))
-            s = s + format3cstring(weeks[0], weeks[1], weeks[2],
-                                   colwidth, c).rstrip() + '\n' * l
-    return s[:-l] + '\n'
+def formatstring(cols, colwidth=_colwidth, spacing=_spacing):
+    """Returns a string formatted from n strings, centered within n columns."""
+    spacing *= ' '
+    return spacing.join(c.center(colwidth) for c in cols)
+
 
 EPOCH = 1970
 _EPOCH_ORD = datetime.date(EPOCH, 1, 1).toordinal()
 
+
 def timegm(tuple):
     """Unrelated but handy function to calculate Unix timestamp from GMT."""
     year, month, day, hour, minute, second = tuple[:6]
@@ -229,3 +608,97 @@
     minutes = hours*60 + minute
     seconds = minutes*60 + second
     return seconds
+
+
+def main(args):
+    import optparse
+    parser = optparse.OptionParser(usage="usage: %prog [options] [year [month]]")
+    parser.add_option(
+        "-w", "--width",
+        dest="width", type="int", default=2,
+        help="width of date column (default 2, text only)"
+    )
+    parser.add_option(
+        "-l", "--lines",
+        dest="lines", type="int", default=1,
+        help="number of lines for each week (default 1, text only)"
+    )
+    parser.add_option(
+        "-s", "--spacing",
+        dest="spacing", type="int", default=6,
+        help="spacing between months (default 6, text only)"
+    )
+    parser.add_option(
+        "-m", "--months",
+        dest="months", type="int", default=3,
+        help="months per row (default 3, text only)"
+    )
+    parser.add_option(
+        "-c", "--css",
+        dest="css", default="calendar.css",
+        help="CSS to use for page (html only)"
+    )
+    parser.add_option(
+        "-L", "--locale",
+        dest="locale", default=None,
+        help="locale to be used from month and weekday names"
+    )
+    parser.add_option(
+        "-e", "--encoding",
+        dest="encoding", default=None,
+        help="Encoding to use for output"
+    )
+    parser.add_option(
+        "-t", "--type",
+        dest="type", default="text",
+        choices=("text", "html"),
+        help="output type (text or html)"
+    )
+
+    (options, args) = parser.parse_args(args)
+
+    if options.locale and not options.encoding:
+        parser.error("if --locale is specified --encoding is required")
+        sys.exit(1)
+
+    if options.type == "html":
+        if options.locale:
+            cal = LocaleHTMLCalendar(locale=options.locale)
+        else:
+            cal = HTMLCalendar()
+        encoding = options.encoding
+        if encoding is None:
+            encoding = sys.getdefaultencoding()
+        optdict = dict(encoding=encoding, css=options.css)
+        if len(args) == 1:
+            print cal.formatyearpage(datetime.date.today().year, **optdict)
+        elif len(args) == 2:
+            print cal.formatyearpage(int(args[1]), **optdict)
+        else:
+            parser.error("incorrect number of arguments")
+            sys.exit(1)
+    else:
+        if options.locale:
+            cal = LocaleTextCalendar(locale=options.locale)
+        else:
+            cal = TextCalendar()
+        optdict = dict(w=options.width, l=options.lines)
+        if len(args) != 3:
+            optdict["c"] = options.spacing
+            optdict["m"] = options.months
+        if len(args) == 1:
+            result = cal.formatyear(datetime.date.today().year, **optdict)
+        elif len(args) == 2:
+            result = cal.formatyear(int(args[1]), **optdict)
+        elif len(args) == 3:
+            result = cal.formatmonth(int(args[1]), int(args[2]), **optdict)
+        else:
+            parser.error("incorrect number of arguments")
+            sys.exit(1)
+        if options.encoding:
+            result = result.encode(options.encoding)
+        print result
+
+
+if __name__ == "__main__":
+    main(sys.argv)
diff --git a/Lib/codecs.py b/Lib/codecs.py
index 28856c7..1518d75 100644
--- a/Lib/codecs.py
+++ b/Lib/codecs.py
@@ -14,8 +14,7 @@
 try:
     from _codecs import *
 except ImportError, why:
-    raise SystemError,\
-          'Failed to load the builtin codecs: %s' % why
+    raise SystemError('Failed to load the builtin codecs: %s' % why)
 
 __all__ = ["register", "lookup", "open", "EncodedFile", "BOM", "BOM_BE",
            "BOM_LE", "BOM32_BE", "BOM32_LE", "BOM64_BE", "BOM64_LE",
@@ -156,13 +155,13 @@
 
 class IncrementalEncoder(object):
     """
-    A IncrementalEncoder encodes an input in multiple steps. The input can be
+    An IncrementalEncoder encodes an input in multiple steps. The input can be
     passed piece by piece to the encode() method. The IncrementalEncoder remembers
     the state of the Encoding process between calls to encode().
     """
     def __init__(self, errors='strict'):
         """
-        Creates a IncrementalEncoder instance.
+        Creates an IncrementalEncoder instance.
 
         The IncrementalEncoder may use different error handling schemes by
         providing the errors keyword argument. See the module docstring
@@ -182,6 +181,33 @@
         Resets the encoder to the initial state.
         """
 
+class BufferedIncrementalEncoder(IncrementalEncoder):
+    """
+    This subclass of IncrementalEncoder can be used as the baseclass for an
+    incremental encoder if the encoder must keep some of the output in a
+    buffer between calls to encode().
+    """
+    def __init__(self, errors='strict'):
+        IncrementalEncoder.__init__(self, errors)
+        self.buffer = "" # unencoded input that is kept between calls to encode()
+
+    def _buffer_encode(self, input, errors, final):
+        # Overwrite this method in subclasses: It must encode input
+        # and return an (output, length consumed) tuple
+        raise NotImplementedError
+
+    def encode(self, input, final=False):
+        # encode input (taking the buffer into account)
+        data = self.buffer + input
+        (result, consumed) = self._buffer_encode(data, self.errors, final)
+        # keep unencoded input until the next call
+        self.buffer = data[consumed:]
+        return result
+
+    def reset(self):
+        IncrementalEncoder.reset(self)
+        self.buffer = ""
+
 class IncrementalDecoder(object):
     """
     An IncrementalDecoder decodes an input in multiple steps. The input can be
@@ -234,7 +260,7 @@
 
     def reset(self):
         IncrementalDecoder.reset(self)
-        self.bytebuffer = ""
+        self.buffer = ""
 
 #
 # The StreamWriter and StreamReader class provide generic working
diff --git a/Lib/contextlib.py b/Lib/contextlib.py
index 0a5d608..aa5335d 100644
--- a/Lib/contextlib.py
+++ b/Lib/contextlib.py
@@ -30,9 +30,22 @@
         else:
             try:
                 self.gen.throw(type, value, traceback)
-                return True
-            except StopIteration:
-                return True
+                raise RuntimeError("generator didn't stop after throw()")
+            except StopIteration, exc:
+                # Suppress the exception *unless* it's the same exception that
+                # was passed to throw().  This prevents a StopIteration
+                # raised inside the "with" statement from being suppressed
+                return exc is not value
+            except:
+                # only re-raise if it's *not* the exception that was
+                # passed to throw(), because __exit__() must not raise
+                # an exception unless __exit__() itself failed.  But throw()
+                # has to raise the exception to signal propagation, so this
+                # fixes the impedance mismatch between the throw() protocol
+                # and the __exit__() protocol.
+                #
+                if sys.exc_info()[1] is not value:
+                    raise
 
 
 def contextmanager(func):
@@ -68,6 +81,7 @@
     try:
         helper.__name__ = func.__name__
         helper.__doc__ = func.__doc__
+        helper.__dict__ = func.__dict__
     except:
         pass
     return helper
diff --git a/Lib/copy_reg.py b/Lib/copy_reg.py
index 169520d..f4661ed 100644
--- a/Lib/copy_reg.py
+++ b/Lib/copy_reg.py
@@ -111,8 +111,19 @@
         # Slots found -- gather slot names from all base classes
         for c in cls.__mro__:
             if "__slots__" in c.__dict__:
-                names += [name for name in c.__dict__["__slots__"]
-                               if name not in ("__dict__", "__weakref__")]
+                slots = c.__dict__['__slots__']
+                # if class has a single slot, it can be given as a string
+                if isinstance(slots, basestring):
+                    slots = (slots,)
+                for name in slots:
+                    # special descriptors
+                    if name in ("__dict__", "__weakref__"):
+                        continue
+                    # mangled names
+                    elif name.startswith('__') and not name.endswith('__'):
+                        names.append('_%s%s' % (c.__name__, name))
+                    else:
+                        names.append(name)
 
     # Cache the outcome in the class if at all possible
     try:
diff --git a/Lib/ctypes/__init__.py b/Lib/ctypes/__init__.py
index dd0f640..28ac180 100644
--- a/Lib/ctypes/__init__.py
+++ b/Lib/ctypes/__init__.py
@@ -9,11 +9,7 @@
 from _ctypes import _Pointer
 from _ctypes import CFuncPtr as _CFuncPtr
 from _ctypes import __version__ as _ctypes_version
-try:
-    from _ctypes import RTLD_LOCAL, RTLD_GLOBAL
-except (ImportError, AttributeError):
-    RTLD_GLOBAL = RTLD_LOCAL = None
-
+from _ctypes import RTLD_LOCAL, RTLD_GLOBAL
 from _ctypes import ArgumentError
 
 from struct import calcsize as _calcsize
@@ -304,10 +300,11 @@
             raise AttributeError, name
         return self.__getitem__(name)
 
-    def __getitem__(self, name):
-        func = self._FuncPtr(name, self)
-        func.__name__ = name
-        setattr(self, name, func)
+    def __getitem__(self, name_or_ordinal):
+        func = self._FuncPtr((name_or_ordinal, self))
+        if not isinstance(name_or_ordinal, (int, long)):
+            func.__name__ = name_or_ordinal
+            setattr(self, name_or_ordinal, func)
         return func
 
 class PyDLL(CDLL):
@@ -384,21 +381,29 @@
 
 _pointer_type_cache[None] = c_void_p
 
-# functions
-
-from _ctypes import _memmove_addr, _memset_addr, _string_at_addr, cast
-
 if sizeof(c_uint) == sizeof(c_void_p):
     c_size_t = c_uint
 elif sizeof(c_ulong) == sizeof(c_void_p):
     c_size_t = c_ulong
 
+# functions
+
+from _ctypes import _memmove_addr, _memset_addr, _string_at_addr, _cast_addr
+
 ## void *memmove(void *, const void *, size_t);
 memmove = CFUNCTYPE(c_void_p, c_void_p, c_void_p, c_size_t)(_memmove_addr)
 
 ## void *memset(void *, int, size_t)
 memset = CFUNCTYPE(c_void_p, c_void_p, c_int, c_size_t)(_memset_addr)
 
+def PYFUNCTYPE(restype, *argtypes):
+    class CFunctionType(_CFuncPtr):
+        _argtypes_ = argtypes
+        _restype_ = restype
+        _flags_ = _FUNCFLAG_CDECL | _FUNCFLAG_PYTHONAPI
+    return CFunctionType
+cast = PYFUNCTYPE(py_object, c_void_p, py_object)(_cast_addr)
+
 _string_at = CFUNCTYPE(py_object, c_void_p, c_int)(_string_at_addr)
 def string_at(ptr, size=0):
     """string_at(addr[, size]) -> string
diff --git a/Lib/ctypes/_loader.py b/Lib/ctypes/_loader.py
index 7bde6c6..7a48c1c 100644
--- a/Lib/ctypes/_loader.py
+++ b/Lib/ctypes/_loader.py
@@ -1,14 +1,11 @@
-# WORK IN PROGRESS!  DO NOT (yet) USE!
 import sys, os
 import ctypes
 
-__all__ = ["LibraryLoader", "RTLD_LOCAL", "RTLD_GLOBAL"]
-
 if os.name in ("nt", "ce"):
     from _ctypes import LoadLibrary as dlopen
-    RTLD_LOCAL = RTLD_GLOBAL = None
 else:
-    from _ctypes import dlopen, RTLD_LOCAL, RTLD_GLOBAL
+    from _ctypes import dlopen
+from _ctypes import RTLD_LOCAL, RTLD_GLOBAL
 
 # _findLib(name) returns an iterable of possible names for a library.
 if os.name in ("nt", "ce"):
@@ -56,7 +53,10 @@
         expr = '/[^\(\)\s]*lib%s\.[^\(\)\s]*' % name
         res = re.search(expr, os.popen('/sbin/ldconfig -p 2>/dev/null').read())
         if not res:
-            return None
+            cmd = 'ldd %s 2>/dev/null' % sys.executable
+            res = re.search(expr, os.popen(cmd).read())
+            if not res:
+                return None
         return res.group(0)
 
     def _get_soname(f):
diff --git a/Lib/ctypes/test/test_byteswap.py b/Lib/ctypes/test/test_byteswap.py
index 1b31f90..d0ada40 100644
--- a/Lib/ctypes/test/test_byteswap.py
+++ b/Lib/ctypes/test/test_byteswap.py
@@ -149,7 +149,7 @@
         self.failUnless(c_char.__ctype_le__ is c_char)
         self.failUnless(c_char.__ctype_be__ is c_char)
 
-    def test_struct_fields(self):
+    def test_struct_fields_1(self):
         if sys.byteorder == "little":
             base = BigEndianStructure
         else:
@@ -198,17 +198,20 @@
             pass
         self.assertRaises(TypeError, setattr, S, "_fields_", [("s", T)])
 
-    # crashes on solaris with a core dump.
-    def X_test_struct_fields(self):
+    def test_struct_fields_2(self):
+        # standard packing in struct uses no alignment.
+        # So, we have to align using pad bytes.
+        #
+        # Unaligned accesses will crash Python (on those platforms that
+        # don't allow it, like sparc solaris).
         if sys.byteorder == "little":
             base = BigEndianStructure
-            fmt = ">bhid"
+            fmt = ">bxhid"
         else:
             base = LittleEndianStructure
-            fmt = "<bhid"
+            fmt = "<bxhid"
 
         class S(base):
-            _pack_ = 1 # struct with '<' or '>' uses standard alignment.
             _fields_ = [("b", c_byte),
                         ("h", c_short),
                         ("i", c_int),
@@ -218,5 +221,60 @@
         s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
         self.failUnlessEqual(bin(s1), bin(s2))
 
+    def test_unaligned_nonnative_struct_fields(self):
+        if sys.byteorder == "little":
+            base = BigEndianStructure
+            fmt = ">b h xi xd"
+        else:
+            base = LittleEndianStructure
+            fmt = "<b h xi xd"
+
+        class S(base):
+            _pack_ = 1
+            _fields_ = [("b", c_byte),
+
+                        ("h", c_short),
+
+                        ("_1", c_byte),
+                        ("i", c_int),
+
+                        ("_2", c_byte),
+                        ("d", c_double)]
+
+        s1 = S()
+        s1.b = 0x12
+        s1.h = 0x1234
+        s1.i = 0x12345678
+        s1.d = 3.14
+        s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
+        self.failUnlessEqual(bin(s1), bin(s2))
+
+    def test_unaligned_native_struct_fields(self):
+        if sys.byteorder == "little":
+            fmt = "<b h xi xd"
+        else:
+            base = LittleEndianStructure
+            fmt = ">b h xi xd"
+
+        class S(Structure):
+            _pack_ = 1
+            _fields_ = [("b", c_byte),
+
+                        ("h", c_short),
+
+                        ("_1", c_byte),
+                        ("i", c_int),
+
+                        ("_2", c_byte),
+                        ("d", c_double)]
+
+        s1 = S()
+        s1.b = 0x12
+        s1.h = 0x1234
+        s1.i = 0x12345678
+        s1.d = 3.14
+        s2 = struct.pack(fmt, 0x12, 0x1234, 0x12345678, 3.14)
+        self.failUnlessEqual(bin(s1), bin(s2))
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/Lib/ctypes/test/test_cfuncs.py b/Lib/ctypes/test/test_cfuncs.py
index 7c2b28b..6e0798d 100644
--- a/Lib/ctypes/test/test_cfuncs.py
+++ b/Lib/ctypes/test/test_cfuncs.py
@@ -179,7 +179,7 @@
         def __getattr__(self, name):
             if name[:2] == '__' and name[-2:] == '__':
                 raise AttributeError, name
-            func = self._FuncPtr("s_" + name, self)
+            func = self._FuncPtr(("s_" + name, self))
             setattr(self, name, func)
             return func
 
diff --git a/Lib/ctypes/test/test_keeprefs.py b/Lib/ctypes/test/test_keeprefs.py
index 39e70e3..7318f29 100644
--- a/Lib/ctypes/test/test_keeprefs.py
+++ b/Lib/ctypes/test/test_keeprefs.py
@@ -140,5 +140,10 @@
         r.a[0].x = 42
         r.a[0].y = 99
 
+        # to avoid leaking when tests are run several times
+        # clean up the types left in the cache.
+        from ctypes import _pointer_type_cache
+        del _pointer_type_cache[POINT]
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/Lib/ctypes/test/test_loading.py b/Lib/ctypes/test/test_loading.py
index 80564b8..4558417 100644
--- a/Lib/ctypes/test/test_loading.py
+++ b/Lib/ctypes/test/test_loading.py
@@ -2,40 +2,72 @@
 import sys, unittest
 import os, StringIO
 
+libc_name = None
+if os.name == "nt":
+    libc_name = "msvcrt"
+elif os.name == "ce":
+    libc_name = "coredll"
+elif sys.platform == "darwin":
+    libc_name = "libc.dylib"
+elif sys.platform == "cygwin":
+    libc_name = "cygwin1.dll"
+else:
+    for line in os.popen("ldd %s" % sys.executable):
+        if "libc.so" in line:
+            if sys.platform == "openbsd3":
+                libc_name = line.split()[4]
+            else:
+                libc_name = line.split()[2]
+##            print "libc_name is", libc_name
+            break
+
 class LoaderTest(unittest.TestCase):
 
     unknowndll = "xxrandomnamexx"
 
-    def test_load(self):
-        if os.name == "nt":
-            name = "msvcrt"
-        elif os.name == "ce":
-            name = "coredll"
-        elif sys.platform == "darwin":
-            name = "libc.dylib"
-        elif sys.platform.startswith("freebsd"):
-            name = "libc.so"
-        elif sys.platform == "sunos5":
-            name = "libc.so"
-        else:
-            name = "libc.so.6"
-        cdll.load(name)
-        self.assertRaises(OSError, cdll.load, self.unknowndll)
+    if libc_name is not None:
+        def test_load(self):
+            cdll.load(libc_name)
+            cdll.load(os.path.basename(libc_name))
+            self.assertRaises(OSError, cdll.load, self.unknowndll)
 
-    def test_load_version(self):
-        version = "6"
-        name = "c"
-        if sys.platform == "linux2":
-            cdll.load_version(name, version)
+    if libc_name is not None and os.path.basename(libc_name) == "libc.so.6":
+        def test_load_version(self):
+            cdll.load_version("c", "6")
             # linux uses version, libc 9 should not exist
-            self.assertRaises(OSError, cdll.load_version, name, "9")
-        self.assertRaises(OSError, cdll.load_version, self.unknowndll, "")
+            self.assertRaises(OSError, cdll.load_version, "c", "9")
+            self.assertRaises(OSError, cdll.load_version, self.unknowndll, "")
 
-    if os.name == "posix" and sys.platform != "sunos5":
         def test_find(self):
             name = "c"
             cdll.find(name)
             self.assertRaises(OSError, cdll.find, self.unknowndll)
 
+    if os.name in ("nt", "ce"):
+        def test_load_library(self):
+            if os.name == "nt":
+                windll.load_library("kernel32").GetModuleHandleW
+                windll.LoadLibrary("kernel32").GetModuleHandleW
+                WinDLL("kernel32").GetModuleHandleW
+            elif os.name == "ce":
+                windll.load_library("coredll").GetModuleHandleW
+                windll.LoadLibrary("coredll").GetModuleHandleW
+                WinDLL("coredll").GetModuleHandleW
+
+        def test_load_ordinal_functions(self):
+            import _ctypes_test
+            dll = WinDLL(_ctypes_test.__file__)
+            # We load the same function both via ordinal and name
+            func_ord = dll[2]
+            func_name = dll.GetString
+            # addressof gets the address where the function pointer is stored
+            a_ord = addressof(func_ord)
+            a_name = addressof(func_name)
+            f_ord_addr = c_void_p.from_address(a_ord).value
+            f_name_addr = c_void_p.from_address(a_name).value
+            self.failUnlessEqual(hex(f_ord_addr), hex(f_name_addr))
+
+            self.failUnlessRaises(AttributeError, dll.__getitem__, 1234)
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/Lib/ctypes/test/test_pointers.py b/Lib/ctypes/test/test_pointers.py
index 6172abb..3a324a6 100644
--- a/Lib/ctypes/test/test_pointers.py
+++ b/Lib/ctypes/test/test_pointers.py
@@ -166,6 +166,18 @@
         result = func( byref(argc), argv )
         assert result == 'world', result
 
+    def test_bug_1467852(self):
+        # http://sourceforge.net/tracker/?func=detail&atid=532154&aid=1467852&group_id=71702
+        x = c_int(5)
+        dummy = []
+        for i in range(32000):
+            dummy.append(c_int(i))
+        y = c_int(6)
+        p = pointer(x)
+        pp = pointer(p)
+        q = pointer(y)
+        pp[0] = q         # <==
+        self.failUnlessEqual(p[0], 6)
 
 if __name__ == '__main__':
     unittest.main()
diff --git a/Lib/ctypes/test/test_posix.py b/Lib/ctypes/test/test_posix.py
index 2b4fdff..fe0a40a 100644
--- a/Lib/ctypes/test/test_posix.py
+++ b/Lib/ctypes/test/test_posix.py
@@ -8,8 +8,10 @@
 
     class TestRTLD_GLOBAL(unittest.TestCase):
         def test_GL(self):
-            cdll.load('libGL.so', mode=RTLD_GLOBAL)
-            cdll.load('libGLU.so')
+            if os.path.exists('/usr/lib/libGL.so'):
+                cdll.load('libGL.so', mode=RTLD_GLOBAL)
+            if os.path.exists('/usr/lib/libGLU.so'):
+                cdll.load('libGLU.so')
 
 ##if os.name == "posix" and sys.platform != "darwin":
 
diff --git a/Lib/ctypes/test/test_prototypes.py b/Lib/ctypes/test/test_prototypes.py
index 2c3d75b..47f5da1 100644
--- a/Lib/ctypes/test/test_prototypes.py
+++ b/Lib/ctypes/test/test_prototypes.py
@@ -24,6 +24,19 @@
 import _ctypes_test
 testdll = cdll.load(_ctypes_test.__file__)
 
+# Return machine address `a` as a (possibly long) non-negative integer.
+# Starting with Python 2.5, id(anything) is always non-negative, and
+# the ctypes addressof() inherits that via PyLong_FromVoidPtr().
+def positive_address(a):
+    if a >= 0:
+        return a
+    # View the bits in `a` as unsigned instead.
+    import struct
+    num_bits = struct.calcsize("P") * 8 # num bits in native machine address
+    a += 1L << num_bits
+    assert a >= 0
+    return a
+
 def c_wbuffer(init):
     n = len(init) + 1
     return (c_wchar * n)(*init)
@@ -43,7 +56,8 @@
         ci = c_int(0)
 
         func.argtypes = POINTER(c_int),
-        self.failUnlessEqual(addressof(ci), func(byref(ci)))
+        self.failUnlessEqual(positive_address(addressof(ci)),
+                             positive_address(func(byref(ci))))
 
         func.argtypes = c_char_p,
         self.assertRaises(ArgumentError, func, byref(ci))
diff --git a/Lib/ctypes/test/test_random_things.py b/Lib/ctypes/test/test_random_things.py
index cd50ca8..78a665b 100644
--- a/Lib/ctypes/test/test_random_things.py
+++ b/Lib/ctypes/test/test_random_things.py
@@ -51,16 +51,14 @@
     def test_IntegerDivisionError(self):
         cb = CFUNCTYPE(c_int, c_int)(callback_func)
         out = self.capture_stderr(cb, 0)
-        self.failUnlessEqual(out.splitlines()[-1],
-                             "ZeroDivisionError: "
-                             "integer division or modulo by zero")
+        self.failUnlessEqual(out.splitlines()[-1][:19],
+                             "ZeroDivisionError: ")
 
     def test_FloatDivisionError(self):
         cb = CFUNCTYPE(c_int, c_double)(callback_func)
         out = self.capture_stderr(cb, 0.0)
-        self.failUnlessEqual(out.splitlines()[-1],
-                             "ZeroDivisionError: "
-                             "float division")
+        self.failUnlessEqual(out.splitlines()[-1][:19],
+                             "ZeroDivisionError: ")
 
     def test_TypeErrorDivisionError(self):
         cb = CFUNCTYPE(c_int, c_char_p)(callback_func)
diff --git a/Lib/ctypes/test/test_sizes.py b/Lib/ctypes/test/test_sizes.py
index 6fb9ca0..208c00e 100644
--- a/Lib/ctypes/test/test_sizes.py
+++ b/Lib/ctypes/test/test_sizes.py
@@ -20,5 +20,8 @@
         self.failUnlessEqual(8, sizeof(c_int64))
         self.failUnlessEqual(8, sizeof(c_uint64))
 
+    def test_size_t(self):
+        self.failUnlessEqual(sizeof(c_void_p), sizeof(c_size_t))
+
 if __name__ == "__main__":
     unittest.main()
diff --git a/Lib/ctypes/test/test_unaligned_structures.py b/Lib/ctypes/test/test_unaligned_structures.py
new file mode 100644
index 0000000..89343ba
--- /dev/null
+++ b/Lib/ctypes/test/test_unaligned_structures.py
@@ -0,0 +1,45 @@
+import sys, unittest
+from ctypes import *
+
+structures = []
+byteswapped_structures = []
+
+
+if sys.byteorder == "little":
+    SwappedStructure = BigEndianStructure
+else:
+    SwappedStructure = LittleEndianStructure
+
+for typ in [c_short, c_int, c_long, c_longlong,
+            c_float, c_double,
+            c_ushort, c_uint, c_ulong, c_ulonglong]:
+    class X(Structure):
+        _pack_ = 1
+        _fields_ = [("pad", c_byte),
+                    ("value", typ)]
+    class Y(SwappedStructure):
+        _pack_ = 1
+        _fields_ = [("pad", c_byte),
+                    ("value", typ)]
+    structures.append(X)
+    byteswapped_structures.append(Y)
+
+class TestStructures(unittest.TestCase):
+    def test_native(self):
+        for typ in structures:
+##            print typ.value
+            self.failUnlessEqual(typ.value.offset, 1)
+            o = typ()
+            o.value = 4
+            self.failUnlessEqual(o.value, 4)
+
+    def test_swapped(self):
+        for typ in byteswapped_structures:
+##            print >> sys.stderr, typ.value
+            self.failUnlessEqual(typ.value.offset, 1)
+            o = typ()
+            o.value = 4
+            self.failUnlessEqual(o.value, 4)
+
+if __name__ == '__main__':
+    unittest.main()
diff --git a/Lib/distutils/command/build_ext.py b/Lib/distutils/command/build_ext.py
index 6ea5d57..5771252 100644
--- a/Lib/distutils/command/build_ext.py
+++ b/Lib/distutils/command/build_ext.py
@@ -185,7 +185,9 @@
 
         # for extensions under Cygwin and AtheOS Python's library directory must be
         # appended to library_dirs
-        if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos':
+        if sys.platform[:6] == 'cygwin' or sys.platform[:6] == 'atheos' or \
+               (sys.platform.startswith('linux') and
+                sysconfig.get_config_var('Py_ENABLE_SHARED')):
             if string.find(sys.executable, sys.exec_prefix) != -1:
                 # building third party extensions
                 self.library_dirs.append(os.path.join(sys.prefix, "lib",
@@ -688,6 +690,13 @@
             # extensions, it is a reference to the original list
             return ext.libraries + [pythonlib, "m"] + extra
         else:
-            return ext.libraries
+            from distutils import sysconfig
+            if sysconfig.get_config_var('Py_ENABLE_SHARED'):
+                template = "python%d.%d"
+                pythonlib = (template %
+                             (sys.hexversion >> 24, (sys.hexversion >> 16) & 0xff))
+                return ext.libraries + [pythonlib]
+            else:
+                return ext.libraries
 
 # class build_ext
diff --git a/Lib/distutils/command/install.py b/Lib/distutils/command/install.py
index 7723761..453151d 100644
--- a/Lib/distutils/command/install.py
+++ b/Lib/distutils/command/install.py
@@ -601,6 +601,7 @@
                     ('install_headers', has_headers),
                     ('install_scripts', has_scripts),
                     ('install_data',    has_data),
+                    ('install_egg_info', lambda self:True),
                    ]
 
 # class install
diff --git a/Lib/distutils/command/install_egg_info.py b/Lib/distutils/command/install_egg_info.py
new file mode 100644
index 0000000..c31ac29
--- /dev/null
+++ b/Lib/distutils/command/install_egg_info.py
@@ -0,0 +1,75 @@
+"""distutils.command.install_egg_info
+
+Implements the Distutils 'install_egg_info' command, for installing
+a package's PKG-INFO metadata."""
+
+
+from distutils.cmd import Command
+from distutils import log, dir_util
+import os, sys, re
+
+class install_egg_info(Command):
+    """Install an .egg-info file for the package"""
+
+    description = "Install package's PKG-INFO metadata as an .egg-info file"
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+    ]
+
+    def initialize_options(self):
+        self.install_dir = None
+
+    def finalize_options(self):
+        self.set_undefined_options('install_lib',('install_dir','install_dir'))
+        basename = "%s-%s-py%s.egg-info" % (
+            to_filename(safe_name(self.distribution.get_name())),
+            to_filename(safe_version(self.distribution.get_version())),
+            sys.version[:3]
+        )
+        self.target = os.path.join(self.install_dir, basename)
+        self.outputs = [self.target]
+
+    def run(self):
+        target = self.target
+        if os.path.isdir(target) and not os.path.islink(target):
+            dir_util.remove_tree(target, dry_run=self.dry_run)
+        elif os.path.exists(target):
+            self.execute(os.unlink,(self.target,),"Removing "+target)
+        log.info("Writing %s", target)
+        if not self.dry_run:
+            f = open(target, 'w')
+            self.distribution.metadata.write_pkg_file(f)
+            f.close()
+
+    def get_outputs(self):
+        return self.outputs
+
+
+# The following routines are taken from setuptools' pkg_resources module and
+# can be replaced by importing them from pkg_resources once it is included
+# in the stdlib.
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """Convert an arbitrary string to a standard version string
+
+    Spaces become dots, and all other non-alphanumeric characters become
+    dashes, with runs of multiple dashes condensed to a single dash.
+    """
+    version = version.replace(' ','.')
+    return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
+    """
+    return name.replace('-','_')
diff --git a/Lib/distutils/command/upload.py b/Lib/distutils/command/upload.py
index 62767a3..6f4ce81 100644
--- a/Lib/distutils/command/upload.py
+++ b/Lib/distutils/command/upload.py
@@ -29,6 +29,7 @@
          'display full response text from server'),
         ('sign', 's',
          'sign files to upload using gpg'),
+        ('identity=', 'i', 'GPG identity used to sign files'),
         ]
     boolean_options = ['show-response', 'sign']
 
@@ -38,8 +39,13 @@
         self.repository = ''
         self.show_response = 0
         self.sign = False
+        self.identity = None
 
     def finalize_options(self):
+        if self.identity and not self.sign:
+            raise DistutilsOptionError(
+                "Must use --sign for --identity to have meaning"
+            )
         if os.environ.has_key('HOME'):
             rc = os.path.join(os.environ['HOME'], '.pypirc')
             if os.path.exists(rc):
@@ -67,7 +73,10 @@
     def upload_file(self, command, pyversion, filename):
         # Sign if requested
         if self.sign:
-            spawn(("gpg", "--detach-sign", "-a", filename),
+            gpg_args = ["gpg", "--detach-sign", "-a", filename]
+            if self.identity:
+                gpg_args[2:2] = ["--local-user", self.identity]
+            spawn(gpg_args,
                   dry_run=self.dry_run)
 
         # Fill in the data - send all the meta-data in case we need to
diff --git a/Lib/distutils/log.py b/Lib/distutils/log.py
index cf3ee13..95d4c1c 100644
--- a/Lib/distutils/log.py
+++ b/Lib/distutils/log.py
@@ -20,7 +20,12 @@
 
     def _log(self, level, msg, args):
         if level >= self.threshold:
-            print msg % args
+            if not args:
+                # msg may contain a '%'. If args is empty,
+                # don't even try to string-format
+                print msg
+            else:
+                print msg % args
             sys.stdout.flush()
 
     def log(self, level, msg, *args):
diff --git a/Lib/distutils/sysconfig.py b/Lib/distutils/sysconfig.py
index dc603be..49536f0 100644
--- a/Lib/distutils/sysconfig.py
+++ b/Lib/distutils/sysconfig.py
@@ -31,7 +31,7 @@
 
 python_build = os.path.isfile(landmark)
 
-del argv0_path, landmark
+del landmark
 
 
 def get_python_version():
@@ -185,7 +185,7 @@
 def get_config_h_filename():
     """Return full pathname of installed pyconfig.h file."""
     if python_build:
-        inc_dir = os.curdir
+        inc_dir = argv0_path
     else:
         inc_dir = get_python_inc(plat_specific=1)
     if get_python_version() < '2.2':
@@ -213,8 +213,8 @@
     """
     if g is None:
         g = {}
-    define_rx = re.compile("#define ([A-Z][A-Z0-9_]+) (.*)\n")
-    undef_rx = re.compile("/[*] #undef ([A-Z][A-Z0-9_]+) [*]/\n")
+    define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
+    undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
     #
     while 1:
         line = fp.readline()
@@ -351,6 +351,17 @@
 
         raise DistutilsPlatformError(my_msg)
 
+    # load the installed pyconfig.h:
+    try:
+        filename = get_config_h_filename()
+        parse_config_h(file(filename), g)
+    except IOError, msg:
+        my_msg = "invalid Python installation: unable to open %s" % filename
+        if hasattr(msg, "strerror"):
+            my_msg = my_msg + " (%s)" % msg.strerror
+
+        raise DistutilsPlatformError(my_msg)
+
     # On MacOSX we need to check the setting of the environment variable
     # MACOSX_DEPLOYMENT_TARGET: configure bases some choices on it so
     # it needs to be compatible.
@@ -361,7 +372,7 @@
         if cur_target == '':
             cur_target = cfg_target
             os.putenv('MACOSX_DEPLOYMENT_TARGET', cfg_target)
-        if cfg_target != cur_target:
+        elif map(int, cfg_target.split('.')) > map(int, cur_target.split('.')):
             my_msg = ('$MACOSX_DEPLOYMENT_TARGET mismatch: now "%s" but "%s" during configure'
                 % (cur_target, cfg_target))
             raise DistutilsPlatformError(my_msg)
diff --git a/Lib/doctest.py b/Lib/doctest.py
index 6244fae..70c355a 100644
--- a/Lib/doctest.py
+++ b/Lib/doctest.py
@@ -236,6 +236,15 @@
     else:
         raise TypeError("Expected a module, string, or None")
 
+def _load_testfile(filename, package, module_relative):
+    if module_relative:
+        package = _normalize_module(package, 3)
+        filename = _module_relative_path(package, filename)
+        if hasattr(package, '__loader__'):
+            if hasattr(package.__loader__, 'get_data'):
+                return package.__loader__.get_data(filename), filename
+    return open(filename).read(), filename
+
 def _indent(s, indent=4):
     """
     Add the given number of space characters to the beginning every
@@ -1319,13 +1328,13 @@
     __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
                                          r'(?P<name>[\w\.]+)'
                                          r'\[(?P<examplenum>\d+)\]>$')
-    def __patched_linecache_getlines(self, filename):
+    def __patched_linecache_getlines(self, filename, module_globals=None):
         m = self.__LINECACHE_FILENAME_RE.match(filename)
         if m and m.group('name') == self.test.name:
             example = self.test.examples[int(m.group('examplenum'))]
             return example.source.splitlines(True)
         else:
-            return self.save_linecache_getlines(filename)
+            return self.save_linecache_getlines(filename, module_globals)
 
     def run(self, test, compileflags=None, out=None, clear_globs=True):
         """
@@ -1933,9 +1942,7 @@
                          "relative paths.")
 
     # Relativize the path
-    if module_relative:
-        package = _normalize_module(package)
-        filename = _module_relative_path(package, filename)
+    text, filename = _load_testfile(filename, package, module_relative)
 
     # If no name was given, then use the file's name.
     if name is None:
@@ -1955,8 +1962,7 @@
         runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
 
     # Read the file, convert it to a test, and run it.
-    s = open(filename).read()
-    test = parser.get_doctest(s, globs, name, filename, 0)
+    test = parser.get_doctest(text, globs, name, filename, 0)
     runner.run(test)
 
     if report:
@@ -2336,15 +2342,13 @@
                          "relative paths.")
 
     # Relativize the path.
-    if module_relative:
-        package = _normalize_module(package)
-        path = _module_relative_path(package, path)
+    doc, path = _load_testfile(path, package, module_relative)
+
     if "__file__" not in globs:
         globs["__file__"] = path
 
     # Find the file and read it.
     name = os.path.basename(path)
-    doc = open(path).read()
 
     # Convert it to a test, and wrap it in a DocFileCase.
     test = parser.get_doctest(doc, globs, name, path, 0)
diff --git a/Lib/dummy_thread.py b/Lib/dummy_thread.py
index fb3abbf..d69d840 100644
--- a/Lib/dummy_thread.py
+++ b/Lib/dummy_thread.py
@@ -113,6 +113,14 @@
             self.locked_status = True
             return True
 
+    __enter__ = acquire
+
+    def __exit__(self, typ, val, tb):
+        self.release()
+
+    def __context__(self):
+        return self
+
     def release(self):
         """Release the dummy lock."""
         # XXX Perhaps shouldn't actually bother to test?  Could lead
diff --git a/Lib/easy_install.py b/Lib/easy_install.py
new file mode 100644
index 0000000..d87e984
--- /dev/null
+++ b/Lib/easy_install.py
@@ -0,0 +1,5 @@
+"""Run the EasyInstall command"""
+
+if __name__ == '__main__':
+    from setuptools.command.easy_install import main
+    main()
diff --git a/Lib/email/__init__.py b/Lib/email/__init__.py
index e5c0e2e..f01260f 100644
--- a/Lib/email/__init__.py
+++ b/Lib/email/__init__.py
@@ -4,9 +4,10 @@
 
 """A package for parsing, handling, and generating email messages."""
 
-__version__ = '3.0.1'
+__version__ = '4.0a2'
 
 __all__ = [
+    # Old names
     'base64MIME',
     'Charset',
     'Encoders',
@@ -27,6 +28,19 @@
     'Utils',
     'message_from_string',
     'message_from_file',
+    # new names
+    'base64mime',
+    'charset',
+    'encoders',
+    'errors',
+    'generator',
+    'header',
+    'iterators',
+    'message',
+    'mime',
+    'parser',
+    'quoprimime',
+    'utils',
     ]
 
 
@@ -39,7 +53,7 @@
 
     Optional _class and strict are passed to the Parser constructor.
     """
-    from email.Parser import Parser
+    from email.parser import Parser
     return Parser(*args, **kws).parsestr(s)
 
 
@@ -48,5 +62,62 @@
 
     Optional _class and strict are passed to the Parser constructor.
     """
-    from email.Parser import Parser
+    from email.parser import Parser
     return Parser(*args, **kws).parse(fp)
+
+
+
+# Lazy loading to provide name mapping from new-style names (PEP 8 compatible
+# email 4.0 module names), to old-style names (email 3.0 module names).
+import sys
+
+class LazyImporter(object):
+    def __init__(self, module_name):
+        self.__name__ = 'email.' + module_name
+
+    def __getattr__(self, name):
+        __import__(self.__name__)
+        mod = sys.modules[self.__name__]
+        self.__dict__.update(mod.__dict__)
+        return getattr(mod, name)
+
+
+_LOWERNAMES = [
+    # email.<old name> -> email.<new name is lowercased old name>
+    'Charset',
+    'Encoders',
+    'Errors',
+    'FeedParser',
+    'Generator',
+    'Header',
+    'Iterators',
+    'Message',
+    'Parser',
+    'Utils',
+    'base64MIME',
+    'quopriMIME',
+    ]
+
+_MIMENAMES = [
+    # email.MIME<old name> -> email.mime.<new name is lowercased old name>
+    'Audio',
+    'Base',
+    'Image',
+    'Message',
+    'Multipart',
+    'NonMultipart',
+    'Text',
+    ]
+
+for _name in _LOWERNAMES:
+    importer = LazyImporter(_name.lower())
+    sys.modules['email.' + _name] = importer
+    setattr(sys.modules['email'], _name, importer)
+
+
+import email.mime
+for _name in _MIMENAMES:
+    importer = LazyImporter('mime.' + _name.lower())
+    sys.modules['email.MIME' + _name] = importer
+    setattr(sys.modules['email'], 'MIME' + _name, importer)
+    setattr(sys.modules['email.mime'], _name, importer)
diff --git a/Lib/email/_parseaddr.py b/Lib/email/_parseaddr.py
index 7d759ef..5821ddf 100644
--- a/Lib/email/_parseaddr.py
+++ b/Lib/email/_parseaddr.py
@@ -6,6 +6,13 @@
 Lifted directly from rfc822.py.  This should eventually be rewritten.
 """
 
+__all__ = [
+    'mktime_tz',
+    'parsedate',
+    'parsedate_tz',
+    'quote',
+    ]
+
 import time
 
 SPACE = ' '
@@ -117,7 +124,8 @@
         else:
             tzsign = 1
         tzoffset = tzsign * ( (tzoffset//100)*3600 + (tzoffset % 100)*60)
-    return yy, mm, dd, thh, tmm, tss, 0, 1, 0, tzoffset
+    # Daylight Saving Time flag is set to -1, since DST is unknown.
+    return yy, mm, dd, thh, tmm, tss, 0, 1, -1, tzoffset
 
 
 def parsedate(data):
diff --git a/Lib/email/base64MIME.py b/Lib/email/base64mime.py
similarity index 95%
rename from Lib/email/base64MIME.py
rename to Lib/email/base64mime.py
index 6ed1d53..0129d9d 100644
--- a/Lib/email/base64MIME.py
+++ b/Lib/email/base64mime.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2002-2004 Python Software Foundation
+# Copyright (C) 2002-2006 Python Software Foundation
 # Author: Ben Gertzfield
 # Contact: email-sig@python.org
 
@@ -24,9 +24,21 @@
 module.
 """
 
+__all__ = [
+    'base64_len',
+    'body_decode',
+    'body_encode',
+    'decode',
+    'decodestring',
+    'encode',
+    'encodestring',
+    'header_encode',
+    ]
+
 import re
+
 from binascii import b2a_base64, a2b_base64
-from email.Utils import fix_eols
+from email.utils import fix_eols
 
 CRLF = '\r\n'
 NL = '\n'
diff --git a/Lib/email/Charset.py b/Lib/email/charset.py
similarity index 90%
rename from Lib/email/Charset.py
rename to Lib/email/charset.py
index fd4043b..8f218b2 100644
--- a/Lib/email/Charset.py
+++ b/Lib/email/charset.py
@@ -2,9 +2,18 @@
 # Author: Ben Gertzfield, Barry Warsaw
 # Contact: email-sig@python.org
 
-import email.base64MIME
-import email.quopriMIME
-from email.Encoders import encode_7or8bit
+__all__ = [
+    'Charset',
+    'add_alias',
+    'add_charset',
+    'add_codec',
+    ]
+
+import email.base64mime
+import email.quoprimime
+
+from email import errors
+from email.encoders import encode_7or8bit
 
 
 
@@ -186,8 +195,17 @@
     """
     def __init__(self, input_charset=DEFAULT_CHARSET):
         # RFC 2046, $4.1.2 says charsets are not case sensitive.  We coerce to
-        # unicode because its .lower() is locale insensitive.
-        input_charset = unicode(input_charset, 'ascii').lower()
+        # unicode because its .lower() is locale insensitive.  If the argument
+        # is already a unicode, we leave it at that, but ensure that the
+        # charset is ASCII, as the standard (RFC XXX) requires.
+        try:
+            if isinstance(input_charset, unicode):
+                input_charset.encode('ascii')
+            else:
+                input_charset = unicode(input_charset, 'ascii')
+        except UnicodeError:
+            raise errors.CharsetError(input_charset)
+        input_charset = input_charset.lower()
         # Set the input charset after filtering through the aliases
         self.input_charset = ALIASES.get(input_charset, input_charset)
         # We can try to guess which encoding and conversion to use by the
@@ -307,12 +325,12 @@
         cset = self.get_output_charset()
         # The len(s) of a 7bit encoding is len(s)
         if self.header_encoding == BASE64:
-            return email.base64MIME.base64_len(s) + len(cset) + MISC_LEN
+            return email.base64mime.base64_len(s) + len(cset) + MISC_LEN
         elif self.header_encoding == QP:
-            return email.quopriMIME.header_quopri_len(s) + len(cset) + MISC_LEN
+            return email.quoprimime.header_quopri_len(s) + len(cset) + MISC_LEN
         elif self.header_encoding == SHORTEST:
-            lenb64 = email.base64MIME.base64_len(s)
-            lenqp = email.quopriMIME.header_quopri_len(s)
+            lenb64 = email.base64mime.base64_len(s)
+            lenqp = email.quoprimime.header_quopri_len(s)
             return min(lenb64, lenqp) + len(cset) + MISC_LEN
         else:
             return len(s)
@@ -335,16 +353,16 @@
             s = self.convert(s)
         # 7bit/8bit encodings return the string unchanged (modulo conversions)
         if self.header_encoding == BASE64:
-            return email.base64MIME.header_encode(s, cset)
+            return email.base64mime.header_encode(s, cset)
         elif self.header_encoding == QP:
-            return email.quopriMIME.header_encode(s, cset, maxlinelen=None)
+            return email.quoprimime.header_encode(s, cset, maxlinelen=None)
         elif self.header_encoding == SHORTEST:
-            lenb64 = email.base64MIME.base64_len(s)
-            lenqp = email.quopriMIME.header_quopri_len(s)
+            lenb64 = email.base64mime.base64_len(s)
+            lenqp = email.quoprimime.header_quopri_len(s)
             if lenb64 < lenqp:
-                return email.base64MIME.header_encode(s, cset)
+                return email.base64mime.header_encode(s, cset)
             else:
-                return email.quopriMIME.header_encode(s, cset, maxlinelen=None)
+                return email.quoprimime.header_encode(s, cset, maxlinelen=None)
         else:
             return s
 
@@ -363,8 +381,8 @@
             s = self.convert(s)
         # 7bit/8bit encodings return the string unchanged (module conversions)
         if self.body_encoding is BASE64:
-            return email.base64MIME.body_encode(s)
+            return email.base64mime.body_encode(s)
         elif self.body_encoding is QP:
-            return email.quopriMIME.body_encode(s)
+            return email.quoprimime.body_encode(s)
         else:
             return s
diff --git a/Lib/email/Encoders.py b/Lib/email/encoders.py
similarity index 93%
rename from Lib/email/Encoders.py
rename to Lib/email/encoders.py
index baac2a3..06016cd 100644
--- a/Lib/email/Encoders.py
+++ b/Lib/email/encoders.py
@@ -1,12 +1,22 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Barry Warsaw
 # Contact: email-sig@python.org
 
 """Encodings and related functions."""
 
+__all__ = [
+    'encode_7or8bit',
+    'encode_base64',
+    'encode_noop',
+    'encode_quopri',
+    ]
+
 import base64
+
 from quopri import encodestring as _encodestring
 
+
+
 def _qencode(s):
     enc = _encodestring(s, quotetabs=True)
     # Must encode spaces, which quopri.encodestring() doesn't do
diff --git a/Lib/email/Errors.py b/Lib/email/errors.py
similarity index 92%
rename from Lib/email/Errors.py
rename to Lib/email/errors.py
index e13a2c7..d52a624 100644
--- a/Lib/email/Errors.py
+++ b/Lib/email/errors.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Barry Warsaw
 # Contact: email-sig@python.org
 
@@ -26,6 +26,10 @@
     """Conversion to a multipart is prohibited."""
 
 
+class CharsetError(MessageError):
+    """An illegal charset was given."""
+
+
 
 # These are parsing defects which the parser was able to work around.
 class MessageDefect:
diff --git a/Lib/email/FeedParser.py b/Lib/email/feedparser.py
similarity index 97%
rename from Lib/email/FeedParser.py
rename to Lib/email/feedparser.py
index a2130e2..afb02b3 100644
--- a/Lib/email/FeedParser.py
+++ b/Lib/email/feedparser.py
@@ -19,9 +19,12 @@
 object's .defects attribute.
 """
 
+__all__ = ['FeedParser']
+
 import re
-from email import Errors
-from email import Message
+
+from email import errors
+from email import message
 
 NLCRE = re.compile('\r\n|\r|\n')
 NLCRE_bol = re.compile('(\r\n|\r|\n)')
@@ -130,7 +133,7 @@
 class FeedParser:
     """A feed-style parser of email."""
 
-    def __init__(self, _factory=Message.Message):
+    def __init__(self, _factory=message.Message):
         """_factory is called with no arguments to create a new message obj"""
         self._factory = _factory
         self._input = BufferedSubFile()
@@ -164,7 +167,7 @@
         # Look for final set of defects
         if root.get_content_maintype() == 'multipart' \
                and not root.is_multipart():
-            root.defects.append(Errors.MultipartInvariantViolationDefect())
+            root.defects.append(errors.MultipartInvariantViolationDefect())
         return root
 
     def _new_message(self):
@@ -277,7 +280,7 @@
                 # defined a boundary.  That's a problem which we'll handle by
                 # reading everything until the EOF and marking the message as
                 # defective.
-                self._cur.defects.append(Errors.NoBoundaryInMultipartDefect())
+                self._cur.defects.append(errors.NoBoundaryInMultipartDefect())
                 lines = []
                 for line in self._input:
                     if line is NeedMoreData:
@@ -381,7 +384,7 @@
             # that as a defect and store the captured text as the payload.
             # Everything from here to the EOF is epilogue.
             if capturing_preamble:
-                self._cur.defects.append(Errors.StartBoundaryNotFoundDefect())
+                self._cur.defects.append(errors.StartBoundaryNotFoundDefect())
                 self._cur.set_payload(EMPTYSTRING.join(preamble))
                 epilogue = []
                 for line in self._input:
@@ -432,7 +435,7 @@
                     # The first line of the headers was a continuation.  This
                     # is illegal, so let's note the defect, store the illegal
                     # line, and ignore it for purposes of headers.
-                    defect = Errors.FirstHeaderLineIsContinuationDefect(line)
+                    defect = errors.FirstHeaderLineIsContinuationDefect(line)
                     self._cur.defects.append(defect)
                     continue
                 lastvalue.append(line)
@@ -460,13 +463,13 @@
                 else:
                     # Weirdly placed unix-from line.  Note this as a defect
                     # and ignore it.
-                    defect = Errors.MisplacedEnvelopeHeaderDefect(line)
+                    defect = errors.MisplacedEnvelopeHeaderDefect(line)
                     self._cur.defects.append(defect)
                     continue
             # Split the line on the colon separating field name from value.
             i = line.find(':')
             if i < 0:
-                defect = Errors.MalformedHeaderDefect(line)
+                defect = errors.MalformedHeaderDefect(line)
                 self._cur.defects.append(defect)
                 continue
             lastheader = line[:i]
diff --git a/Lib/email/Generator.py b/Lib/email/generator.py
similarity index 97%
rename from Lib/email/Generator.py
rename to Lib/email/generator.py
index 7969916..6e7a515 100644
--- a/Lib/email/Generator.py
+++ b/Lib/email/generator.py
@@ -4,14 +4,16 @@
 
 """Classes to generate plain text from a message object tree."""
 
+__all__ = ['Generator', 'DecodedGenerator']
+
 import re
 import sys
 import time
 import random
 import warnings
-from cStringIO import StringIO
 
-from email.Header import Header
+from cStringIO import StringIO
+from email.header import Header
 
 UNDERSCORE = '_'
 NL = '\n'
@@ -81,12 +83,6 @@
             print >> self._fp, ufrom
         self._write(msg)
 
-    # For backwards compatibility, but this is slower
-    def __call__(self, msg, unixfrom=False):
-        warnings.warn('__call__() deprecated; use flatten()',
-                      DeprecationWarning, 2)
-        self.flatten(msg, unixfrom)
-
     def clone(self, fp):
         """Clone this generator with the exact same options."""
         return self.__class__(fp, self._mangle_from_, self._maxheaderlen)
diff --git a/Lib/email/Header.py b/Lib/email/header.py
similarity index 98%
rename from Lib/email/Header.py
rename to Lib/email/header.py
index 5e24afe..183c337 100644
--- a/Lib/email/Header.py
+++ b/Lib/email/header.py
@@ -1,16 +1,23 @@
-# Copyright (C) 2002-2004 Python Software Foundation
+# Copyright (C) 2002-2006 Python Software Foundation
 # Author: Ben Gertzfield, Barry Warsaw
 # Contact: email-sig@python.org
 
 """Header encoding and decoding functionality."""
 
+__all__ = [
+    'Header',
+    'decode_header',
+    'make_header',
+    ]
+
 import re
 import binascii
 
-import email.quopriMIME
-import email.base64MIME
-from email.Errors import HeaderParseError
-from email.Charset import Charset
+import email.quoprimime
+import email.base64mime
+
+from email.errors import HeaderParseError
+from email.charset import Charset
 
 NL = '\n'
 SPACE = ' '
@@ -42,7 +49,7 @@
 
 
 # Helpers
-_max_append = email.quopriMIME._max_append
+_max_append = email.quoprimime._max_append
 
 
 
@@ -82,10 +89,10 @@
                 encoded = parts[2]
                 dec = None
                 if encoding == 'q':
-                    dec = email.quopriMIME.header_decode(encoded)
+                    dec = email.quoprimime.header_decode(encoded)
                 elif encoding == 'b':
                     try:
-                        dec = email.base64MIME.decode(encoded)
+                        dec = email.base64mime.decode(encoded)
                     except binascii.Error:
                         # Turn this into a higher level exception.  BAW: Right
                         # now we throw the lower level exception away but
diff --git a/Lib/email/Iterators.py b/Lib/email/iterators.py
similarity index 90%
rename from Lib/email/Iterators.py
rename to Lib/email/iterators.py
index 74a93c7..e99f228 100644
--- a/Lib/email/Iterators.py
+++ b/Lib/email/iterators.py
@@ -1,9 +1,16 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Barry Warsaw
 # Contact: email-sig@python.org
 
 """Various types of useful iterators and generators."""
 
+__all__ = [
+    'body_line_iterator',
+    'typed_subpart_iterator',
+    'walk',
+    # Do not include _structure() since it's part of the debugging API.
+    ]
+
 import sys
 from cStringIO import StringIO
 
@@ -25,7 +32,6 @@
 
 
 # These two functions are imported into the Iterators.py interface module.
-# The Python 2.2 version uses generators for efficiency.
 def body_line_iterator(msg, decode=False):
     """Iterate over the parts, returning string payloads line-by-line.
 
diff --git a/Lib/email/Message.py b/Lib/email/message.py
similarity index 92%
rename from Lib/email/Message.py
rename to Lib/email/message.py
index bc76416..50d90b4 100644
--- a/Lib/email/Message.py
+++ b/Lib/email/message.py
@@ -4,6 +4,8 @@
 
 """Basic message object for the email package object model."""
 
+__all__ = ['Message']
+
 import re
 import uu
 import binascii
@@ -11,9 +13,9 @@
 from cStringIO import StringIO
 
 # Intrapackage imports
-from email import Utils
-from email import Errors
-from email import Charset
+import email.charset
+from email import utils
+from email import errors
 
 SEMISPACE = '; '
 
@@ -41,11 +43,11 @@
         if isinstance(value, tuple):
             # Encode as per RFC 2231
             param += '*'
-            value = Utils.encode_rfc2231(value[2], value[0], value[1])
+            value = utils.encode_rfc2231(value[2], value[0], value[1])
         # BAW: Please check this.  I think that if quote is set it should
         # force quoting even if not necessary.
         if quote or tspecials.search(value):
-            return '%s="%s"' % (param, Utils.quote(value))
+            return '%s="%s"' % (param, utils.quote(value))
         else:
             return '%s=%s' % (param, value)
     else:
@@ -70,14 +72,14 @@
 
 
 def _unquotevalue(value):
-    # This is different than Utils.collapse_rfc2231_value() because it doesn't
+    # This is different than utils.collapse_rfc2231_value() because it doesn't
     # try to convert the value to a unicode.  Message.get_param() and
     # Message.get_params() are both currently defined to return the tuple in
     # the face of RFC 2231 parameters.
     if isinstance(value, tuple):
-        return value[0], value[1], Utils.unquote(value[2])
+        return value[0], value[1], utils.unquote(value[2])
     else:
-        return Utils.unquote(value)
+        return utils.unquote(value)
 
 
 
@@ -188,17 +190,17 @@
                 return None
             cte = self.get('content-transfer-encoding', '').lower()
             if cte == 'quoted-printable':
-                return Utils._qdecode(payload)
+                return utils._qdecode(payload)
             elif cte == 'base64':
                 try:
-                    return Utils._bdecode(payload)
+                    return utils._bdecode(payload)
                 except binascii.Error:
                     # Incorrect padding
                     return payload
             elif cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'):
                 sfp = StringIO()
                 try:
-                    uu.decode(StringIO(payload+'\n'), sfp)
+                    uu.decode(StringIO(payload+'\n'), sfp, quiet=True)
                     payload = sfp.getvalue()
                 except uu.Error:
                     # Some decoding problem
@@ -237,8 +239,8 @@
             self._charset = None
             return
         if isinstance(charset, str):
-            charset = Charset.Charset(charset)
-        if not isinstance(charset, Charset.Charset):
+            charset = email.charset.Charset(charset)
+        if not isinstance(charset, email.charset.Charset):
             raise TypeError(charset)
         # BAW: should we accept strings that can serve as arguments to the
         # Charset constructor?
@@ -413,49 +415,6 @@
             raise KeyError(_name)
 
     #
-    # Deprecated methods.  These will be removed in email 3.1.
-    #
-
-    def get_type(self, failobj=None):
-        """Returns the message's content type.
-
-        The returned string is coerced to lowercase and returned as a single
-        string of the form `maintype/subtype'.  If there was no Content-Type
-        header in the message, failobj is returned (defaults to None).
-        """
-        warnings.warn('get_type() deprecated; use get_content_type()',
-                      DeprecationWarning, 2)
-        missing = object()
-        value = self.get('content-type', missing)
-        if value is missing:
-            return failobj
-        return paramre.split(value)[0].lower().strip()
-
-    def get_main_type(self, failobj=None):
-        """Return the message's main content type if present."""
-        warnings.warn('get_main_type() deprecated; use get_content_maintype()',
-                      DeprecationWarning, 2)
-        missing = object()
-        ctype = self.get_type(missing)
-        if ctype is missing:
-            return failobj
-        if ctype.count('/') <> 1:
-            return failobj
-        return ctype.split('/')[0]
-
-    def get_subtype(self, failobj=None):
-        """Return the message's content subtype if present."""
-        warnings.warn('get_subtype() deprecated; use get_content_subtype()',
-                      DeprecationWarning, 2)
-        missing = object()
-        ctype = self.get_type(missing)
-        if ctype is missing:
-            return failobj
-        if ctype.count('/') <> 1:
-            return failobj
-        return ctype.split('/')[1]
-
-    #
     # Use these three methods instead of the three above.
     #
 
@@ -537,7 +496,7 @@
                 name = p.strip()
                 val = ''
             params.append((name, val))
-        params = Utils.decode_params(params)
+        params = utils.decode_params(params)
         return params
 
     def get_params(self, failobj=None, header='content-type', unquote=True):
@@ -714,7 +673,7 @@
             filename = self.get_param('name', missing, 'content-disposition')
         if filename is missing:
             return failobj
-        return Utils.collapse_rfc2231_value(filename).strip()
+        return utils.collapse_rfc2231_value(filename).strip()
 
     def get_boundary(self, failobj=None):
         """Return the boundary associated with the payload if present.
@@ -727,7 +686,7 @@
         if boundary is missing:
             return failobj
         # RFC 2046 says that boundaries may begin but not end in w/s
-        return Utils.collapse_rfc2231_value(boundary).rstrip()
+        return utils.collapse_rfc2231_value(boundary).rstrip()
 
     def set_boundary(self, boundary):
         """Set the boundary parameter in Content-Type to 'boundary'.
@@ -744,7 +703,7 @@
         if params is missing:
             # There was no Content-Type header, and we don't know what type
             # to set it to, so raise an exception.
-            raise Errors.HeaderParseError, 'No Content-Type header found'
+            raise errors.HeaderParseError('No Content-Type header found')
         newparams = []
         foundp = False
         for pk, pv in params:
diff --git a/Lib/email/mime/__init__.py b/Lib/email/mime/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/email/mime/__init__.py
diff --git a/Lib/email/mime/application.py b/Lib/email/mime/application.py
new file mode 100644
index 0000000..6f8bb8a
--- /dev/null
+++ b/Lib/email/mime/application.py
@@ -0,0 +1,36 @@
+# Copyright (C) 2001-2006 Python Software Foundation
+# Author: Keith Dart
+# Contact: email-sig@python.org
+
+"""Class representing application/* type MIME documents."""
+
+__all__ = ["MIMEApplication"]
+
+from email import encoders
+from email.mime.nonmultipart import MIMENonMultipart
+
+
+class MIMEApplication(MIMENonMultipart):
+    """Class for generating application/* MIME documents."""
+
+    def __init__(self, _data, _subtype='octet-stream',
+                 _encoder=encoders.encode_base64, **_params):
+        """Create an application/* type MIME document.
+
+        _data is a string containing the raw applicatoin data.
+
+        _subtype is the MIME content type subtype, defaulting to
+        'octet-stream'.
+
+        _encoder is a function which will perform the actual encoding for
+        transport of the application data, defaulting to base64 encoding.
+
+        Any additional keyword arguments are passed to the base class
+        constructor, which turns them into parameters on the Content-Type
+        header.
+        """
+        if _subtype is None:
+            raise TypeError('Invalid application MIME subtype')
+        MIMENonMultipart.__init__(self, 'application', _subtype, **_params)
+        self.set_payload(_data)
+        _encoder(self)
diff --git a/Lib/email/MIMEAudio.py b/Lib/email/mime/audio.py
similarity index 91%
rename from Lib/email/MIMEAudio.py
rename to Lib/email/mime/audio.py
index 266ec4c..c7290c4 100644
--- a/Lib/email/MIMEAudio.py
+++ b/Lib/email/mime/audio.py
@@ -1,15 +1,16 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Anthony Baxter
 # Contact: email-sig@python.org
 
 """Class representing audio/* type MIME documents."""
 
-import sndhdr
-from cStringIO import StringIO
+__all__ = ['MIMEAudio']
 
-from email import Errors
-from email import Encoders
-from email.MIMENonMultipart import MIMENonMultipart
+import sndhdr
+
+from cStringIO import StringIO
+from email import encoders
+from email.mime.nonmultipart import MIMENonMultipart
 
 
 
@@ -42,7 +43,7 @@
     """Class for generating audio/* MIME documents."""
 
     def __init__(self, _audiodata, _subtype=None,
-                 _encoder=Encoders.encode_base64, **_params):
+                 _encoder=encoders.encode_base64, **_params):
         """Create an audio/* type MIME document.
 
         _audiodata is a string containing the raw audio data.  If this data
diff --git a/Lib/email/MIMEBase.py b/Lib/email/mime/base.py
similarity index 77%
rename from Lib/email/MIMEBase.py
rename to Lib/email/mime/base.py
index 88691f8..ac91925 100644
--- a/Lib/email/MIMEBase.py
+++ b/Lib/email/mime/base.py
@@ -1,14 +1,16 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Barry Warsaw
 # Contact: email-sig@python.org
 
 """Base class for MIME specializations."""
 
-from email import Message
+__all__ = ['MIMEBase']
+
+from email import message
 
 
 
-class MIMEBase(Message.Message):
+class MIMEBase(message.Message):
     """Base class for MIME specializations."""
 
     def __init__(self, _maintype, _subtype, **_params):
@@ -18,7 +20,7 @@
         arguments.  Additional parameters for this header are taken from the
         keyword arguments.
         """
-        Message.Message.__init__(self)
+        message.Message.__init__(self)
         ctype = '%s/%s' % (_maintype, _subtype)
         self.add_header('Content-Type', ctype, **_params)
         self['MIME-Version'] = '1.0'
diff --git a/Lib/email/MIMEImage.py b/Lib/email/mime/image.py
similarity index 87%
rename from Lib/email/MIMEImage.py
rename to Lib/email/mime/image.py
index a658067..5563823 100644
--- a/Lib/email/MIMEImage.py
+++ b/Lib/email/mime/image.py
@@ -1,14 +1,15 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Barry Warsaw
 # Contact: email-sig@python.org
 
 """Class representing image/* type MIME documents."""
 
+__all__ = ['MIMEImage']
+
 import imghdr
 
-from email import Errors
-from email import Encoders
-from email.MIMENonMultipart import MIMENonMultipart
+from email import encoders
+from email.mime.nonmultipart import MIMENonMultipart
 
 
 
@@ -16,7 +17,7 @@
     """Class for generating image/* type MIME documents."""
 
     def __init__(self, _imagedata, _subtype=None,
-                 _encoder=Encoders.encode_base64, **_params):
+                 _encoder=encoders.encode_base64, **_params):
         """Create an image/* type MIME document.
 
         _imagedata is a string containing the raw image data.  If this data
diff --git a/Lib/email/MIMEMessage.py b/Lib/email/mime/message.py
similarity index 80%
rename from Lib/email/MIMEMessage.py
rename to Lib/email/mime/message.py
index 3021934..275dbfd 100644
--- a/Lib/email/MIMEMessage.py
+++ b/Lib/email/mime/message.py
@@ -1,11 +1,13 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Barry Warsaw
 # Contact: email-sig@python.org
 
 """Class representing message/* MIME documents."""
 
-from email import Message
-from email.MIMENonMultipart import MIMENonMultipart
+__all__ = ['MIMEMessage']
+
+from email import message
+from email.mime.nonmultipart import MIMENonMultipart
 
 
 
@@ -23,10 +25,10 @@
         the term "rfc822" is technically outdated by RFC 2822).
         """
         MIMENonMultipart.__init__(self, 'message', _subtype)
-        if not isinstance(_msg, Message.Message):
+        if not isinstance(_msg, message.Message):
             raise TypeError('Argument is not an instance of Message')
         # It's convenient to use this base class method.  We need to do it
         # this way or we'll get an exception
-        Message.Message.attach(self, _msg)
+        message.Message.attach(self, _msg)
         # And be sure our default type is set correctly
         self.set_default_type('message/rfc822')
diff --git a/Lib/email/MIMEMultipart.py b/Lib/email/mime/multipart.py
similarity index 83%
rename from Lib/email/MIMEMultipart.py
rename to Lib/email/mime/multipart.py
index 9072a64..5c8c9db 100644
--- a/Lib/email/MIMEMultipart.py
+++ b/Lib/email/mime/multipart.py
@@ -1,14 +1,16 @@
-# Copyright (C) 2002-2004 Python Software Foundation
+# Copyright (C) 2002-2006 Python Software Foundation
 # Author: Barry Warsaw
 # Contact: email-sig@python.org
 
 """Base class for MIME multipart/* type messages."""
 
-from email import MIMEBase
+__all__ = ['MIMEMultipart']
+
+from email.mime.base import MIMEBase
 
 
 
-class MIMEMultipart(MIMEBase.MIMEBase):
+class MIMEMultipart(MIMEBase):
     """Base class for MIME multipart/* type messages."""
 
     def __init__(self, _subtype='mixed', boundary=None, _subparts=None,
@@ -31,7 +33,7 @@
         Additional parameters for the Content-Type header are taken from the
         keyword arguments (or passed into the _params argument).
         """
-        MIMEBase.MIMEBase.__init__(self, 'multipart', _subtype, **_params)
+        MIMEBase.__init__(self, 'multipart', _subtype, **_params)
         if _subparts:
             for p in _subparts:
                 self.attach(p)
diff --git a/Lib/email/MIMENonMultipart.py b/Lib/email/mime/nonmultipart.py
similarity index 69%
rename from Lib/email/MIMENonMultipart.py
rename to Lib/email/mime/nonmultipart.py
index 4195d2a..dd280b5 100644
--- a/Lib/email/MIMENonMultipart.py
+++ b/Lib/email/mime/nonmultipart.py
@@ -1,15 +1,17 @@
-# Copyright (C) 2002-2004 Python Software Foundation
+# Copyright (C) 2002-2006 Python Software Foundation
 # Author: Barry Warsaw
 # Contact: email-sig@python.org
 
 """Base class for MIME type messages that are not multipart."""
 
-from email import Errors
-from email import MIMEBase
+__all__ = ['MIMENonMultipart']
+
+from email import errors
+from email.mime.base import MIMEBase
 
 
 
-class MIMENonMultipart(MIMEBase.MIMEBase):
+class MIMENonMultipart(MIMEBase):
     """Base class for MIME multipart/* type messages."""
 
     __pychecker__ = 'unusednames=payload'
@@ -18,7 +20,7 @@
         # The public API prohibits attaching multiple subparts to MIMEBase
         # derived subtypes since none of them are, by definition, of content
         # type multipart/*
-        raise Errors.MultipartConversionError(
+        raise errors.MultipartConversionError(
             'Cannot attach additional subparts to non-multipart/*')
 
     del __pychecker__
diff --git a/Lib/email/MIMEText.py b/Lib/email/mime/text.py
similarity index 82%
rename from Lib/email/MIMEText.py
rename to Lib/email/mime/text.py
index 5ef1876..5747db5 100644
--- a/Lib/email/MIMEText.py
+++ b/Lib/email/mime/text.py
@@ -1,11 +1,13 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Barry Warsaw
 # Contact: email-sig@python.org
 
 """Class representing text/* type MIME documents."""
 
-from email.MIMENonMultipart import MIMENonMultipart
-from email.Encoders import encode_7or8bit
+__all__ = ['MIMEText']
+
+from email.encoders import encode_7or8bit
+from email.mime.nonmultipart import MIMENonMultipart
 
 
 
diff --git a/Lib/email/Parser.py b/Lib/email/parser.py
similarity index 94%
rename from Lib/email/Parser.py
rename to Lib/email/parser.py
index 0c05224..2fcaf25 100644
--- a/Lib/email/Parser.py
+++ b/Lib/email/parser.py
@@ -1,13 +1,16 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Barry Warsaw, Thomas Wouters, Anthony Baxter
 # Contact: email-sig@python.org
 
 """A parser of RFC 2822 and MIME email messages."""
 
+__all__ = ['Parser', 'HeaderParser']
+
 import warnings
 from cStringIO import StringIO
-from email.FeedParser import FeedParser
-from email.Message import Message
+
+from email.feedparser import FeedParser
+from email.message import Message
 
 
 
diff --git a/Lib/email/quopriMIME.py b/Lib/email/quoprimime.py
similarity index 96%
rename from Lib/email/quopriMIME.py
rename to Lib/email/quoprimime.py
index a9b5d49..a5658dd 100644
--- a/Lib/email/quopriMIME.py
+++ b/Lib/email/quoprimime.py
@@ -1,4 +1,4 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Ben Gertzfield
 # Contact: email-sig@python.org
 
@@ -26,9 +26,27 @@
 wrapping issues, use the email.Header module.
 """
 
+__all__ = [
+    'body_decode',
+    'body_encode',
+    'body_quopri_check',
+    'body_quopri_len',
+    'decode',
+    'decodestring',
+    'encode',
+    'encodestring',
+    'header_decode',
+    'header_encode',
+    'header_quopri_check',
+    'header_quopri_len',
+    'quote',
+    'unquote',
+    ]
+
 import re
+
 from string import hexdigits
-from email.Utils import fix_eols
+from email.utils import fix_eols
 
 CRLF = '\r\n'
 NL = '\n'
diff --git a/Lib/email/test/test_email.py b/Lib/email/test/test_email.py
index 5a42c227..d977693 100644
--- a/Lib/email/test/test_email.py
+++ b/Lib/email/test/test_email.py
@@ -39,9 +39,6 @@
 EMPTYSTRING = ''
 SPACE = ' '
 
-# We don't care about DeprecationWarnings
-warnings.filterwarnings('ignore', '', DeprecationWarning, __name__)
-
 
 
 def openfile(filename, mode='r'):
@@ -87,7 +84,7 @@
         charset = Charset('iso-8859-1')
         msg.set_charset(charset)
         eq(msg['mime-version'], '1.0')
-        eq(msg.get_type(), 'text/plain')
+        eq(msg.get_content_type(), 'text/plain')
         eq(msg['content-type'], 'text/plain; charset="iso-8859-1"')
         eq(msg.get_param('charset'), 'iso-8859-1')
         eq(msg['content-transfer-encoding'], 'quoted-printable')
@@ -211,6 +208,19 @@
         msg.set_payload('foo')
         eq(msg.get_payload(decode=True), 'foo')
 
+    def test_decode_bogus_uu_payload_quietly(self):
+        msg = Message()
+        msg.set_payload('begin 664 foo.txt\n%<W1F=0000H \n \nend\n')
+        msg['Content-Transfer-Encoding'] = 'x-uuencode'
+        old_stderr = sys.stderr
+        try:
+            sys.stderr = sfp = StringIO()
+            # We don't care about the payload
+            msg.get_payload(decode=True)
+        finally:
+            sys.stderr = old_stderr
+        self.assertEqual(sfp.getvalue(), '')
+
     def test_decoded_generator(self):
         eq = self.assertEqual
         msg = self._msgobj('msg_07.txt')
@@ -893,7 +903,7 @@
         self._au = MIMEAudio(self._audiodata)
 
     def test_guess_minor_type(self):
-        self.assertEqual(self._au.get_type(), 'audio/basic')
+        self.assertEqual(self._au.get_content_type(), 'audio/basic')
 
     def test_encoding(self):
         payload = self._au.get_payload()
@@ -901,7 +911,7 @@
 
     def test_checkSetMinor(self):
         au = MIMEAudio(self._audiodata, 'fish')
-        self.assertEqual(au.get_type(), 'audio/fish')
+        self.assertEqual(au.get_content_type(), 'audio/fish')
 
     def test_add_header(self):
         eq = self.assertEqual
@@ -936,7 +946,7 @@
         self._im = MIMEImage(self._imgdata)
 
     def test_guess_minor_type(self):
-        self.assertEqual(self._im.get_type(), 'image/gif')
+        self.assertEqual(self._im.get_content_type(), 'image/gif')
 
     def test_encoding(self):
         payload = self._im.get_payload()
@@ -944,7 +954,7 @@
 
     def test_checkSetMinor(self):
         im = MIMEImage(self._imgdata, 'fish')
-        self.assertEqual(im.get_type(), 'image/fish')
+        self.assertEqual(im.get_content_type(), 'image/fish')
 
     def test_add_header(self):
         eq = self.assertEqual
@@ -976,7 +986,7 @@
     def test_types(self):
         eq = self.assertEqual
         unless = self.failUnless
-        eq(self._msg.get_type(), 'text/plain')
+        eq(self._msg.get_content_type(), 'text/plain')
         eq(self._msg.get_param('charset'), 'us-ascii')
         missing = []
         unless(self._msg.get_param('foobar', missing) is missing)
@@ -1045,7 +1055,7 @@
         # tests
         m = self._msg
         unless(m.is_multipart())
-        eq(m.get_type(), 'multipart/mixed')
+        eq(m.get_content_type(), 'multipart/mixed')
         eq(len(m.get_payload()), 2)
         raises(IndexError, m.get_payload, 2)
         m0 = m.get_payload(0)
@@ -1379,7 +1389,7 @@
     def test_parse_missing_minor_type(self):
         eq = self.assertEqual
         msg = self._msgobj('msg_14.txt')
-        eq(msg.get_type(), 'text')
+        eq(msg.get_content_type(), 'text/plain')
         eq(msg.get_content_maintype(), 'text')
         eq(msg.get_content_subtype(), 'plain')
 
@@ -1531,7 +1541,7 @@
         m = Message()
         m['Subject'] = subject
         r = MIMEMessage(m)
-        eq(r.get_type(), 'message/rfc822')
+        eq(r.get_content_type(), 'message/rfc822')
         payload = r.get_payload()
         unless(isinstance(payload, list))
         eq(len(payload), 1)
@@ -1572,7 +1582,7 @@
         eq = self.assertEqual
         unless = self.failUnless
         msg = self._msgobj('msg_11.txt')
-        eq(msg.get_type(), 'message/rfc822')
+        eq(msg.get_content_type(), 'message/rfc822')
         payload = msg.get_payload()
         unless(isinstance(payload, list))
         eq(len(payload), 1)
@@ -1586,12 +1596,12 @@
         unless = self.failUnless
         # msg 16 is a Delivery Status Notification, see RFC 1894
         msg = self._msgobj('msg_16.txt')
-        eq(msg.get_type(), 'multipart/report')
+        eq(msg.get_content_type(), 'multipart/report')
         unless(msg.is_multipart())
         eq(len(msg.get_payload()), 3)
         # Subpart 1 is a text/plain, human readable section
         subpart = msg.get_payload(0)
-        eq(subpart.get_type(), 'text/plain')
+        eq(subpart.get_content_type(), 'text/plain')
         eq(subpart.get_payload(), """\
 This report relates to a message you sent with the following header fields:
 
@@ -1611,7 +1621,7 @@
         # consists of two blocks of headers, represented by two nested Message
         # objects.
         subpart = msg.get_payload(1)
-        eq(subpart.get_type(), 'message/delivery-status')
+        eq(subpart.get_content_type(), 'message/delivery-status')
         eq(len(subpart.get_payload()), 2)
         # message/delivery-status should treat each block as a bunch of
         # headers, i.e. a bunch of Message objects.
@@ -1629,13 +1639,13 @@
         eq(dsn2.get_param('rfc822', header='final-recipient'), '')
         # Subpart 3 is the original message
         subpart = msg.get_payload(2)
-        eq(subpart.get_type(), 'message/rfc822')
+        eq(subpart.get_content_type(), 'message/rfc822')
         payload = subpart.get_payload()
         unless(isinstance(payload, list))
         eq(len(payload), 1)
         subsubpart = payload[0]
         unless(isinstance(subsubpart, Message))
-        eq(subsubpart.get_type(), 'text/plain')
+        eq(subsubpart.get_content_type(), 'text/plain')
         eq(subsubpart['message-id'],
            '<002001c144a6$8752e060$56104586@oxy.edu>')
 
@@ -1706,16 +1716,16 @@
             fp.close()
         container1 = msg.get_payload(0)
         eq(container1.get_default_type(), 'message/rfc822')
-        eq(container1.get_type(), None)
+        eq(container1.get_content_type(), 'message/rfc822')
         container2 = msg.get_payload(1)
         eq(container2.get_default_type(), 'message/rfc822')
-        eq(container2.get_type(), None)
+        eq(container2.get_content_type(), 'message/rfc822')
         container1a = container1.get_payload(0)
         eq(container1a.get_default_type(), 'text/plain')
-        eq(container1a.get_type(), 'text/plain')
+        eq(container1a.get_content_type(), 'text/plain')
         container2a = container2.get_payload(0)
         eq(container2a.get_default_type(), 'text/plain')
-        eq(container2a.get_type(), 'text/plain')
+        eq(container2a.get_content_type(), 'text/plain')
 
     def test_default_type_with_explicit_container_type(self):
         eq = self.assertEqual
@@ -1726,16 +1736,16 @@
             fp.close()
         container1 = msg.get_payload(0)
         eq(container1.get_default_type(), 'message/rfc822')
-        eq(container1.get_type(), 'message/rfc822')
+        eq(container1.get_content_type(), 'message/rfc822')
         container2 = msg.get_payload(1)
         eq(container2.get_default_type(), 'message/rfc822')
-        eq(container2.get_type(), 'message/rfc822')
+        eq(container2.get_content_type(), 'message/rfc822')
         container1a = container1.get_payload(0)
         eq(container1a.get_default_type(), 'text/plain')
-        eq(container1a.get_type(), 'text/plain')
+        eq(container1a.get_content_type(), 'text/plain')
         container2a = container2.get_payload(0)
         eq(container2a.get_default_type(), 'text/plain')
-        eq(container2a.get_type(), 'text/plain')
+        eq(container2a.get_content_type(), 'text/plain')
 
     def test_default_type_non_parsed(self):
         eq = self.assertEqual
@@ -1750,9 +1760,9 @@
         subpart2 = MIMEMessage(subpart2a)
         container.attach(subpart1)
         container.attach(subpart2)
-        eq(subpart1.get_type(), 'message/rfc822')
+        eq(subpart1.get_content_type(), 'message/rfc822')
         eq(subpart1.get_default_type(), 'message/rfc822')
-        eq(subpart2.get_type(), 'message/rfc822')
+        eq(subpart2.get_content_type(), 'message/rfc822')
         eq(subpart2.get_default_type(), 'message/rfc822')
         neq(container.as_string(0), '''\
 Content-Type: multipart/digest; boundary="BOUNDARY"
@@ -1784,9 +1794,9 @@
         del subpart1['mime-version']
         del subpart2['content-type']
         del subpart2['mime-version']
-        eq(subpart1.get_type(), None)
+        eq(subpart1.get_content_type(), 'message/rfc822')
         eq(subpart1.get_default_type(), 'message/rfc822')
-        eq(subpart2.get_type(), None)
+        eq(subpart2.get_content_type(), 'message/rfc822')
         eq(subpart2.get_default_type(), 'message/rfc822')
         neq(container.as_string(0), '''\
 Content-Type: multipart/digest; boundary="BOUNDARY"
@@ -1847,7 +1857,7 @@
     def test_parse_text_message(self):
         eq = self.assertEquals
         msg, text = self._msgobj('msg_01.txt')
-        eq(msg.get_type(), 'text/plain')
+        eq(msg.get_content_type(), 'text/plain')
         eq(msg.get_content_maintype(), 'text')
         eq(msg.get_content_subtype(), 'plain')
         eq(msg.get_params()[1], ('charset', 'us-ascii'))
@@ -1859,7 +1869,7 @@
     def test_parse_untyped_message(self):
         eq = self.assertEquals
         msg, text = self._msgobj('msg_03.txt')
-        eq(msg.get_type(), None)
+        eq(msg.get_content_type(), 'text/plain')
         eq(msg.get_params(), None)
         eq(msg.get_param('charset'), None)
         self._idempotent(msg, text)
@@ -1933,7 +1943,7 @@
         unless = self.failUnless
         # Get a message object and reset the seek pointer for other tests
         msg, text = self._msgobj('msg_05.txt')
-        eq(msg.get_type(), 'multipart/report')
+        eq(msg.get_content_type(), 'multipart/report')
         # Test the Content-Type: parameters
         params = {}
         for pk, pv in msg.get_params():
@@ -1945,13 +1955,13 @@
         eq(len(msg.get_payload()), 3)
         # Make sure the subparts are what we expect
         msg1 = msg.get_payload(0)
-        eq(msg1.get_type(), 'text/plain')
+        eq(msg1.get_content_type(), 'text/plain')
         eq(msg1.get_payload(), 'Yadda yadda yadda\n')
         msg2 = msg.get_payload(1)
-        eq(msg2.get_type(), None)
+        eq(msg2.get_content_type(), 'text/plain')
         eq(msg2.get_payload(), 'Yadda yadda yadda\n')
         msg3 = msg.get_payload(2)
-        eq(msg3.get_type(), 'message/rfc822')
+        eq(msg3.get_content_type(), 'message/rfc822')
         self.failUnless(isinstance(msg3, Message))
         payload = msg3.get_payload()
         unless(isinstance(payload, list))
@@ -1965,7 +1975,7 @@
         unless = self.failUnless
         msg, text = self._msgobj('msg_06.txt')
         # Check some of the outer headers
-        eq(msg.get_type(), 'message/rfc822')
+        eq(msg.get_content_type(), 'message/rfc822')
         # Make sure the payload is a list of exactly one sub-Message, and that
         # that submessage has a type of text/plain
         payload = msg.get_payload()
@@ -1973,7 +1983,7 @@
         eq(len(payload), 1)
         msg1 = payload[0]
         self.failUnless(isinstance(msg1, Message))
-        eq(msg1.get_type(), 'text/plain')
+        eq(msg1.get_content_type(), 'text/plain')
         self.failUnless(isinstance(msg1.get_payload(), str))
         eq(msg1.get_payload(), '\n')
 
@@ -2058,13 +2068,19 @@
         module = __import__('email')
         all = module.__all__
         all.sort()
-        self.assertEqual(all, ['Charset', 'Encoders', 'Errors', 'Generator',
-                               'Header', 'Iterators', 'MIMEAudio', 'MIMEBase',
-                               'MIMEImage', 'MIMEMessage', 'MIMEMultipart',
-                               'MIMENonMultipart', 'MIMEText', 'Message',
-                               'Parser', 'Utils', 'base64MIME',
-                               'message_from_file', 'message_from_string',
-                               'quopriMIME'])
+        self.assertEqual(all, [
+            # Old names
+            'Charset', 'Encoders', 'Errors', 'Generator',
+            'Header', 'Iterators', 'MIMEAudio', 'MIMEBase',
+            'MIMEImage', 'MIMEMessage', 'MIMEMultipart',
+            'MIMENonMultipart', 'MIMEText', 'Message',
+            'Parser', 'Utils', 'base64MIME',
+            # new names
+            'base64mime', 'charset', 'encoders', 'errors', 'generator',
+            'header', 'iterators', 'message', 'message_from_file',
+            'message_from_string', 'mime', 'parser',
+            'quopriMIME', 'quoprimime', 'utils',
+            ])
 
     def test_formatdate(self):
         now = time.time()
@@ -2097,12 +2113,12 @@
     def test_parsedate_no_dayofweek(self):
         eq = self.assertEqual
         eq(Utils.parsedate_tz('25 Feb 2003 13:47:26 -0800'),
-           (2003, 2, 25, 13, 47, 26, 0, 1, 0, -28800))
+           (2003, 2, 25, 13, 47, 26, 0, 1, -1, -28800))
 
     def test_parsedate_compact_no_dayofweek(self):
         eq = self.assertEqual
         eq(Utils.parsedate_tz('5 Feb 2003 13:47:26 -0800'),
-           (2003, 2, 5, 13, 47, 26, 0, 1, 0, -28800))
+           (2003, 2, 5, 13, 47, 26, 0, 1, -1, -28800))
 
     def test_parsedate_acceptable_to_time_functions(self):
         eq = self.assertEqual
@@ -2356,7 +2372,7 @@
             fp.close()
         eq(msg['from'], 'ppp-request@zzz.org')
         eq(msg['to'], 'ppp@zzz.org')
-        eq(msg.get_type(), 'multipart/mixed')
+        eq(msg.get_content_type(), 'multipart/mixed')
         self.failIf(msg.is_multipart())
         self.failUnless(isinstance(msg.get_payload(), str))
 
@@ -2405,10 +2421,10 @@
             fp.close()
         eq(len(msg.get_payload()), 2)
         part1 = msg.get_payload(0)
-        eq(part1.get_type(), 'text/plain')
+        eq(part1.get_content_type(), 'text/plain')
         eq(part1.get_payload(), 'Simple email with attachment.\r\n\r\n')
         part2 = msg.get_payload(1)
-        eq(part2.get_type(), 'application/riscos')
+        eq(part2.get_content_type(), 'application/riscos')
 
     def test_multipart_digest_with_extra_mime_headers(self):
         eq = self.assertEqual
@@ -2427,21 +2443,21 @@
         eq(msg.is_multipart(), 1)
         eq(len(msg.get_payload()), 2)
         part1 = msg.get_payload(0)
-        eq(part1.get_type(), 'message/rfc822')
+        eq(part1.get_content_type(), 'message/rfc822')
         eq(part1.is_multipart(), 1)
         eq(len(part1.get_payload()), 1)
         part1a = part1.get_payload(0)
         eq(part1a.is_multipart(), 0)
-        eq(part1a.get_type(), 'text/plain')
+        eq(part1a.get_content_type(), 'text/plain')
         neq(part1a.get_payload(), 'message 1\n')
         # next message/rfc822
         part2 = msg.get_payload(1)
-        eq(part2.get_type(), 'message/rfc822')
+        eq(part2.get_content_type(), 'message/rfc822')
         eq(part2.is_multipart(), 1)
         eq(len(part2.get_payload()), 1)
         part2a = part2.get_payload(0)
         eq(part2a.is_multipart(), 0)
-        eq(part2a.get_type(), 'text/plain')
+        eq(part2a.get_content_type(), 'text/plain')
         neq(part2a.get_payload(), 'message 2\n')
 
     def test_three_lines(self):
@@ -2723,6 +2739,11 @@
         c = Charset('fake')
         eq('hello w\xf6rld', c.body_encode('hello w\xf6rld'))
 
+    def test_unicode_charset_name(self):
+        charset = Charset(u'us-ascii')
+        self.assertEqual(str(charset), 'us-ascii')
+        self.assertRaises(Errors.CharsetError, Charset, 'asc\xffii')
+
 
 
 # Test multilingual MIME headers.
diff --git a/Lib/email/test/test_email_codecs.py b/Lib/email/test/test_email_codecs.py
index 159989c..38b7d95 100644
--- a/Lib/email/test/test_email_codecs.py
+++ b/Lib/email/test/test_email_codecs.py
@@ -10,6 +10,13 @@
 from email.Header import Header, decode_header
 from email.Message import Message
 
+# We're compatible with Python 2.3, but it doesn't have the built-in Asian
+# codecs, so we have to skip all these tests.
+try:
+    unicode('foo', 'euc-jp')
+except LookupError:
+    raise TestSkipped
+
 
 
 class TestEmailAsianCodecs(TestEmailBase):
diff --git a/Lib/email/test/test_email_codecs_renamed.py b/Lib/email/test/test_email_codecs_renamed.py
new file mode 100644
index 0000000..56baccd
--- /dev/null
+++ b/Lib/email/test/test_email_codecs_renamed.py
@@ -0,0 +1,77 @@
+# Copyright (C) 2002-2006 Python Software Foundation
+# Contact: email-sig@python.org
+# email package unit tests for (optional) Asian codecs
+
+import unittest
+from test.test_support import TestSkipped, run_unittest
+
+from email.test.test_email import TestEmailBase
+from email.charset import Charset
+from email.header import Header, decode_header
+from email.message import Message
+
+# We're compatible with Python 2.3, but it doesn't have the built-in Asian
+# codecs, so we have to skip all these tests.
+try:
+    unicode('foo', 'euc-jp')
+except LookupError:
+    raise TestSkipped
+
+
+
+class TestEmailAsianCodecs(TestEmailBase):
+    def test_japanese_codecs(self):
+        eq = self.ndiffAssertEqual
+        j = Charset("euc-jp")
+        g = Charset("iso-8859-1")
+        h = Header("Hello World!")
+        jhello = '\xa5\xcf\xa5\xed\xa1\xbc\xa5\xef\xa1\xbc\xa5\xeb\xa5\xc9\xa1\xaa'
+        ghello = 'Gr\xfc\xdf Gott!'
+        h.append(jhello, j)
+        h.append(ghello, g)
+        # BAW: This used to -- and maybe should -- fold the two iso-8859-1
+        # chunks into a single encoded word.  However it doesn't violate the
+        # standard to have them as two encoded chunks and maybe it's
+        # reasonable <wink> for each .append() call to result in a separate
+        # encoded word.
+        eq(h.encode(), """\
+Hello World! =?iso-2022-jp?b?GyRCJU8lbSE8JW8hPCVrJUkhKhsoQg==?=
+ =?iso-8859-1?q?Gr=FC=DF?= =?iso-8859-1?q?_Gott!?=""")
+        eq(decode_header(h.encode()),
+           [('Hello World!', None),
+            ('\x1b$B%O%m!<%o!<%k%I!*\x1b(B', 'iso-2022-jp'),
+            ('Gr\xfc\xdf Gott!', 'iso-8859-1')])
+        long = 'test-ja \xa4\xd8\xc5\xea\xb9\xc6\xa4\xb5\xa4\xec\xa4\xbf\xa5\xe1\xa1\xbc\xa5\xeb\xa4\xcf\xbb\xca\xb2\xf1\xbc\xd4\xa4\xce\xbe\xb5\xc7\xa7\xa4\xf2\xc2\xd4\xa4\xc3\xa4\xc6\xa4\xa4\xa4\xde\xa4\xb9'
+        h = Header(long, j, header_name="Subject")
+        # test a very long header
+        enc = h.encode()
+        # TK: splitting point may differ by codec design and/or Header encoding
+        eq(enc , """\
+=?iso-2022-jp?b?dGVzdC1qYSAbJEIkWEVqOUYkNSRsJD8lYSE8JWskTztKGyhC?=
+ =?iso-2022-jp?b?GyRCMnE8VCROPjVHJyRyQlQkQyRGJCQkXiQ5GyhC?=""")
+        # TK: full decode comparison
+        eq(h.__unicode__().encode('euc-jp'), long)
+
+    def test_payload_encoding(self):
+        jhello = '\xa5\xcf\xa5\xed\xa1\xbc\xa5\xef\xa1\xbc\xa5\xeb\xa5\xc9\xa1\xaa'
+        jcode  = 'euc-jp'
+        msg = Message()
+        msg.set_payload(jhello, jcode)
+        ustr = unicode(msg.get_payload(), msg.get_content_charset())
+        self.assertEqual(jhello, ustr.encode(jcode))
+
+
+
+def suite():
+    suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(TestEmailAsianCodecs))
+    return suite
+
+
+def test_main():
+    run_unittest(TestEmailAsianCodecs)
+
+
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='suite')
diff --git a/Lib/email/test/test_email_renamed.py b/Lib/email/test/test_email_renamed.py
new file mode 100644
index 0000000..4ac2ee9
--- /dev/null
+++ b/Lib/email/test/test_email_renamed.py
@@ -0,0 +1,3078 @@
+# Copyright (C) 2001-2006 Python Software Foundation
+# Contact: email-sig@python.org
+# email package unit tests
+
+import os
+import sys
+import time
+import base64
+import difflib
+import unittest
+import warnings
+from cStringIO import StringIO
+
+import email
+
+from email.charset import Charset
+from email.header import Header, decode_header, make_header
+from email.parser import Parser, HeaderParser
+from email.generator import Generator, DecodedGenerator
+from email.message import Message
+from email.mime.application import MIMEApplication
+from email.mime.audio import MIMEAudio
+from email.mime.text import MIMEText
+from email.mime.image import MIMEImage
+from email.mime.base import MIMEBase
+from email.mime.message import MIMEMessage
+from email.mime.multipart import MIMEMultipart
+from email import utils
+from email import errors
+from email import encoders
+from email import iterators
+from email import base64mime
+from email import quoprimime
+
+from test.test_support import findfile, run_unittest
+from email.test import __file__ as landmark
+
+
+NL = '\n'
+EMPTYSTRING = ''
+SPACE = ' '
+
+
+
+def openfile(filename, mode='r'):
+    path = os.path.join(os.path.dirname(landmark), 'data', filename)
+    return open(path, mode)
+
+
+
+# Base test class
+class TestEmailBase(unittest.TestCase):
+    def ndiffAssertEqual(self, first, second):
+        """Like failUnlessEqual except use ndiff for readable output."""
+        if first <> second:
+            sfirst = str(first)
+            ssecond = str(second)
+            diff = difflib.ndiff(sfirst.splitlines(), ssecond.splitlines())
+            fp = StringIO()
+            print >> fp, NL, NL.join(diff)
+            raise self.failureException, fp.getvalue()
+
+    def _msgobj(self, filename):
+        fp = openfile(findfile(filename))
+        try:
+            msg = email.message_from_file(fp)
+        finally:
+            fp.close()
+        return msg
+
+
+
+# Test various aspects of the Message class's API
+class TestMessageAPI(TestEmailBase):
+    def test_get_all(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_20.txt')
+        eq(msg.get_all('cc'), ['ccc@zzz.org', 'ddd@zzz.org', 'eee@zzz.org'])
+        eq(msg.get_all('xx', 'n/a'), 'n/a')
+
+    def test_getset_charset(self):
+        eq = self.assertEqual
+        msg = Message()
+        eq(msg.get_charset(), None)
+        charset = Charset('iso-8859-1')
+        msg.set_charset(charset)
+        eq(msg['mime-version'], '1.0')
+        eq(msg.get_content_type(), 'text/plain')
+        eq(msg['content-type'], 'text/plain; charset="iso-8859-1"')
+        eq(msg.get_param('charset'), 'iso-8859-1')
+        eq(msg['content-transfer-encoding'], 'quoted-printable')
+        eq(msg.get_charset().input_charset, 'iso-8859-1')
+        # Remove the charset
+        msg.set_charset(None)
+        eq(msg.get_charset(), None)
+        eq(msg['content-type'], 'text/plain')
+        # Try adding a charset when there's already MIME headers present
+        msg = Message()
+        msg['MIME-Version'] = '2.0'
+        msg['Content-Type'] = 'text/x-weird'
+        msg['Content-Transfer-Encoding'] = 'quinted-puntable'
+        msg.set_charset(charset)
+        eq(msg['mime-version'], '2.0')
+        eq(msg['content-type'], 'text/x-weird; charset="iso-8859-1"')
+        eq(msg['content-transfer-encoding'], 'quinted-puntable')
+
+    def test_set_charset_from_string(self):
+        eq = self.assertEqual
+        msg = Message()
+        msg.set_charset('us-ascii')
+        eq(msg.get_charset().input_charset, 'us-ascii')
+        eq(msg['content-type'], 'text/plain; charset="us-ascii"')
+
+    def test_set_payload_with_charset(self):
+        msg = Message()
+        charset = Charset('iso-8859-1')
+        msg.set_payload('This is a string payload', charset)
+        self.assertEqual(msg.get_charset().input_charset, 'iso-8859-1')
+
+    def test_get_charsets(self):
+        eq = self.assertEqual
+
+        msg = self._msgobj('msg_08.txt')
+        charsets = msg.get_charsets()
+        eq(charsets, [None, 'us-ascii', 'iso-8859-1', 'iso-8859-2', 'koi8-r'])
+
+        msg = self._msgobj('msg_09.txt')
+        charsets = msg.get_charsets('dingbat')
+        eq(charsets, ['dingbat', 'us-ascii', 'iso-8859-1', 'dingbat',
+                      'koi8-r'])
+
+        msg = self._msgobj('msg_12.txt')
+        charsets = msg.get_charsets()
+        eq(charsets, [None, 'us-ascii', 'iso-8859-1', None, 'iso-8859-2',
+                      'iso-8859-3', 'us-ascii', 'koi8-r'])
+
+    def test_get_filename(self):
+        eq = self.assertEqual
+
+        msg = self._msgobj('msg_04.txt')
+        filenames = [p.get_filename() for p in msg.get_payload()]
+        eq(filenames, ['msg.txt', 'msg.txt'])
+
+        msg = self._msgobj('msg_07.txt')
+        subpart = msg.get_payload(1)
+        eq(subpart.get_filename(), 'dingusfish.gif')
+
+    def test_get_filename_with_name_parameter(self):
+        eq = self.assertEqual
+
+        msg = self._msgobj('msg_44.txt')
+        filenames = [p.get_filename() for p in msg.get_payload()]
+        eq(filenames, ['msg.txt', 'msg.txt'])
+
+    def test_get_boundary(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_07.txt')
+        # No quotes!
+        eq(msg.get_boundary(), 'BOUNDARY')
+
+    def test_set_boundary(self):
+        eq = self.assertEqual
+        # This one has no existing boundary parameter, but the Content-Type:
+        # header appears fifth.
+        msg = self._msgobj('msg_01.txt')
+        msg.set_boundary('BOUNDARY')
+        header, value = msg.items()[4]
+        eq(header.lower(), 'content-type')
+        eq(value, 'text/plain; charset="us-ascii"; boundary="BOUNDARY"')
+        # This one has a Content-Type: header, with a boundary, stuck in the
+        # middle of its headers.  Make sure the order is preserved; it should
+        # be fifth.
+        msg = self._msgobj('msg_04.txt')
+        msg.set_boundary('BOUNDARY')
+        header, value = msg.items()[4]
+        eq(header.lower(), 'content-type')
+        eq(value, 'multipart/mixed; boundary="BOUNDARY"')
+        # And this one has no Content-Type: header at all.
+        msg = self._msgobj('msg_03.txt')
+        self.assertRaises(errors.HeaderParseError,
+                          msg.set_boundary, 'BOUNDARY')
+
+    def test_get_decoded_payload(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_10.txt')
+        # The outer message is a multipart
+        eq(msg.get_payload(decode=True), None)
+        # Subpart 1 is 7bit encoded
+        eq(msg.get_payload(0).get_payload(decode=True),
+           'This is a 7bit encoded message.\n')
+        # Subpart 2 is quopri
+        eq(msg.get_payload(1).get_payload(decode=True),
+           '\xa1This is a Quoted Printable encoded message!\n')
+        # Subpart 3 is base64
+        eq(msg.get_payload(2).get_payload(decode=True),
+           'This is a Base64 encoded message.')
+        # Subpart 4 has no Content-Transfer-Encoding: header.
+        eq(msg.get_payload(3).get_payload(decode=True),
+           'This has no Content-Transfer-Encoding: header.\n')
+
+    def test_get_decoded_uu_payload(self):
+        eq = self.assertEqual
+        msg = Message()
+        msg.set_payload('begin 666 -\n+:&5L;&\\@=V]R;&0 \n \nend\n')
+        for cte in ('x-uuencode', 'uuencode', 'uue', 'x-uue'):
+            msg['content-transfer-encoding'] = cte
+            eq(msg.get_payload(decode=True), 'hello world')
+        # Now try some bogus data
+        msg.set_payload('foo')
+        eq(msg.get_payload(decode=True), 'foo')
+
+    def test_decoded_generator(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_07.txt')
+        fp = openfile('msg_17.txt')
+        try:
+            text = fp.read()
+        finally:
+            fp.close()
+        s = StringIO()
+        g = DecodedGenerator(s)
+        g.flatten(msg)
+        eq(s.getvalue(), text)
+
+    def test__contains__(self):
+        msg = Message()
+        msg['From'] = 'Me'
+        msg['to'] = 'You'
+        # Check for case insensitivity
+        self.failUnless('from' in msg)
+        self.failUnless('From' in msg)
+        self.failUnless('FROM' in msg)
+        self.failUnless('to' in msg)
+        self.failUnless('To' in msg)
+        self.failUnless('TO' in msg)
+
+    def test_as_string(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_01.txt')
+        fp = openfile('msg_01.txt')
+        try:
+            text = fp.read()
+        finally:
+            fp.close()
+        eq(text, msg.as_string())
+        fullrepr = str(msg)
+        lines = fullrepr.split('\n')
+        self.failUnless(lines[0].startswith('From '))
+        eq(text, NL.join(lines[1:]))
+
+    def test_bad_param(self):
+        msg = email.message_from_string("Content-Type: blarg; baz; boo\n")
+        self.assertEqual(msg.get_param('baz'), '')
+
+    def test_missing_filename(self):
+        msg = email.message_from_string("From: foo\n")
+        self.assertEqual(msg.get_filename(), None)
+
+    def test_bogus_filename(self):
+        msg = email.message_from_string(
+        "Content-Disposition: blarg; filename\n")
+        self.assertEqual(msg.get_filename(), '')
+
+    def test_missing_boundary(self):
+        msg = email.message_from_string("From: foo\n")
+        self.assertEqual(msg.get_boundary(), None)
+
+    def test_get_params(self):
+        eq = self.assertEqual
+        msg = email.message_from_string(
+            'X-Header: foo=one; bar=two; baz=three\n')
+        eq(msg.get_params(header='x-header'),
+           [('foo', 'one'), ('bar', 'two'), ('baz', 'three')])
+        msg = email.message_from_string(
+            'X-Header: foo; bar=one; baz=two\n')
+        eq(msg.get_params(header='x-header'),
+           [('foo', ''), ('bar', 'one'), ('baz', 'two')])
+        eq(msg.get_params(), None)
+        msg = email.message_from_string(
+            'X-Header: foo; bar="one"; baz=two\n')
+        eq(msg.get_params(header='x-header'),
+           [('foo', ''), ('bar', 'one'), ('baz', 'two')])
+
+    def test_get_param_liberal(self):
+        msg = Message()
+        msg['Content-Type'] = 'Content-Type: Multipart/mixed; boundary = "CPIMSSMTPC06p5f3tG"'
+        self.assertEqual(msg.get_param('boundary'), 'CPIMSSMTPC06p5f3tG')
+
+    def test_get_param(self):
+        eq = self.assertEqual
+        msg = email.message_from_string(
+            "X-Header: foo=one; bar=two; baz=three\n")
+        eq(msg.get_param('bar', header='x-header'), 'two')
+        eq(msg.get_param('quuz', header='x-header'), None)
+        eq(msg.get_param('quuz'), None)
+        msg = email.message_from_string(
+            'X-Header: foo; bar="one"; baz=two\n')
+        eq(msg.get_param('foo', header='x-header'), '')
+        eq(msg.get_param('bar', header='x-header'), 'one')
+        eq(msg.get_param('baz', header='x-header'), 'two')
+        # XXX: We are not RFC-2045 compliant!  We cannot parse:
+        # msg["Content-Type"] = 'text/plain; weird="hey; dolly? [you] @ <\\"home\\">?"'
+        # msg.get_param("weird")
+        # yet.
+
+    def test_get_param_funky_continuation_lines(self):
+        msg = self._msgobj('msg_22.txt')
+        self.assertEqual(msg.get_payload(1).get_param('name'), 'wibble.JPG')
+
+    def test_get_param_with_semis_in_quotes(self):
+        msg = email.message_from_string(
+            'Content-Type: image/pjpeg; name="Jim&amp;&amp;Jill"\n')
+        self.assertEqual(msg.get_param('name'), 'Jim&amp;&amp;Jill')
+        self.assertEqual(msg.get_param('name', unquote=False),
+                         '"Jim&amp;&amp;Jill"')
+
+    def test_has_key(self):
+        msg = email.message_from_string('Header: exists')
+        self.failUnless(msg.has_key('header'))
+        self.failUnless(msg.has_key('Header'))
+        self.failUnless(msg.has_key('HEADER'))
+        self.failIf(msg.has_key('headeri'))
+
+    def test_set_param(self):
+        eq = self.assertEqual
+        msg = Message()
+        msg.set_param('charset', 'iso-2022-jp')
+        eq(msg.get_param('charset'), 'iso-2022-jp')
+        msg.set_param('importance', 'high value')
+        eq(msg.get_param('importance'), 'high value')
+        eq(msg.get_param('importance', unquote=False), '"high value"')
+        eq(msg.get_params(), [('text/plain', ''),
+                              ('charset', 'iso-2022-jp'),
+                              ('importance', 'high value')])
+        eq(msg.get_params(unquote=False), [('text/plain', ''),
+                                       ('charset', '"iso-2022-jp"'),
+                                       ('importance', '"high value"')])
+        msg.set_param('charset', 'iso-9999-xx', header='X-Jimmy')
+        eq(msg.get_param('charset', header='X-Jimmy'), 'iso-9999-xx')
+
+    def test_del_param(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_05.txt')
+        eq(msg.get_params(),
+           [('multipart/report', ''), ('report-type', 'delivery-status'),
+            ('boundary', 'D1690A7AC1.996856090/mail.example.com')])
+        old_val = msg.get_param("report-type")
+        msg.del_param("report-type")
+        eq(msg.get_params(),
+           [('multipart/report', ''),
+            ('boundary', 'D1690A7AC1.996856090/mail.example.com')])
+        msg.set_param("report-type", old_val)
+        eq(msg.get_params(),
+           [('multipart/report', ''),
+            ('boundary', 'D1690A7AC1.996856090/mail.example.com'),
+            ('report-type', old_val)])
+
+    def test_del_param_on_other_header(self):
+        msg = Message()
+        msg.add_header('Content-Disposition', 'attachment', filename='bud.gif')
+        msg.del_param('filename', 'content-disposition')
+        self.assertEqual(msg['content-disposition'], 'attachment')
+
+    def test_set_type(self):
+        eq = self.assertEqual
+        msg = Message()
+        self.assertRaises(ValueError, msg.set_type, 'text')
+        msg.set_type('text/plain')
+        eq(msg['content-type'], 'text/plain')
+        msg.set_param('charset', 'us-ascii')
+        eq(msg['content-type'], 'text/plain; charset="us-ascii"')
+        msg.set_type('text/html')
+        eq(msg['content-type'], 'text/html; charset="us-ascii"')
+
+    def test_set_type_on_other_header(self):
+        msg = Message()
+        msg['X-Content-Type'] = 'text/plain'
+        msg.set_type('application/octet-stream', 'X-Content-Type')
+        self.assertEqual(msg['x-content-type'], 'application/octet-stream')
+
+    def test_get_content_type_missing(self):
+        msg = Message()
+        self.assertEqual(msg.get_content_type(), 'text/plain')
+
+    def test_get_content_type_missing_with_default_type(self):
+        msg = Message()
+        msg.set_default_type('message/rfc822')
+        self.assertEqual(msg.get_content_type(), 'message/rfc822')
+
+    def test_get_content_type_from_message_implicit(self):
+        msg = self._msgobj('msg_30.txt')
+        self.assertEqual(msg.get_payload(0).get_content_type(),
+                         'message/rfc822')
+
+    def test_get_content_type_from_message_explicit(self):
+        msg = self._msgobj('msg_28.txt')
+        self.assertEqual(msg.get_payload(0).get_content_type(),
+                         'message/rfc822')
+
+    def test_get_content_type_from_message_text_plain_implicit(self):
+        msg = self._msgobj('msg_03.txt')
+        self.assertEqual(msg.get_content_type(), 'text/plain')
+
+    def test_get_content_type_from_message_text_plain_explicit(self):
+        msg = self._msgobj('msg_01.txt')
+        self.assertEqual(msg.get_content_type(), 'text/plain')
+
+    def test_get_content_maintype_missing(self):
+        msg = Message()
+        self.assertEqual(msg.get_content_maintype(), 'text')
+
+    def test_get_content_maintype_missing_with_default_type(self):
+        msg = Message()
+        msg.set_default_type('message/rfc822')
+        self.assertEqual(msg.get_content_maintype(), 'message')
+
+    def test_get_content_maintype_from_message_implicit(self):
+        msg = self._msgobj('msg_30.txt')
+        self.assertEqual(msg.get_payload(0).get_content_maintype(), 'message')
+
+    def test_get_content_maintype_from_message_explicit(self):
+        msg = self._msgobj('msg_28.txt')
+        self.assertEqual(msg.get_payload(0).get_content_maintype(), 'message')
+
+    def test_get_content_maintype_from_message_text_plain_implicit(self):
+        msg = self._msgobj('msg_03.txt')
+        self.assertEqual(msg.get_content_maintype(), 'text')
+
+    def test_get_content_maintype_from_message_text_plain_explicit(self):
+        msg = self._msgobj('msg_01.txt')
+        self.assertEqual(msg.get_content_maintype(), 'text')
+
+    def test_get_content_subtype_missing(self):
+        msg = Message()
+        self.assertEqual(msg.get_content_subtype(), 'plain')
+
+    def test_get_content_subtype_missing_with_default_type(self):
+        msg = Message()
+        msg.set_default_type('message/rfc822')
+        self.assertEqual(msg.get_content_subtype(), 'rfc822')
+
+    def test_get_content_subtype_from_message_implicit(self):
+        msg = self._msgobj('msg_30.txt')
+        self.assertEqual(msg.get_payload(0).get_content_subtype(), 'rfc822')
+
+    def test_get_content_subtype_from_message_explicit(self):
+        msg = self._msgobj('msg_28.txt')
+        self.assertEqual(msg.get_payload(0).get_content_subtype(), 'rfc822')
+
+    def test_get_content_subtype_from_message_text_plain_implicit(self):
+        msg = self._msgobj('msg_03.txt')
+        self.assertEqual(msg.get_content_subtype(), 'plain')
+
+    def test_get_content_subtype_from_message_text_plain_explicit(self):
+        msg = self._msgobj('msg_01.txt')
+        self.assertEqual(msg.get_content_subtype(), 'plain')
+
+    def test_get_content_maintype_error(self):
+        msg = Message()
+        msg['Content-Type'] = 'no-slash-in-this-string'
+        self.assertEqual(msg.get_content_maintype(), 'text')
+
+    def test_get_content_subtype_error(self):
+        msg = Message()
+        msg['Content-Type'] = 'no-slash-in-this-string'
+        self.assertEqual(msg.get_content_subtype(), 'plain')
+
+    def test_replace_header(self):
+        eq = self.assertEqual
+        msg = Message()
+        msg.add_header('First', 'One')
+        msg.add_header('Second', 'Two')
+        msg.add_header('Third', 'Three')
+        eq(msg.keys(), ['First', 'Second', 'Third'])
+        eq(msg.values(), ['One', 'Two', 'Three'])
+        msg.replace_header('Second', 'Twenty')
+        eq(msg.keys(), ['First', 'Second', 'Third'])
+        eq(msg.values(), ['One', 'Twenty', 'Three'])
+        msg.add_header('First', 'Eleven')
+        msg.replace_header('First', 'One Hundred')
+        eq(msg.keys(), ['First', 'Second', 'Third', 'First'])
+        eq(msg.values(), ['One Hundred', 'Twenty', 'Three', 'Eleven'])
+        self.assertRaises(KeyError, msg.replace_header, 'Fourth', 'Missing')
+
+    def test_broken_base64_payload(self):
+        x = 'AwDp0P7//y6LwKEAcPa/6Q=9'
+        msg = Message()
+        msg['content-type'] = 'audio/x-midi'
+        msg['content-transfer-encoding'] = 'base64'
+        msg.set_payload(x)
+        self.assertEqual(msg.get_payload(decode=True), x)
+
+
+
+# Test the email.encoders module
+class TestEncoders(unittest.TestCase):
+    def test_encode_empty_payload(self):
+        eq = self.assertEqual
+        msg = Message()
+        msg.set_charset('us-ascii')
+        eq(msg['content-transfer-encoding'], '7bit')
+
+    def test_default_cte(self):
+        eq = self.assertEqual
+        msg = MIMEText('hello world')
+        eq(msg['content-transfer-encoding'], '7bit')
+
+    def test_default_cte(self):
+        eq = self.assertEqual
+        # With no explicit _charset its us-ascii, and all are 7-bit
+        msg = MIMEText('hello world')
+        eq(msg['content-transfer-encoding'], '7bit')
+        # Similar, but with 8-bit data
+        msg = MIMEText('hello \xf8 world')
+        eq(msg['content-transfer-encoding'], '8bit')
+        # And now with a different charset
+        msg = MIMEText('hello \xf8 world', _charset='iso-8859-1')
+        eq(msg['content-transfer-encoding'], 'quoted-printable')
+
+
+
+# Test long header wrapping
+class TestLongHeaders(TestEmailBase):
+    def test_split_long_continuation(self):
+        eq = self.ndiffAssertEqual
+        msg = email.message_from_string("""\
+Subject: bug demonstration
+\t12345678911234567892123456789312345678941234567895123456789612345678971234567898112345678911234567892123456789112345678911234567892123456789
+\tmore text
+
+test
+""")
+        sfp = StringIO()
+        g = Generator(sfp)
+        g.flatten(msg)
+        eq(sfp.getvalue(), """\
+Subject: bug demonstration
+\t12345678911234567892123456789312345678941234567895123456789612345678971234567898112345678911234567892123456789112345678911234567892123456789
+\tmore text
+
+test
+""")
+
+    def test_another_long_almost_unsplittable_header(self):
+        eq = self.ndiffAssertEqual
+        hstr = """\
+bug demonstration
+\t12345678911234567892123456789312345678941234567895123456789612345678971234567898112345678911234567892123456789112345678911234567892123456789
+\tmore text"""
+        h = Header(hstr, continuation_ws='\t')
+        eq(h.encode(), """\
+bug demonstration
+\t12345678911234567892123456789312345678941234567895123456789612345678971234567898112345678911234567892123456789112345678911234567892123456789
+\tmore text""")
+        h = Header(hstr)
+        eq(h.encode(), """\
+bug demonstration
+ 12345678911234567892123456789312345678941234567895123456789612345678971234567898112345678911234567892123456789112345678911234567892123456789
+ more text""")
+
+    def test_long_nonstring(self):
+        eq = self.ndiffAssertEqual
+        g = Charset("iso-8859-1")
+        cz = Charset("iso-8859-2")
+        utf8 = Charset("utf-8")
+        g_head = "Die Mieter treten hier ein werden mit einem Foerderband komfortabel den Korridor entlang, an s\xfcdl\xfcndischen Wandgem\xe4lden vorbei, gegen die rotierenden Klingen bef\xf6rdert. "
+        cz_head = "Finan\xe8ni metropole se hroutily pod tlakem jejich d\xf9vtipu.. "
+        utf8_head = u"\u6b63\u78ba\u306b\u8a00\u3046\u3068\u7ffb\u8a33\u306f\u3055\u308c\u3066\u3044\u307e\u305b\u3093\u3002\u4e00\u90e8\u306f\u30c9\u30a4\u30c4\u8a9e\u3067\u3059\u304c\u3001\u3042\u3068\u306f\u3067\u305f\u3089\u3081\u3067\u3059\u3002\u5b9f\u969b\u306b\u306f\u300cWenn ist das Nunstuck git und Slotermeyer? Ja! Beiherhund das Oder die Flipperwaldt gersput.\u300d\u3068\u8a00\u3063\u3066\u3044\u307e\u3059\u3002".encode("utf-8")
+        h = Header(g_head, g, header_name='Subject')
+        h.append(cz_head, cz)
+        h.append(utf8_head, utf8)
+        msg = Message()
+        msg['Subject'] = h
+        sfp = StringIO()
+        g = Generator(sfp)
+        g.flatten(msg)
+        eq(sfp.getvalue(), """\
+Subject: =?iso-8859-1?q?Die_Mieter_treten_hier_ein_werden_mit_einem_Foerd?=
+ =?iso-8859-1?q?erband_komfortabel_den_Korridor_entlang=2C_an_s=FCdl=FCndi?=
+ =?iso-8859-1?q?schen_Wandgem=E4lden_vorbei=2C_gegen_die_rotierenden_Kling?=
+ =?iso-8859-1?q?en_bef=F6rdert=2E_?= =?iso-8859-2?q?Finan=E8ni_met?=
+ =?iso-8859-2?q?ropole_se_hroutily_pod_tlakem_jejich_d=F9vtipu=2E=2E_?=
+ =?utf-8?b?5q2j56K644Gr6KiA44GG44Go57+76Kiz44Gv44GV44KM44Gm44GE?=
+ =?utf-8?b?44G+44Gb44KT44CC5LiA6YOo44Gv44OJ44Kk44OE6Kqe44Gn44GZ44GM44CB?=
+ =?utf-8?b?44GC44Go44Gv44Gn44Gf44KJ44KB44Gn44GZ44CC5a6f6Zqb44Gr44Gv44CM?=
+ =?utf-8?q?Wenn_ist_das_Nunstuck_git_und_Slotermeyer=3F_Ja!_Beiherhund_das?=
+ =?utf-8?b?IE9kZXIgZGllIEZsaXBwZXJ3YWxkdCBnZXJzcHV0LuOAjeOBqOiogOOBow==?=
+ =?utf-8?b?44Gm44GE44G+44GZ44CC?=
+
+""")
+        eq(h.encode(), """\
+=?iso-8859-1?q?Die_Mieter_treten_hier_ein_werden_mit_einem_Foerd?=
+ =?iso-8859-1?q?erband_komfortabel_den_Korridor_entlang=2C_an_s=FCdl=FCndi?=
+ =?iso-8859-1?q?schen_Wandgem=E4lden_vorbei=2C_gegen_die_rotierenden_Kling?=
+ =?iso-8859-1?q?en_bef=F6rdert=2E_?= =?iso-8859-2?q?Finan=E8ni_met?=
+ =?iso-8859-2?q?ropole_se_hroutily_pod_tlakem_jejich_d=F9vtipu=2E=2E_?=
+ =?utf-8?b?5q2j56K644Gr6KiA44GG44Go57+76Kiz44Gv44GV44KM44Gm44GE?=
+ =?utf-8?b?44G+44Gb44KT44CC5LiA6YOo44Gv44OJ44Kk44OE6Kqe44Gn44GZ44GM44CB?=
+ =?utf-8?b?44GC44Go44Gv44Gn44Gf44KJ44KB44Gn44GZ44CC5a6f6Zqb44Gr44Gv44CM?=
+ =?utf-8?q?Wenn_ist_das_Nunstuck_git_und_Slotermeyer=3F_Ja!_Beiherhund_das?=
+ =?utf-8?b?IE9kZXIgZGllIEZsaXBwZXJ3YWxkdCBnZXJzcHV0LuOAjeOBqOiogOOBow==?=
+ =?utf-8?b?44Gm44GE44G+44GZ44CC?=""")
+
+    def test_long_header_encode(self):
+        eq = self.ndiffAssertEqual
+        h = Header('wasnipoop; giraffes="very-long-necked-animals"; '
+                   'spooge="yummy"; hippos="gargantuan"; marshmallows="gooey"',
+                   header_name='X-Foobar-Spoink-Defrobnit')
+        eq(h.encode(), '''\
+wasnipoop; giraffes="very-long-necked-animals";
+ spooge="yummy"; hippos="gargantuan"; marshmallows="gooey"''')
+
+    def test_long_header_encode_with_tab_continuation(self):
+        eq = self.ndiffAssertEqual
+        h = Header('wasnipoop; giraffes="very-long-necked-animals"; '
+                   'spooge="yummy"; hippos="gargantuan"; marshmallows="gooey"',
+                   header_name='X-Foobar-Spoink-Defrobnit',
+                   continuation_ws='\t')
+        eq(h.encode(), '''\
+wasnipoop; giraffes="very-long-necked-animals";
+\tspooge="yummy"; hippos="gargantuan"; marshmallows="gooey"''')
+
+    def test_header_splitter(self):
+        eq = self.ndiffAssertEqual
+        msg = MIMEText('')
+        # It'd be great if we could use add_header() here, but that doesn't
+        # guarantee an order of the parameters.
+        msg['X-Foobar-Spoink-Defrobnit'] = (
+            'wasnipoop; giraffes="very-long-necked-animals"; '
+            'spooge="yummy"; hippos="gargantuan"; marshmallows="gooey"')
+        sfp = StringIO()
+        g = Generator(sfp)
+        g.flatten(msg)
+        eq(sfp.getvalue(), '''\
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+X-Foobar-Spoink-Defrobnit: wasnipoop; giraffes="very-long-necked-animals";
+\tspooge="yummy"; hippos="gargantuan"; marshmallows="gooey"
+
+''')
+
+    def test_no_semis_header_splitter(self):
+        eq = self.ndiffAssertEqual
+        msg = Message()
+        msg['From'] = 'test@dom.ain'
+        msg['References'] = SPACE.join(['<%d@dom.ain>' % i for i in range(10)])
+        msg.set_payload('Test')
+        sfp = StringIO()
+        g = Generator(sfp)
+        g.flatten(msg)
+        eq(sfp.getvalue(), """\
+From: test@dom.ain
+References: <0@dom.ain> <1@dom.ain> <2@dom.ain> <3@dom.ain> <4@dom.ain>
+\t<5@dom.ain> <6@dom.ain> <7@dom.ain> <8@dom.ain> <9@dom.ain>
+
+Test""")
+
+    def test_no_split_long_header(self):
+        eq = self.ndiffAssertEqual
+        hstr = 'References: ' + 'x' * 80
+        h = Header(hstr, continuation_ws='\t')
+        eq(h.encode(), """\
+References: xxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxxx""")
+
+    def test_splitting_multiple_long_lines(self):
+        eq = self.ndiffAssertEqual
+        hstr = """\
+from babylon.socal-raves.org (localhost [127.0.0.1]); by babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81; for <mailman-admin@babylon.socal-raves.org>; Sat, 2 Feb 2002 17:00:06 -0800 (PST)
+\tfrom babylon.socal-raves.org (localhost [127.0.0.1]); by babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81; for <mailman-admin@babylon.socal-raves.org>; Sat, 2 Feb 2002 17:00:06 -0800 (PST)
+\tfrom babylon.socal-raves.org (localhost [127.0.0.1]); by babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81; for <mailman-admin@babylon.socal-raves.org>; Sat, 2 Feb 2002 17:00:06 -0800 (PST)
+"""
+        h = Header(hstr, continuation_ws='\t')
+        eq(h.encode(), """\
+from babylon.socal-raves.org (localhost [127.0.0.1]);
+\tby babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81;
+\tfor <mailman-admin@babylon.socal-raves.org>;
+\tSat, 2 Feb 2002 17:00:06 -0800 (PST)
+\tfrom babylon.socal-raves.org (localhost [127.0.0.1]);
+\tby babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81;
+\tfor <mailman-admin@babylon.socal-raves.org>;
+\tSat, 2 Feb 2002 17:00:06 -0800 (PST)
+\tfrom babylon.socal-raves.org (localhost [127.0.0.1]);
+\tby babylon.socal-raves.org (Postfix) with ESMTP id B570E51B81;
+\tfor <mailman-admin@babylon.socal-raves.org>;
+\tSat, 2 Feb 2002 17:00:06 -0800 (PST)""")
+
+    def test_splitting_first_line_only_is_long(self):
+        eq = self.ndiffAssertEqual
+        hstr = """\
+from modemcable093.139-201-24.que.mc.videotron.ca ([24.201.139.93] helo=cthulhu.gerg.ca)
+\tby kronos.mems-exchange.org with esmtp (Exim 4.05)
+\tid 17k4h5-00034i-00
+\tfor test@mems-exchange.org; Wed, 28 Aug 2002 11:25:20 -0400"""
+        h = Header(hstr, maxlinelen=78, header_name='Received',
+                   continuation_ws='\t')
+        eq(h.encode(), """\
+from modemcable093.139-201-24.que.mc.videotron.ca ([24.201.139.93]
+\thelo=cthulhu.gerg.ca)
+\tby kronos.mems-exchange.org with esmtp (Exim 4.05)
+\tid 17k4h5-00034i-00
+\tfor test@mems-exchange.org; Wed, 28 Aug 2002 11:25:20 -0400""")
+
+    def test_long_8bit_header(self):
+        eq = self.ndiffAssertEqual
+        msg = Message()
+        h = Header('Britische Regierung gibt', 'iso-8859-1',
+                    header_name='Subject')
+        h.append('gr\xfcnes Licht f\xfcr Offshore-Windkraftprojekte')
+        msg['Subject'] = h
+        eq(msg.as_string(), """\
+Subject: =?iso-8859-1?q?Britische_Regierung_gibt?= =?iso-8859-1?q?gr=FCnes?=
+ =?iso-8859-1?q?_Licht_f=FCr_Offshore-Windkraftprojekte?=
+
+""")
+
+    def test_long_8bit_header_no_charset(self):
+        eq = self.ndiffAssertEqual
+        msg = Message()
+        msg['Reply-To'] = 'Britische Regierung gibt gr\xfcnes Licht f\xfcr Offshore-Windkraftprojekte <a-very-long-address@example.com>'
+        eq(msg.as_string(), """\
+Reply-To: Britische Regierung gibt gr\xfcnes Licht f\xfcr Offshore-Windkraftprojekte <a-very-long-address@example.com>
+
+""")
+
+    def test_long_to_header(self):
+        eq = self.ndiffAssertEqual
+        to = '"Someone Test #A" <someone@eecs.umich.edu>,<someone@eecs.umich.edu>,"Someone Test #B" <someone@umich.edu>, "Someone Test #C" <someone@eecs.umich.edu>, "Someone Test #D" <someone@eecs.umich.edu>'
+        msg = Message()
+        msg['To'] = to
+        eq(msg.as_string(0), '''\
+To: "Someone Test #A" <someone@eecs.umich.edu>, <someone@eecs.umich.edu>,
+\t"Someone Test #B" <someone@umich.edu>,
+\t"Someone Test #C" <someone@eecs.umich.edu>,
+\t"Someone Test #D" <someone@eecs.umich.edu>
+
+''')
+
+    def test_long_line_after_append(self):
+        eq = self.ndiffAssertEqual
+        s = 'This is an example of string which has almost the limit of header length.'
+        h = Header(s)
+        h.append('Add another line.')
+        eq(h.encode(), """\
+This is an example of string which has almost the limit of header length.
+ Add another line.""")
+
+    def test_shorter_line_with_append(self):
+        eq = self.ndiffAssertEqual
+        s = 'This is a shorter line.'
+        h = Header(s)
+        h.append('Add another sentence. (Surprise?)')
+        eq(h.encode(),
+           'This is a shorter line. Add another sentence. (Surprise?)')
+
+    def test_long_field_name(self):
+        eq = self.ndiffAssertEqual
+        fn = 'X-Very-Very-Very-Long-Header-Name'
+        gs = "Die Mieter treten hier ein werden mit einem Foerderband komfortabel den Korridor entlang, an s\xfcdl\xfcndischen Wandgem\xe4lden vorbei, gegen die rotierenden Klingen bef\xf6rdert. "
+        h = Header(gs, 'iso-8859-1', header_name=fn)
+        # BAW: this seems broken because the first line is too long
+        eq(h.encode(), """\
+=?iso-8859-1?q?Die_Mieter_treten_hier_?=
+ =?iso-8859-1?q?ein_werden_mit_einem_Foerderband_komfortabel_den_Korridor_?=
+ =?iso-8859-1?q?entlang=2C_an_s=FCdl=FCndischen_Wandgem=E4lden_vorbei=2C_g?=
+ =?iso-8859-1?q?egen_die_rotierenden_Klingen_bef=F6rdert=2E_?=""")
+
+    def test_long_received_header(self):
+        h = 'from FOO.TLD (vizworld.acl.foo.tld [123.452.678.9]) by hrothgar.la.mastaler.com (tmda-ofmipd) with ESMTP; Wed, 05 Mar 2003 18:10:18 -0700'
+        msg = Message()
+        msg['Received-1'] = Header(h, continuation_ws='\t')
+        msg['Received-2'] = h
+        self.assertEqual(msg.as_string(), """\
+Received-1: from FOO.TLD (vizworld.acl.foo.tld [123.452.678.9]) by
+\throthgar.la.mastaler.com (tmda-ofmipd) with ESMTP;
+\tWed, 05 Mar 2003 18:10:18 -0700
+Received-2: from FOO.TLD (vizworld.acl.foo.tld [123.452.678.9]) by
+\throthgar.la.mastaler.com (tmda-ofmipd) with ESMTP;
+\tWed, 05 Mar 2003 18:10:18 -0700
+
+""")
+
+    def test_string_headerinst_eq(self):
+        h = '<15975.17901.207240.414604@sgigritzmann1.mathematik.tu-muenchen.de> (David Bremner\'s message of "Thu, 6 Mar 2003 13:58:21 +0100")'
+        msg = Message()
+        msg['Received-1'] = Header(h, header_name='Received-1',
+                                   continuation_ws='\t')
+        msg['Received-2'] = h
+        self.assertEqual(msg.as_string(), """\
+Received-1: <15975.17901.207240.414604@sgigritzmann1.mathematik.tu-muenchen.de>
+\t(David Bremner's message of "Thu, 6 Mar 2003 13:58:21 +0100")
+Received-2: <15975.17901.207240.414604@sgigritzmann1.mathematik.tu-muenchen.de>
+\t(David Bremner's message of "Thu, 6 Mar 2003 13:58:21 +0100")
+
+""")
+
+    def test_long_unbreakable_lines_with_continuation(self):
+        eq = self.ndiffAssertEqual
+        msg = Message()
+        t = """\
+ iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAGFBMVEUAAAAkHiJeRUIcGBi9
+ locQDQ4zJykFBAXJfWDjAAACYUlEQVR4nF2TQY/jIAyFc6lydlG5x8Nyp1Y69wj1PN2I5gzp"""
+        msg['Face-1'] = t
+        msg['Face-2'] = Header(t, header_name='Face-2')
+        eq(msg.as_string(), """\
+Face-1: iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAGFBMVEUAAAAkHiJeRUIcGBi9
+\tlocQDQ4zJykFBAXJfWDjAAACYUlEQVR4nF2TQY/jIAyFc6lydlG5x8Nyp1Y69wj1PN2I5gzp
+Face-2: iVBORw0KGgoAAAANSUhEUgAAADAAAAAwBAMAAAClLOS0AAAAGFBMVEUAAAAkHiJeRUIcGBi9
+ locQDQ4zJykFBAXJfWDjAAACYUlEQVR4nF2TQY/jIAyFc6lydlG5x8Nyp1Y69wj1PN2I5gzp
+
+""")
+
+    def test_another_long_multiline_header(self):
+        eq = self.ndiffAssertEqual
+        m = '''\
+Received: from siimage.com ([172.25.1.3]) by zima.siliconimage.com with Microsoft SMTPSVC(5.0.2195.4905);
+\tWed, 16 Oct 2002 07:41:11 -0700'''
+        msg = email.message_from_string(m)
+        eq(msg.as_string(), '''\
+Received: from siimage.com ([172.25.1.3]) by zima.siliconimage.com with
+\tMicrosoft SMTPSVC(5.0.2195.4905); Wed, 16 Oct 2002 07:41:11 -0700
+
+''')
+
+    def test_long_lines_with_different_header(self):
+        eq = self.ndiffAssertEqual
+        h = """\
+List-Unsubscribe: <https://lists.sourceforge.net/lists/listinfo/spamassassin-talk>,
+        <mailto:spamassassin-talk-request@lists.sourceforge.net?subject=unsubscribe>"""
+        msg = Message()
+        msg['List'] = h
+        msg['List'] = Header(h, header_name='List')
+        eq(msg.as_string(), """\
+List: List-Unsubscribe: <https://lists.sourceforge.net/lists/listinfo/spamassassin-talk>,
+\t<mailto:spamassassin-talk-request@lists.sourceforge.net?subject=unsubscribe>
+List: List-Unsubscribe: <https://lists.sourceforge.net/lists/listinfo/spamassassin-talk>,
+ <mailto:spamassassin-talk-request@lists.sourceforge.net?subject=unsubscribe>
+
+""")
+
+
+
+# Test mangling of "From " lines in the body of a message
+class TestFromMangling(unittest.TestCase):
+    def setUp(self):
+        self.msg = Message()
+        self.msg['From'] = 'aaa@bbb.org'
+        self.msg.set_payload("""\
+From the desk of A.A.A.:
+Blah blah blah
+""")
+
+    def test_mangled_from(self):
+        s = StringIO()
+        g = Generator(s, mangle_from_=True)
+        g.flatten(self.msg)
+        self.assertEqual(s.getvalue(), """\
+From: aaa@bbb.org
+
+>From the desk of A.A.A.:
+Blah blah blah
+""")
+
+    def test_dont_mangle_from(self):
+        s = StringIO()
+        g = Generator(s, mangle_from_=False)
+        g.flatten(self.msg)
+        self.assertEqual(s.getvalue(), """\
+From: aaa@bbb.org
+
+From the desk of A.A.A.:
+Blah blah blah
+""")
+
+
+
+# Test the basic MIMEAudio class
+class TestMIMEAudio(unittest.TestCase):
+    def setUp(self):
+        # Make sure we pick up the audiotest.au that lives in email/test/data.
+        # In Python, there's an audiotest.au living in Lib/test but that isn't
+        # included in some binary distros that don't include the test
+        # package.  The trailing empty string on the .join() is significant
+        # since findfile() will do a dirname().
+        datadir = os.path.join(os.path.dirname(landmark), 'data', '')
+        fp = open(findfile('audiotest.au', datadir), 'rb')
+        try:
+            self._audiodata = fp.read()
+        finally:
+            fp.close()
+        self._au = MIMEAudio(self._audiodata)
+
+    def test_guess_minor_type(self):
+        self.assertEqual(self._au.get_content_type(), 'audio/basic')
+
+    def test_encoding(self):
+        payload = self._au.get_payload()
+        self.assertEqual(base64.decodestring(payload), self._audiodata)
+
+    def test_checkSetMinor(self):
+        au = MIMEAudio(self._audiodata, 'fish')
+        self.assertEqual(au.get_content_type(), 'audio/fish')
+
+    def test_add_header(self):
+        eq = self.assertEqual
+        unless = self.failUnless
+        self._au.add_header('Content-Disposition', 'attachment',
+                            filename='audiotest.au')
+        eq(self._au['content-disposition'],
+           'attachment; filename="audiotest.au"')
+        eq(self._au.get_params(header='content-disposition'),
+           [('attachment', ''), ('filename', 'audiotest.au')])
+        eq(self._au.get_param('filename', header='content-disposition'),
+           'audiotest.au')
+        missing = []
+        eq(self._au.get_param('attachment', header='content-disposition'), '')
+        unless(self._au.get_param('foo', failobj=missing,
+                                  header='content-disposition') is missing)
+        # Try some missing stuff
+        unless(self._au.get_param('foobar', missing) is missing)
+        unless(self._au.get_param('attachment', missing,
+                                  header='foobar') is missing)
+
+
+
+# Test the basic MIMEImage class
+class TestMIMEImage(unittest.TestCase):
+    def setUp(self):
+        fp = openfile('PyBanner048.gif')
+        try:
+            self._imgdata = fp.read()
+        finally:
+            fp.close()
+        self._im = MIMEImage(self._imgdata)
+
+    def test_guess_minor_type(self):
+        self.assertEqual(self._im.get_content_type(), 'image/gif')
+
+    def test_encoding(self):
+        payload = self._im.get_payload()
+        self.assertEqual(base64.decodestring(payload), self._imgdata)
+
+    def test_checkSetMinor(self):
+        im = MIMEImage(self._imgdata, 'fish')
+        self.assertEqual(im.get_content_type(), 'image/fish')
+
+    def test_add_header(self):
+        eq = self.assertEqual
+        unless = self.failUnless
+        self._im.add_header('Content-Disposition', 'attachment',
+                            filename='dingusfish.gif')
+        eq(self._im['content-disposition'],
+           'attachment; filename="dingusfish.gif"')
+        eq(self._im.get_params(header='content-disposition'),
+           [('attachment', ''), ('filename', 'dingusfish.gif')])
+        eq(self._im.get_param('filename', header='content-disposition'),
+           'dingusfish.gif')
+        missing = []
+        eq(self._im.get_param('attachment', header='content-disposition'), '')
+        unless(self._im.get_param('foo', failobj=missing,
+                                  header='content-disposition') is missing)
+        # Try some missing stuff
+        unless(self._im.get_param('foobar', missing) is missing)
+        unless(self._im.get_param('attachment', missing,
+                                  header='foobar') is missing)
+
+
+
+# Test the basic MIMEApplication class
+class TestMIMEApplication(unittest.TestCase):
+    def test_headers(self):
+        eq = self.assertEqual
+        msg = MIMEApplication('\xfa\xfb\xfc\xfd\xfe\xff')
+        eq(msg.get_content_type(), 'application/octet-stream')
+        eq(msg['content-transfer-encoding'], 'base64')
+
+    def test_body(self):
+        eq = self.assertEqual
+        bytes = '\xfa\xfb\xfc\xfd\xfe\xff'
+        msg = MIMEApplication(bytes)
+        eq(msg.get_payload(), '+vv8/f7/')
+        eq(msg.get_payload(decode=True), bytes)
+
+
+
+# Test the basic MIMEText class
+class TestMIMEText(unittest.TestCase):
+    def setUp(self):
+        self._msg = MIMEText('hello there')
+
+    def test_types(self):
+        eq = self.assertEqual
+        unless = self.failUnless
+        eq(self._msg.get_content_type(), 'text/plain')
+        eq(self._msg.get_param('charset'), 'us-ascii')
+        missing = []
+        unless(self._msg.get_param('foobar', missing) is missing)
+        unless(self._msg.get_param('charset', missing, header='foobar')
+               is missing)
+
+    def test_payload(self):
+        self.assertEqual(self._msg.get_payload(), 'hello there')
+        self.failUnless(not self._msg.is_multipart())
+
+    def test_charset(self):
+        eq = self.assertEqual
+        msg = MIMEText('hello there', _charset='us-ascii')
+        eq(msg.get_charset().input_charset, 'us-ascii')
+        eq(msg['content-type'], 'text/plain; charset="us-ascii"')
+
+
+
+# Test complicated multipart/* messages
+class TestMultipart(TestEmailBase):
+    def setUp(self):
+        fp = openfile('PyBanner048.gif')
+        try:
+            data = fp.read()
+        finally:
+            fp.close()
+
+        container = MIMEBase('multipart', 'mixed', boundary='BOUNDARY')
+        image = MIMEImage(data, name='dingusfish.gif')
+        image.add_header('content-disposition', 'attachment',
+                         filename='dingusfish.gif')
+        intro = MIMEText('''\
+Hi there,
+
+This is the dingus fish.
+''')
+        container.attach(intro)
+        container.attach(image)
+        container['From'] = 'Barry <barry@digicool.com>'
+        container['To'] = 'Dingus Lovers <cravindogs@cravindogs.com>'
+        container['Subject'] = 'Here is your dingus fish'
+
+        now = 987809702.54848599
+        timetuple = time.localtime(now)
+        if timetuple[-1] == 0:
+            tzsecs = time.timezone
+        else:
+            tzsecs = time.altzone
+        if tzsecs > 0:
+            sign = '-'
+        else:
+            sign = '+'
+        tzoffset = ' %s%04d' % (sign, tzsecs / 36)
+        container['Date'] = time.strftime(
+            '%a, %d %b %Y %H:%M:%S',
+            time.localtime(now)) + tzoffset
+        self._msg = container
+        self._im = image
+        self._txt = intro
+
+    def test_hierarchy(self):
+        # convenience
+        eq = self.assertEqual
+        unless = self.failUnless
+        raises = self.assertRaises
+        # tests
+        m = self._msg
+        unless(m.is_multipart())
+        eq(m.get_content_type(), 'multipart/mixed')
+        eq(len(m.get_payload()), 2)
+        raises(IndexError, m.get_payload, 2)
+        m0 = m.get_payload(0)
+        m1 = m.get_payload(1)
+        unless(m0 is self._txt)
+        unless(m1 is self._im)
+        eq(m.get_payload(), [m0, m1])
+        unless(not m0.is_multipart())
+        unless(not m1.is_multipart())
+
+    def test_empty_multipart_idempotent(self):
+        text = """\
+Content-Type: multipart/mixed; boundary="BOUNDARY"
+MIME-Version: 1.0
+Subject: A subject
+To: aperson@dom.ain
+From: bperson@dom.ain
+
+
+--BOUNDARY
+
+
+--BOUNDARY--
+"""
+        msg = Parser().parsestr(text)
+        self.ndiffAssertEqual(text, msg.as_string())
+
+    def test_no_parts_in_a_multipart_with_none_epilogue(self):
+        outer = MIMEBase('multipart', 'mixed')
+        outer['Subject'] = 'A subject'
+        outer['To'] = 'aperson@dom.ain'
+        outer['From'] = 'bperson@dom.ain'
+        outer.set_boundary('BOUNDARY')
+        self.ndiffAssertEqual(outer.as_string(), '''\
+Content-Type: multipart/mixed; boundary="BOUNDARY"
+MIME-Version: 1.0
+Subject: A subject
+To: aperson@dom.ain
+From: bperson@dom.ain
+
+--BOUNDARY
+
+--BOUNDARY--''')
+
+    def test_no_parts_in_a_multipart_with_empty_epilogue(self):
+        outer = MIMEBase('multipart', 'mixed')
+        outer['Subject'] = 'A subject'
+        outer['To'] = 'aperson@dom.ain'
+        outer['From'] = 'bperson@dom.ain'
+        outer.preamble = ''
+        outer.epilogue = ''
+        outer.set_boundary('BOUNDARY')
+        self.ndiffAssertEqual(outer.as_string(), '''\
+Content-Type: multipart/mixed; boundary="BOUNDARY"
+MIME-Version: 1.0
+Subject: A subject
+To: aperson@dom.ain
+From: bperson@dom.ain
+
+
+--BOUNDARY
+
+--BOUNDARY--
+''')
+
+    def test_one_part_in_a_multipart(self):
+        eq = self.ndiffAssertEqual
+        outer = MIMEBase('multipart', 'mixed')
+        outer['Subject'] = 'A subject'
+        outer['To'] = 'aperson@dom.ain'
+        outer['From'] = 'bperson@dom.ain'
+        outer.set_boundary('BOUNDARY')
+        msg = MIMEText('hello world')
+        outer.attach(msg)
+        eq(outer.as_string(), '''\
+Content-Type: multipart/mixed; boundary="BOUNDARY"
+MIME-Version: 1.0
+Subject: A subject
+To: aperson@dom.ain
+From: bperson@dom.ain
+
+--BOUNDARY
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+hello world
+--BOUNDARY--''')
+
+    def test_seq_parts_in_a_multipart_with_empty_preamble(self):
+        eq = self.ndiffAssertEqual
+        outer = MIMEBase('multipart', 'mixed')
+        outer['Subject'] = 'A subject'
+        outer['To'] = 'aperson@dom.ain'
+        outer['From'] = 'bperson@dom.ain'
+        outer.preamble = ''
+        msg = MIMEText('hello world')
+        outer.attach(msg)
+        outer.set_boundary('BOUNDARY')
+        eq(outer.as_string(), '''\
+Content-Type: multipart/mixed; boundary="BOUNDARY"
+MIME-Version: 1.0
+Subject: A subject
+To: aperson@dom.ain
+From: bperson@dom.ain
+
+
+--BOUNDARY
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+hello world
+--BOUNDARY--''')
+
+
+    def test_seq_parts_in_a_multipart_with_none_preamble(self):
+        eq = self.ndiffAssertEqual
+        outer = MIMEBase('multipart', 'mixed')
+        outer['Subject'] = 'A subject'
+        outer['To'] = 'aperson@dom.ain'
+        outer['From'] = 'bperson@dom.ain'
+        outer.preamble = None
+        msg = MIMEText('hello world')
+        outer.attach(msg)
+        outer.set_boundary('BOUNDARY')
+        eq(outer.as_string(), '''\
+Content-Type: multipart/mixed; boundary="BOUNDARY"
+MIME-Version: 1.0
+Subject: A subject
+To: aperson@dom.ain
+From: bperson@dom.ain
+
+--BOUNDARY
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+hello world
+--BOUNDARY--''')
+
+
+    def test_seq_parts_in_a_multipart_with_none_epilogue(self):
+        eq = self.ndiffAssertEqual
+        outer = MIMEBase('multipart', 'mixed')
+        outer['Subject'] = 'A subject'
+        outer['To'] = 'aperson@dom.ain'
+        outer['From'] = 'bperson@dom.ain'
+        outer.epilogue = None
+        msg = MIMEText('hello world')
+        outer.attach(msg)
+        outer.set_boundary('BOUNDARY')
+        eq(outer.as_string(), '''\
+Content-Type: multipart/mixed; boundary="BOUNDARY"
+MIME-Version: 1.0
+Subject: A subject
+To: aperson@dom.ain
+From: bperson@dom.ain
+
+--BOUNDARY
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+hello world
+--BOUNDARY--''')
+
+
+    def test_seq_parts_in_a_multipart_with_empty_epilogue(self):
+        eq = self.ndiffAssertEqual
+        outer = MIMEBase('multipart', 'mixed')
+        outer['Subject'] = 'A subject'
+        outer['To'] = 'aperson@dom.ain'
+        outer['From'] = 'bperson@dom.ain'
+        outer.epilogue = ''
+        msg = MIMEText('hello world')
+        outer.attach(msg)
+        outer.set_boundary('BOUNDARY')
+        eq(outer.as_string(), '''\
+Content-Type: multipart/mixed; boundary="BOUNDARY"
+MIME-Version: 1.0
+Subject: A subject
+To: aperson@dom.ain
+From: bperson@dom.ain
+
+--BOUNDARY
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+hello world
+--BOUNDARY--
+''')
+
+
+    def test_seq_parts_in_a_multipart_with_nl_epilogue(self):
+        eq = self.ndiffAssertEqual
+        outer = MIMEBase('multipart', 'mixed')
+        outer['Subject'] = 'A subject'
+        outer['To'] = 'aperson@dom.ain'
+        outer['From'] = 'bperson@dom.ain'
+        outer.epilogue = '\n'
+        msg = MIMEText('hello world')
+        outer.attach(msg)
+        outer.set_boundary('BOUNDARY')
+        eq(outer.as_string(), '''\
+Content-Type: multipart/mixed; boundary="BOUNDARY"
+MIME-Version: 1.0
+Subject: A subject
+To: aperson@dom.ain
+From: bperson@dom.ain
+
+--BOUNDARY
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+hello world
+--BOUNDARY--
+
+''')
+
+    def test_message_external_body(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_36.txt')
+        eq(len(msg.get_payload()), 2)
+        msg1 = msg.get_payload(1)
+        eq(msg1.get_content_type(), 'multipart/alternative')
+        eq(len(msg1.get_payload()), 2)
+        for subpart in msg1.get_payload():
+            eq(subpart.get_content_type(), 'message/external-body')
+            eq(len(subpart.get_payload()), 1)
+            subsubpart = subpart.get_payload(0)
+            eq(subsubpart.get_content_type(), 'text/plain')
+
+    def test_double_boundary(self):
+        # msg_37.txt is a multipart that contains two dash-boundary's in a
+        # row.  Our interpretation of RFC 2046 calls for ignoring the second
+        # and subsequent boundaries.
+        msg = self._msgobj('msg_37.txt')
+        self.assertEqual(len(msg.get_payload()), 3)
+
+    def test_nested_inner_contains_outer_boundary(self):
+        eq = self.ndiffAssertEqual
+        # msg_38.txt has an inner part that contains outer boundaries.  My
+        # interpretation of RFC 2046 (based on sections 5.1 and 5.1.2) say
+        # these are illegal and should be interpreted as unterminated inner
+        # parts.
+        msg = self._msgobj('msg_38.txt')
+        sfp = StringIO()
+        iterators._structure(msg, sfp)
+        eq(sfp.getvalue(), """\
+multipart/mixed
+    multipart/mixed
+        multipart/alternative
+            text/plain
+        text/plain
+    text/plain
+    text/plain
+""")
+
+    def test_nested_with_same_boundary(self):
+        eq = self.ndiffAssertEqual
+        # msg 39.txt is similarly evil in that it's got inner parts that use
+        # the same boundary as outer parts.  Again, I believe the way this is
+        # parsed is closest to the spirit of RFC 2046
+        msg = self._msgobj('msg_39.txt')
+        sfp = StringIO()
+        iterators._structure(msg, sfp)
+        eq(sfp.getvalue(), """\
+multipart/mixed
+    multipart/mixed
+        multipart/alternative
+        application/octet-stream
+        application/octet-stream
+    text/plain
+""")
+
+    def test_boundary_in_non_multipart(self):
+        msg = self._msgobj('msg_40.txt')
+        self.assertEqual(msg.as_string(), '''\
+MIME-Version: 1.0
+Content-Type: text/html; boundary="--961284236552522269"
+
+----961284236552522269
+Content-Type: text/html;
+Content-Transfer-Encoding: 7Bit
+
+<html></html>
+
+----961284236552522269--
+''')
+
+    def test_boundary_with_leading_space(self):
+        eq = self.assertEqual
+        msg = email.message_from_string('''\
+MIME-Version: 1.0
+Content-Type: multipart/mixed; boundary="    XXXX"
+
+--    XXXX
+Content-Type: text/plain
+
+
+--    XXXX
+Content-Type: text/plain
+
+--    XXXX--
+''')
+        self.failUnless(msg.is_multipart())
+        eq(msg.get_boundary(), '    XXXX')
+        eq(len(msg.get_payload()), 2)
+
+    def test_boundary_without_trailing_newline(self):
+        m = Parser().parsestr("""\
+Content-Type: multipart/mixed; boundary="===============0012394164=="
+MIME-Version: 1.0
+
+--===============0012394164==
+Content-Type: image/file1.jpg
+MIME-Version: 1.0
+Content-Transfer-Encoding: base64
+
+YXNkZg==
+--===============0012394164==--""")
+        self.assertEquals(m.get_payload(0).get_payload(), 'YXNkZg==')
+
+
+
+# Test some badly formatted messages
+class TestNonConformant(TestEmailBase):
+    def test_parse_missing_minor_type(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_14.txt')
+        eq(msg.get_content_type(), 'text/plain')
+        eq(msg.get_content_maintype(), 'text')
+        eq(msg.get_content_subtype(), 'plain')
+
+    def test_same_boundary_inner_outer(self):
+        unless = self.failUnless
+        msg = self._msgobj('msg_15.txt')
+        # XXX We can probably eventually do better
+        inner = msg.get_payload(0)
+        unless(hasattr(inner, 'defects'))
+        self.assertEqual(len(inner.defects), 1)
+        unless(isinstance(inner.defects[0],
+                          errors.StartBoundaryNotFoundDefect))
+
+    def test_multipart_no_boundary(self):
+        unless = self.failUnless
+        msg = self._msgobj('msg_25.txt')
+        unless(isinstance(msg.get_payload(), str))
+        self.assertEqual(len(msg.defects), 2)
+        unless(isinstance(msg.defects[0], errors.NoBoundaryInMultipartDefect))
+        unless(isinstance(msg.defects[1],
+                          errors.MultipartInvariantViolationDefect))
+
+    def test_invalid_content_type(self):
+        eq = self.assertEqual
+        neq = self.ndiffAssertEqual
+        msg = Message()
+        # RFC 2045, $5.2 says invalid yields text/plain
+        msg['Content-Type'] = 'text'
+        eq(msg.get_content_maintype(), 'text')
+        eq(msg.get_content_subtype(), 'plain')
+        eq(msg.get_content_type(), 'text/plain')
+        # Clear the old value and try something /really/ invalid
+        del msg['content-type']
+        msg['Content-Type'] = 'foo'
+        eq(msg.get_content_maintype(), 'text')
+        eq(msg.get_content_subtype(), 'plain')
+        eq(msg.get_content_type(), 'text/plain')
+        # Still, make sure that the message is idempotently generated
+        s = StringIO()
+        g = Generator(s)
+        g.flatten(msg)
+        neq(s.getvalue(), 'Content-Type: foo\n\n')
+
+    def test_no_start_boundary(self):
+        eq = self.ndiffAssertEqual
+        msg = self._msgobj('msg_31.txt')
+        eq(msg.get_payload(), """\
+--BOUNDARY
+Content-Type: text/plain
+
+message 1
+
+--BOUNDARY
+Content-Type: text/plain
+
+message 2
+
+--BOUNDARY--
+""")
+
+    def test_no_separating_blank_line(self):
+        eq = self.ndiffAssertEqual
+        msg = self._msgobj('msg_35.txt')
+        eq(msg.as_string(), """\
+From: aperson@dom.ain
+To: bperson@dom.ain
+Subject: here's something interesting
+
+counter to RFC 2822, there's no separating newline here
+""")
+
+    def test_lying_multipart(self):
+        unless = self.failUnless
+        msg = self._msgobj('msg_41.txt')
+        unless(hasattr(msg, 'defects'))
+        self.assertEqual(len(msg.defects), 2)
+        unless(isinstance(msg.defects[0], errors.NoBoundaryInMultipartDefect))
+        unless(isinstance(msg.defects[1],
+                          errors.MultipartInvariantViolationDefect))
+
+    def test_missing_start_boundary(self):
+        outer = self._msgobj('msg_42.txt')
+        # The message structure is:
+        #
+        # multipart/mixed
+        #    text/plain
+        #    message/rfc822
+        #        multipart/mixed [*]
+        #
+        # [*] This message is missing its start boundary
+        bad = outer.get_payload(1).get_payload(0)
+        self.assertEqual(len(bad.defects), 1)
+        self.failUnless(isinstance(bad.defects[0],
+                                   errors.StartBoundaryNotFoundDefect))
+
+
+
+# Test RFC 2047 header encoding and decoding
+class TestRFC2047(unittest.TestCase):
+    def test_rfc2047_multiline(self):
+        eq = self.assertEqual
+        s = """Re: =?mac-iceland?q?r=8Aksm=9Arg=8Cs?= baz
+ foo bar =?mac-iceland?q?r=8Aksm=9Arg=8Cs?="""
+        dh = decode_header(s)
+        eq(dh, [
+            ('Re:', None),
+            ('r\x8aksm\x9arg\x8cs', 'mac-iceland'),
+            ('baz foo bar', None),
+            ('r\x8aksm\x9arg\x8cs', 'mac-iceland')])
+        eq(str(make_header(dh)),
+           """Re: =?mac-iceland?q?r=8Aksm=9Arg=8Cs?= baz foo bar
+ =?mac-iceland?q?r=8Aksm=9Arg=8Cs?=""")
+
+    def test_whitespace_eater_unicode(self):
+        eq = self.assertEqual
+        s = '=?ISO-8859-1?Q?Andr=E9?= Pirard <pirard@dom.ain>'
+        dh = decode_header(s)
+        eq(dh, [('Andr\xe9', 'iso-8859-1'), ('Pirard <pirard@dom.ain>', None)])
+        hu = unicode(make_header(dh)).encode('latin-1')
+        eq(hu, 'Andr\xe9 Pirard <pirard@dom.ain>')
+
+    def test_whitespace_eater_unicode_2(self):
+        eq = self.assertEqual
+        s = 'The =?iso-8859-1?b?cXVpY2sgYnJvd24gZm94?= jumped over the =?iso-8859-1?b?bGF6eSBkb2c=?='
+        dh = decode_header(s)
+        eq(dh, [('The', None), ('quick brown fox', 'iso-8859-1'),
+                ('jumped over the', None), ('lazy dog', 'iso-8859-1')])
+        hu = make_header(dh).__unicode__()
+        eq(hu, u'The quick brown fox jumped over the lazy dog')
+
+
+
+# Test the MIMEMessage class
+class TestMIMEMessage(TestEmailBase):
+    def setUp(self):
+        fp = openfile('msg_11.txt')
+        try:
+            self._text = fp.read()
+        finally:
+            fp.close()
+
+    def test_type_error(self):
+        self.assertRaises(TypeError, MIMEMessage, 'a plain string')
+
+    def test_valid_argument(self):
+        eq = self.assertEqual
+        unless = self.failUnless
+        subject = 'A sub-message'
+        m = Message()
+        m['Subject'] = subject
+        r = MIMEMessage(m)
+        eq(r.get_content_type(), 'message/rfc822')
+        payload = r.get_payload()
+        unless(isinstance(payload, list))
+        eq(len(payload), 1)
+        subpart = payload[0]
+        unless(subpart is m)
+        eq(subpart['subject'], subject)
+
+    def test_bad_multipart(self):
+        eq = self.assertEqual
+        msg1 = Message()
+        msg1['Subject'] = 'subpart 1'
+        msg2 = Message()
+        msg2['Subject'] = 'subpart 2'
+        r = MIMEMessage(msg1)
+        self.assertRaises(errors.MultipartConversionError, r.attach, msg2)
+
+    def test_generate(self):
+        # First craft the message to be encapsulated
+        m = Message()
+        m['Subject'] = 'An enclosed message'
+        m.set_payload('Here is the body of the message.\n')
+        r = MIMEMessage(m)
+        r['Subject'] = 'The enclosing message'
+        s = StringIO()
+        g = Generator(s)
+        g.flatten(r)
+        self.assertEqual(s.getvalue(), """\
+Content-Type: message/rfc822
+MIME-Version: 1.0
+Subject: The enclosing message
+
+Subject: An enclosed message
+
+Here is the body of the message.
+""")
+
+    def test_parse_message_rfc822(self):
+        eq = self.assertEqual
+        unless = self.failUnless
+        msg = self._msgobj('msg_11.txt')
+        eq(msg.get_content_type(), 'message/rfc822')
+        payload = msg.get_payload()
+        unless(isinstance(payload, list))
+        eq(len(payload), 1)
+        submsg = payload[0]
+        self.failUnless(isinstance(submsg, Message))
+        eq(submsg['subject'], 'An enclosed message')
+        eq(submsg.get_payload(), 'Here is the body of the message.\n')
+
+    def test_dsn(self):
+        eq = self.assertEqual
+        unless = self.failUnless
+        # msg 16 is a Delivery Status Notification, see RFC 1894
+        msg = self._msgobj('msg_16.txt')
+        eq(msg.get_content_type(), 'multipart/report')
+        unless(msg.is_multipart())
+        eq(len(msg.get_payload()), 3)
+        # Subpart 1 is a text/plain, human readable section
+        subpart = msg.get_payload(0)
+        eq(subpart.get_content_type(), 'text/plain')
+        eq(subpart.get_payload(), """\
+This report relates to a message you sent with the following header fields:
+
+  Message-id: <002001c144a6$8752e060$56104586@oxy.edu>
+  Date: Sun, 23 Sep 2001 20:10:55 -0700
+  From: "Ian T. Henry" <henryi@oxy.edu>
+  To: SoCal Raves <scr@socal-raves.org>
+  Subject: [scr] yeah for Ians!!
+
+Your message cannot be delivered to the following recipients:
+
+  Recipient address: jangel1@cougar.noc.ucla.edu
+  Reason: recipient reached disk quota
+
+""")
+        # Subpart 2 contains the machine parsable DSN information.  It
+        # consists of two blocks of headers, represented by two nested Message
+        # objects.
+        subpart = msg.get_payload(1)
+        eq(subpart.get_content_type(), 'message/delivery-status')
+        eq(len(subpart.get_payload()), 2)
+        # message/delivery-status should treat each block as a bunch of
+        # headers, i.e. a bunch of Message objects.
+        dsn1 = subpart.get_payload(0)
+        unless(isinstance(dsn1, Message))
+        eq(dsn1['original-envelope-id'], '0GK500B4HD0888@cougar.noc.ucla.edu')
+        eq(dsn1.get_param('dns', header='reporting-mta'), '')
+        # Try a missing one <wink>
+        eq(dsn1.get_param('nsd', header='reporting-mta'), None)
+        dsn2 = subpart.get_payload(1)
+        unless(isinstance(dsn2, Message))
+        eq(dsn2['action'], 'failed')
+        eq(dsn2.get_params(header='original-recipient'),
+           [('rfc822', ''), ('jangel1@cougar.noc.ucla.edu', '')])
+        eq(dsn2.get_param('rfc822', header='final-recipient'), '')
+        # Subpart 3 is the original message
+        subpart = msg.get_payload(2)
+        eq(subpart.get_content_type(), 'message/rfc822')
+        payload = subpart.get_payload()
+        unless(isinstance(payload, list))
+        eq(len(payload), 1)
+        subsubpart = payload[0]
+        unless(isinstance(subsubpart, Message))
+        eq(subsubpart.get_content_type(), 'text/plain')
+        eq(subsubpart['message-id'],
+           '<002001c144a6$8752e060$56104586@oxy.edu>')
+
+    def test_epilogue(self):
+        eq = self.ndiffAssertEqual
+        fp = openfile('msg_21.txt')
+        try:
+            text = fp.read()
+        finally:
+            fp.close()
+        msg = Message()
+        msg['From'] = 'aperson@dom.ain'
+        msg['To'] = 'bperson@dom.ain'
+        msg['Subject'] = 'Test'
+        msg.preamble = 'MIME message'
+        msg.epilogue = 'End of MIME message\n'
+        msg1 = MIMEText('One')
+        msg2 = MIMEText('Two')
+        msg.add_header('Content-Type', 'multipart/mixed', boundary='BOUNDARY')
+        msg.attach(msg1)
+        msg.attach(msg2)
+        sfp = StringIO()
+        g = Generator(sfp)
+        g.flatten(msg)
+        eq(sfp.getvalue(), text)
+
+    def test_no_nl_preamble(self):
+        eq = self.ndiffAssertEqual
+        msg = Message()
+        msg['From'] = 'aperson@dom.ain'
+        msg['To'] = 'bperson@dom.ain'
+        msg['Subject'] = 'Test'
+        msg.preamble = 'MIME message'
+        msg.epilogue = ''
+        msg1 = MIMEText('One')
+        msg2 = MIMEText('Two')
+        msg.add_header('Content-Type', 'multipart/mixed', boundary='BOUNDARY')
+        msg.attach(msg1)
+        msg.attach(msg2)
+        eq(msg.as_string(), """\
+From: aperson@dom.ain
+To: bperson@dom.ain
+Subject: Test
+Content-Type: multipart/mixed; boundary="BOUNDARY"
+
+MIME message
+--BOUNDARY
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+One
+--BOUNDARY
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+Two
+--BOUNDARY--
+""")
+
+    def test_default_type(self):
+        eq = self.assertEqual
+        fp = openfile('msg_30.txt')
+        try:
+            msg = email.message_from_file(fp)
+        finally:
+            fp.close()
+        container1 = msg.get_payload(0)
+        eq(container1.get_default_type(), 'message/rfc822')
+        eq(container1.get_content_type(), 'message/rfc822')
+        container2 = msg.get_payload(1)
+        eq(container2.get_default_type(), 'message/rfc822')
+        eq(container2.get_content_type(), 'message/rfc822')
+        container1a = container1.get_payload(0)
+        eq(container1a.get_default_type(), 'text/plain')
+        eq(container1a.get_content_type(), 'text/plain')
+        container2a = container2.get_payload(0)
+        eq(container2a.get_default_type(), 'text/plain')
+        eq(container2a.get_content_type(), 'text/plain')
+
+    def test_default_type_with_explicit_container_type(self):
+        eq = self.assertEqual
+        fp = openfile('msg_28.txt')
+        try:
+            msg = email.message_from_file(fp)
+        finally:
+            fp.close()
+        container1 = msg.get_payload(0)
+        eq(container1.get_default_type(), 'message/rfc822')
+        eq(container1.get_content_type(), 'message/rfc822')
+        container2 = msg.get_payload(1)
+        eq(container2.get_default_type(), 'message/rfc822')
+        eq(container2.get_content_type(), 'message/rfc822')
+        container1a = container1.get_payload(0)
+        eq(container1a.get_default_type(), 'text/plain')
+        eq(container1a.get_content_type(), 'text/plain')
+        container2a = container2.get_payload(0)
+        eq(container2a.get_default_type(), 'text/plain')
+        eq(container2a.get_content_type(), 'text/plain')
+
+    def test_default_type_non_parsed(self):
+        eq = self.assertEqual
+        neq = self.ndiffAssertEqual
+        # Set up container
+        container = MIMEMultipart('digest', 'BOUNDARY')
+        container.epilogue = ''
+        # Set up subparts
+        subpart1a = MIMEText('message 1\n')
+        subpart2a = MIMEText('message 2\n')
+        subpart1 = MIMEMessage(subpart1a)
+        subpart2 = MIMEMessage(subpart2a)
+        container.attach(subpart1)
+        container.attach(subpart2)
+        eq(subpart1.get_content_type(), 'message/rfc822')
+        eq(subpart1.get_default_type(), 'message/rfc822')
+        eq(subpart2.get_content_type(), 'message/rfc822')
+        eq(subpart2.get_default_type(), 'message/rfc822')
+        neq(container.as_string(0), '''\
+Content-Type: multipart/digest; boundary="BOUNDARY"
+MIME-Version: 1.0
+
+--BOUNDARY
+Content-Type: message/rfc822
+MIME-Version: 1.0
+
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+message 1
+
+--BOUNDARY
+Content-Type: message/rfc822
+MIME-Version: 1.0
+
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+message 2
+
+--BOUNDARY--
+''')
+        del subpart1['content-type']
+        del subpart1['mime-version']
+        del subpart2['content-type']
+        del subpart2['mime-version']
+        eq(subpart1.get_content_type(), 'message/rfc822')
+        eq(subpart1.get_default_type(), 'message/rfc822')
+        eq(subpart2.get_content_type(), 'message/rfc822')
+        eq(subpart2.get_default_type(), 'message/rfc822')
+        neq(container.as_string(0), '''\
+Content-Type: multipart/digest; boundary="BOUNDARY"
+MIME-Version: 1.0
+
+--BOUNDARY
+
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+message 1
+
+--BOUNDARY
+
+Content-Type: text/plain; charset="us-ascii"
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+
+message 2
+
+--BOUNDARY--
+''')
+
+    def test_mime_attachments_in_constructor(self):
+        eq = self.assertEqual
+        text1 = MIMEText('')
+        text2 = MIMEText('')
+        msg = MIMEMultipart(_subparts=(text1, text2))
+        eq(len(msg.get_payload()), 2)
+        eq(msg.get_payload(0), text1)
+        eq(msg.get_payload(1), text2)
+
+
+
+# A general test of parser->model->generator idempotency.  IOW, read a message
+# in, parse it into a message object tree, then without touching the tree,
+# regenerate the plain text.  The original text and the transformed text
+# should be identical.  Note: that we ignore the Unix-From since that may
+# contain a changed date.
+class TestIdempotent(TestEmailBase):
+    def _msgobj(self, filename):
+        fp = openfile(filename)
+        try:
+            data = fp.read()
+        finally:
+            fp.close()
+        msg = email.message_from_string(data)
+        return msg, data
+
+    def _idempotent(self, msg, text):
+        eq = self.ndiffAssertEqual
+        s = StringIO()
+        g = Generator(s, maxheaderlen=0)
+        g.flatten(msg)
+        eq(text, s.getvalue())
+
+    def test_parse_text_message(self):
+        eq = self.assertEquals
+        msg, text = self._msgobj('msg_01.txt')
+        eq(msg.get_content_type(), 'text/plain')
+        eq(msg.get_content_maintype(), 'text')
+        eq(msg.get_content_subtype(), 'plain')
+        eq(msg.get_params()[1], ('charset', 'us-ascii'))
+        eq(msg.get_param('charset'), 'us-ascii')
+        eq(msg.preamble, None)
+        eq(msg.epilogue, None)
+        self._idempotent(msg, text)
+
+    def test_parse_untyped_message(self):
+        eq = self.assertEquals
+        msg, text = self._msgobj('msg_03.txt')
+        eq(msg.get_content_type(), 'text/plain')
+        eq(msg.get_params(), None)
+        eq(msg.get_param('charset'), None)
+        self._idempotent(msg, text)
+
+    def test_simple_multipart(self):
+        msg, text = self._msgobj('msg_04.txt')
+        self._idempotent(msg, text)
+
+    def test_MIME_digest(self):
+        msg, text = self._msgobj('msg_02.txt')
+        self._idempotent(msg, text)
+
+    def test_long_header(self):
+        msg, text = self._msgobj('msg_27.txt')
+        self._idempotent(msg, text)
+
+    def test_MIME_digest_with_part_headers(self):
+        msg, text = self._msgobj('msg_28.txt')
+        self._idempotent(msg, text)
+
+    def test_mixed_with_image(self):
+        msg, text = self._msgobj('msg_06.txt')
+        self._idempotent(msg, text)
+
+    def test_multipart_report(self):
+        msg, text = self._msgobj('msg_05.txt')
+        self._idempotent(msg, text)
+
+    def test_dsn(self):
+        msg, text = self._msgobj('msg_16.txt')
+        self._idempotent(msg, text)
+
+    def test_preamble_epilogue(self):
+        msg, text = self._msgobj('msg_21.txt')
+        self._idempotent(msg, text)
+
+    def test_multipart_one_part(self):
+        msg, text = self._msgobj('msg_23.txt')
+        self._idempotent(msg, text)
+
+    def test_multipart_no_parts(self):
+        msg, text = self._msgobj('msg_24.txt')
+        self._idempotent(msg, text)
+
+    def test_no_start_boundary(self):
+        msg, text = self._msgobj('msg_31.txt')
+        self._idempotent(msg, text)
+
+    def test_rfc2231_charset(self):
+        msg, text = self._msgobj('msg_32.txt')
+        self._idempotent(msg, text)
+
+    def test_more_rfc2231_parameters(self):
+        msg, text = self._msgobj('msg_33.txt')
+        self._idempotent(msg, text)
+
+    def test_text_plain_in_a_multipart_digest(self):
+        msg, text = self._msgobj('msg_34.txt')
+        self._idempotent(msg, text)
+
+    def test_nested_multipart_mixeds(self):
+        msg, text = self._msgobj('msg_12a.txt')
+        self._idempotent(msg, text)
+
+    def test_message_external_body_idempotent(self):
+        msg, text = self._msgobj('msg_36.txt')
+        self._idempotent(msg, text)
+
+    def test_content_type(self):
+        eq = self.assertEquals
+        unless = self.failUnless
+        # Get a message object and reset the seek pointer for other tests
+        msg, text = self._msgobj('msg_05.txt')
+        eq(msg.get_content_type(), 'multipart/report')
+        # Test the Content-Type: parameters
+        params = {}
+        for pk, pv in msg.get_params():
+            params[pk] = pv
+        eq(params['report-type'], 'delivery-status')
+        eq(params['boundary'], 'D1690A7AC1.996856090/mail.example.com')
+        eq(msg.preamble, 'This is a MIME-encapsulated message.\n')
+        eq(msg.epilogue, '\n')
+        eq(len(msg.get_payload()), 3)
+        # Make sure the subparts are what we expect
+        msg1 = msg.get_payload(0)
+        eq(msg1.get_content_type(), 'text/plain')
+        eq(msg1.get_payload(), 'Yadda yadda yadda\n')
+        msg2 = msg.get_payload(1)
+        eq(msg2.get_content_type(), 'text/plain')
+        eq(msg2.get_payload(), 'Yadda yadda yadda\n')
+        msg3 = msg.get_payload(2)
+        eq(msg3.get_content_type(), 'message/rfc822')
+        self.failUnless(isinstance(msg3, Message))
+        payload = msg3.get_payload()
+        unless(isinstance(payload, list))
+        eq(len(payload), 1)
+        msg4 = payload[0]
+        unless(isinstance(msg4, Message))
+        eq(msg4.get_payload(), 'Yadda yadda yadda\n')
+
+    def test_parser(self):
+        eq = self.assertEquals
+        unless = self.failUnless
+        msg, text = self._msgobj('msg_06.txt')
+        # Check some of the outer headers
+        eq(msg.get_content_type(), 'message/rfc822')
+        # Make sure the payload is a list of exactly one sub-Message, and that
+        # that submessage has a type of text/plain
+        payload = msg.get_payload()
+        unless(isinstance(payload, list))
+        eq(len(payload), 1)
+        msg1 = payload[0]
+        self.failUnless(isinstance(msg1, Message))
+        eq(msg1.get_content_type(), 'text/plain')
+        self.failUnless(isinstance(msg1.get_payload(), str))
+        eq(msg1.get_payload(), '\n')
+
+
+
+# Test various other bits of the package's functionality
+class TestMiscellaneous(TestEmailBase):
+    def test_message_from_string(self):
+        fp = openfile('msg_01.txt')
+        try:
+            text = fp.read()
+        finally:
+            fp.close()
+        msg = email.message_from_string(text)
+        s = StringIO()
+        # Don't wrap/continue long headers since we're trying to test
+        # idempotency.
+        g = Generator(s, maxheaderlen=0)
+        g.flatten(msg)
+        self.assertEqual(text, s.getvalue())
+
+    def test_message_from_file(self):
+        fp = openfile('msg_01.txt')
+        try:
+            text = fp.read()
+            fp.seek(0)
+            msg = email.message_from_file(fp)
+            s = StringIO()
+            # Don't wrap/continue long headers since we're trying to test
+            # idempotency.
+            g = Generator(s, maxheaderlen=0)
+            g.flatten(msg)
+            self.assertEqual(text, s.getvalue())
+        finally:
+            fp.close()
+
+    def test_message_from_string_with_class(self):
+        unless = self.failUnless
+        fp = openfile('msg_01.txt')
+        try:
+            text = fp.read()
+        finally:
+            fp.close()
+        # Create a subclass
+        class MyMessage(Message):
+            pass
+
+        msg = email.message_from_string(text, MyMessage)
+        unless(isinstance(msg, MyMessage))
+        # Try something more complicated
+        fp = openfile('msg_02.txt')
+        try:
+            text = fp.read()
+        finally:
+            fp.close()
+        msg = email.message_from_string(text, MyMessage)
+        for subpart in msg.walk():
+            unless(isinstance(subpart, MyMessage))
+
+    def test_message_from_file_with_class(self):
+        unless = self.failUnless
+        # Create a subclass
+        class MyMessage(Message):
+            pass
+
+        fp = openfile('msg_01.txt')
+        try:
+            msg = email.message_from_file(fp, MyMessage)
+        finally:
+            fp.close()
+        unless(isinstance(msg, MyMessage))
+        # Try something more complicated
+        fp = openfile('msg_02.txt')
+        try:
+            msg = email.message_from_file(fp, MyMessage)
+        finally:
+            fp.close()
+        for subpart in msg.walk():
+            unless(isinstance(subpart, MyMessage))
+
+    def test__all__(self):
+        module = __import__('email')
+        # Can't use sorted() here due to Python 2.3 compatibility
+        all = module.__all__[:]
+        all.sort()
+        self.assertEqual(all, [
+            # Old names
+            'Charset', 'Encoders', 'Errors', 'Generator',
+            'Header', 'Iterators', 'MIMEAudio', 'MIMEBase',
+            'MIMEImage', 'MIMEMessage', 'MIMEMultipart',
+            'MIMENonMultipart', 'MIMEText', 'Message',
+            'Parser', 'Utils', 'base64MIME',
+            # new names
+            'base64mime', 'charset', 'encoders', 'errors', 'generator',
+            'header', 'iterators', 'message', 'message_from_file',
+            'message_from_string', 'mime', 'parser',
+            'quopriMIME', 'quoprimime', 'utils',
+            ])
+
+    def test_formatdate(self):
+        now = time.time()
+        self.assertEqual(utils.parsedate(utils.formatdate(now))[:6],
+                         time.gmtime(now)[:6])
+
+    def test_formatdate_localtime(self):
+        now = time.time()
+        self.assertEqual(
+            utils.parsedate(utils.formatdate(now, localtime=True))[:6],
+            time.localtime(now)[:6])
+
+    def test_formatdate_usegmt(self):
+        now = time.time()
+        self.assertEqual(
+            utils.formatdate(now, localtime=False),
+            time.strftime('%a, %d %b %Y %H:%M:%S -0000', time.gmtime(now)))
+        self.assertEqual(
+            utils.formatdate(now, localtime=False, usegmt=True),
+            time.strftime('%a, %d %b %Y %H:%M:%S GMT', time.gmtime(now)))
+
+    def test_parsedate_none(self):
+        self.assertEqual(utils.parsedate(''), None)
+
+    def test_parsedate_compact(self):
+        # The FWS after the comma is optional
+        self.assertEqual(utils.parsedate('Wed,3 Apr 2002 14:58:26 +0800'),
+                         utils.parsedate('Wed, 3 Apr 2002 14:58:26 +0800'))
+
+    def test_parsedate_no_dayofweek(self):
+        eq = self.assertEqual
+        eq(utils.parsedate_tz('25 Feb 2003 13:47:26 -0800'),
+           (2003, 2, 25, 13, 47, 26, 0, 1, -1, -28800))
+
+    def test_parsedate_compact_no_dayofweek(self):
+        eq = self.assertEqual
+        eq(utils.parsedate_tz('5 Feb 2003 13:47:26 -0800'),
+           (2003, 2, 5, 13, 47, 26, 0, 1, -1, -28800))
+
+    def test_parsedate_acceptable_to_time_functions(self):
+        eq = self.assertEqual
+        timetup = utils.parsedate('5 Feb 2003 13:47:26 -0800')
+        t = int(time.mktime(timetup))
+        eq(time.localtime(t)[:6], timetup[:6])
+        eq(int(time.strftime('%Y', timetup)), 2003)
+        timetup = utils.parsedate_tz('5 Feb 2003 13:47:26 -0800')
+        t = int(time.mktime(timetup[:9]))
+        eq(time.localtime(t)[:6], timetup[:6])
+        eq(int(time.strftime('%Y', timetup[:9])), 2003)
+
+    def test_parseaddr_empty(self):
+        self.assertEqual(utils.parseaddr('<>'), ('', ''))
+        self.assertEqual(utils.formataddr(utils.parseaddr('<>')), '')
+
+    def test_noquote_dump(self):
+        self.assertEqual(
+            utils.formataddr(('A Silly Person', 'person@dom.ain')),
+            'A Silly Person <person@dom.ain>')
+
+    def test_escape_dump(self):
+        self.assertEqual(
+            utils.formataddr(('A (Very) Silly Person', 'person@dom.ain')),
+            r'"A \(Very\) Silly Person" <person@dom.ain>')
+        a = r'A \(Special\) Person'
+        b = 'person@dom.ain'
+        self.assertEqual(utils.parseaddr(utils.formataddr((a, b))), (a, b))
+
+    def test_escape_backslashes(self):
+        self.assertEqual(
+            utils.formataddr(('Arthur \Backslash\ Foobar', 'person@dom.ain')),
+            r'"Arthur \\Backslash\\ Foobar" <person@dom.ain>')
+        a = r'Arthur \Backslash\ Foobar'
+        b = 'person@dom.ain'
+        self.assertEqual(utils.parseaddr(utils.formataddr((a, b))), (a, b))
+
+    def test_name_with_dot(self):
+        x = 'John X. Doe <jxd@example.com>'
+        y = '"John X. Doe" <jxd@example.com>'
+        a, b = ('John X. Doe', 'jxd@example.com')
+        self.assertEqual(utils.parseaddr(x), (a, b))
+        self.assertEqual(utils.parseaddr(y), (a, b))
+        # formataddr() quotes the name if there's a dot in it
+        self.assertEqual(utils.formataddr((a, b)), y)
+
+    def test_quote_dump(self):
+        self.assertEqual(
+            utils.formataddr(('A Silly; Person', 'person@dom.ain')),
+            r'"A Silly; Person" <person@dom.ain>')
+
+    def test_fix_eols(self):
+        eq = self.assertEqual
+        eq(utils.fix_eols('hello'), 'hello')
+        eq(utils.fix_eols('hello\n'), 'hello\r\n')
+        eq(utils.fix_eols('hello\r'), 'hello\r\n')
+        eq(utils.fix_eols('hello\r\n'), 'hello\r\n')
+        eq(utils.fix_eols('hello\n\r'), 'hello\r\n\r\n')
+
+    def test_charset_richcomparisons(self):
+        eq = self.assertEqual
+        ne = self.failIfEqual
+        cset1 = Charset()
+        cset2 = Charset()
+        eq(cset1, 'us-ascii')
+        eq(cset1, 'US-ASCII')
+        eq(cset1, 'Us-AsCiI')
+        eq('us-ascii', cset1)
+        eq('US-ASCII', cset1)
+        eq('Us-AsCiI', cset1)
+        ne(cset1, 'usascii')
+        ne(cset1, 'USASCII')
+        ne(cset1, 'UsAsCiI')
+        ne('usascii', cset1)
+        ne('USASCII', cset1)
+        ne('UsAsCiI', cset1)
+        eq(cset1, cset2)
+        eq(cset2, cset1)
+
+    def test_getaddresses(self):
+        eq = self.assertEqual
+        eq(utils.getaddresses(['aperson@dom.ain (Al Person)',
+                               'Bud Person <bperson@dom.ain>']),
+           [('Al Person', 'aperson@dom.ain'),
+            ('Bud Person', 'bperson@dom.ain')])
+
+    def test_getaddresses_nasty(self):
+        eq = self.assertEqual
+        eq(utils.getaddresses(['foo: ;']), [('', '')])
+        eq(utils.getaddresses(
+           ['[]*-- =~$']),
+           [('', ''), ('', ''), ('', '*--')])
+        eq(utils.getaddresses(
+           ['foo: ;', '"Jason R. Mastaler" <jason@dom.ain>']),
+           [('', ''), ('Jason R. Mastaler', 'jason@dom.ain')])
+
+    def test_utils_quote_unquote(self):
+        eq = self.assertEqual
+        msg = Message()
+        msg.add_header('content-disposition', 'attachment',
+                       filename='foo\\wacky"name')
+        eq(msg.get_filename(), 'foo\\wacky"name')
+
+    def test_get_body_encoding_with_bogus_charset(self):
+        charset = Charset('not a charset')
+        self.assertEqual(charset.get_body_encoding(), 'base64')
+
+    def test_get_body_encoding_with_uppercase_charset(self):
+        eq = self.assertEqual
+        msg = Message()
+        msg['Content-Type'] = 'text/plain; charset=UTF-8'
+        eq(msg['content-type'], 'text/plain; charset=UTF-8')
+        charsets = msg.get_charsets()
+        eq(len(charsets), 1)
+        eq(charsets[0], 'utf-8')
+        charset = Charset(charsets[0])
+        eq(charset.get_body_encoding(), 'base64')
+        msg.set_payload('hello world', charset=charset)
+        eq(msg.get_payload(), 'aGVsbG8gd29ybGQ=\n')
+        eq(msg.get_payload(decode=True), 'hello world')
+        eq(msg['content-transfer-encoding'], 'base64')
+        # Try another one
+        msg = Message()
+        msg['Content-Type'] = 'text/plain; charset="US-ASCII"'
+        charsets = msg.get_charsets()
+        eq(len(charsets), 1)
+        eq(charsets[0], 'us-ascii')
+        charset = Charset(charsets[0])
+        eq(charset.get_body_encoding(), encoders.encode_7or8bit)
+        msg.set_payload('hello world', charset=charset)
+        eq(msg.get_payload(), 'hello world')
+        eq(msg['content-transfer-encoding'], '7bit')
+
+    def test_charsets_case_insensitive(self):
+        lc = Charset('us-ascii')
+        uc = Charset('US-ASCII')
+        self.assertEqual(lc.get_body_encoding(), uc.get_body_encoding())
+
+    def test_partial_falls_inside_message_delivery_status(self):
+        eq = self.ndiffAssertEqual
+        # The Parser interface provides chunks of data to FeedParser in 8192
+        # byte gulps.  SF bug #1076485 found one of those chunks inside
+        # message/delivery-status header block, which triggered an
+        # unreadline() of NeedMoreData.
+        msg = self._msgobj('msg_43.txt')
+        sfp = StringIO()
+        iterators._structure(msg, sfp)
+        eq(sfp.getvalue(), """\
+multipart/report
+    text/plain
+    message/delivery-status
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+        text/plain
+    text/rfc822-headers
+""")
+
+
+
+# Test the iterator/generators
+class TestIterators(TestEmailBase):
+    def test_body_line_iterator(self):
+        eq = self.assertEqual
+        neq = self.ndiffAssertEqual
+        # First a simple non-multipart message
+        msg = self._msgobj('msg_01.txt')
+        it = iterators.body_line_iterator(msg)
+        lines = list(it)
+        eq(len(lines), 6)
+        neq(EMPTYSTRING.join(lines), msg.get_payload())
+        # Now a more complicated multipart
+        msg = self._msgobj('msg_02.txt')
+        it = iterators.body_line_iterator(msg)
+        lines = list(it)
+        eq(len(lines), 43)
+        fp = openfile('msg_19.txt')
+        try:
+            neq(EMPTYSTRING.join(lines), fp.read())
+        finally:
+            fp.close()
+
+    def test_typed_subpart_iterator(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_04.txt')
+        it = iterators.typed_subpart_iterator(msg, 'text')
+        lines = []
+        subparts = 0
+        for subpart in it:
+            subparts += 1
+            lines.append(subpart.get_payload())
+        eq(subparts, 2)
+        eq(EMPTYSTRING.join(lines), """\
+a simple kind of mirror
+to reflect upon our own
+a simple kind of mirror
+to reflect upon our own
+""")
+
+    def test_typed_subpart_iterator_default_type(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_03.txt')
+        it = iterators.typed_subpart_iterator(msg, 'text', 'plain')
+        lines = []
+        subparts = 0
+        for subpart in it:
+            subparts += 1
+            lines.append(subpart.get_payload())
+        eq(subparts, 1)
+        eq(EMPTYSTRING.join(lines), """\
+
+Hi,
+
+Do you like this message?
+
+-Me
+""")
+
+
+
+class TestParsers(TestEmailBase):
+    def test_header_parser(self):
+        eq = self.assertEqual
+        # Parse only the headers of a complex multipart MIME document
+        fp = openfile('msg_02.txt')
+        try:
+            msg = HeaderParser().parse(fp)
+        finally:
+            fp.close()
+        eq(msg['from'], 'ppp-request@zzz.org')
+        eq(msg['to'], 'ppp@zzz.org')
+        eq(msg.get_content_type(), 'multipart/mixed')
+        self.failIf(msg.is_multipart())
+        self.failUnless(isinstance(msg.get_payload(), str))
+
+    def test_whitespace_continuation(self):
+        eq = self.assertEqual
+        # This message contains a line after the Subject: header that has only
+        # whitespace, but it is not empty!
+        msg = email.message_from_string("""\
+From: aperson@dom.ain
+To: bperson@dom.ain
+Subject: the next line has a space on it
+\x20
+Date: Mon, 8 Apr 2002 15:09:19 -0400
+Message-ID: spam
+
+Here's the message body
+""")
+        eq(msg['subject'], 'the next line has a space on it\n ')
+        eq(msg['message-id'], 'spam')
+        eq(msg.get_payload(), "Here's the message body\n")
+
+    def test_whitespace_continuation_last_header(self):
+        eq = self.assertEqual
+        # Like the previous test, but the subject line is the last
+        # header.
+        msg = email.message_from_string("""\
+From: aperson@dom.ain
+To: bperson@dom.ain
+Date: Mon, 8 Apr 2002 15:09:19 -0400
+Message-ID: spam
+Subject: the next line has a space on it
+\x20
+
+Here's the message body
+""")
+        eq(msg['subject'], 'the next line has a space on it\n ')
+        eq(msg['message-id'], 'spam')
+        eq(msg.get_payload(), "Here's the message body\n")
+
+    def test_crlf_separation(self):
+        eq = self.assertEqual
+        fp = openfile('msg_26.txt', mode='rb')
+        try:
+            msg = Parser().parse(fp)
+        finally:
+            fp.close()
+        eq(len(msg.get_payload()), 2)
+        part1 = msg.get_payload(0)
+        eq(part1.get_content_type(), 'text/plain')
+        eq(part1.get_payload(), 'Simple email with attachment.\r\n\r\n')
+        part2 = msg.get_payload(1)
+        eq(part2.get_content_type(), 'application/riscos')
+
+    def test_multipart_digest_with_extra_mime_headers(self):
+        eq = self.assertEqual
+        neq = self.ndiffAssertEqual
+        fp = openfile('msg_28.txt')
+        try:
+            msg = email.message_from_file(fp)
+        finally:
+            fp.close()
+        # Structure is:
+        # multipart/digest
+        #   message/rfc822
+        #     text/plain
+        #   message/rfc822
+        #     text/plain
+        eq(msg.is_multipart(), 1)
+        eq(len(msg.get_payload()), 2)
+        part1 = msg.get_payload(0)
+        eq(part1.get_content_type(), 'message/rfc822')
+        eq(part1.is_multipart(), 1)
+        eq(len(part1.get_payload()), 1)
+        part1a = part1.get_payload(0)
+        eq(part1a.is_multipart(), 0)
+        eq(part1a.get_content_type(), 'text/plain')
+        neq(part1a.get_payload(), 'message 1\n')
+        # next message/rfc822
+        part2 = msg.get_payload(1)
+        eq(part2.get_content_type(), 'message/rfc822')
+        eq(part2.is_multipart(), 1)
+        eq(len(part2.get_payload()), 1)
+        part2a = part2.get_payload(0)
+        eq(part2a.is_multipart(), 0)
+        eq(part2a.get_content_type(), 'text/plain')
+        neq(part2a.get_payload(), 'message 2\n')
+
+    def test_three_lines(self):
+        # A bug report by Andrew McNamara
+        lines = ['From: Andrew Person <aperson@dom.ain',
+                 'Subject: Test',
+                 'Date: Tue, 20 Aug 2002 16:43:45 +1000']
+        msg = email.message_from_string(NL.join(lines))
+        self.assertEqual(msg['date'], 'Tue, 20 Aug 2002 16:43:45 +1000')
+
+    def test_strip_line_feed_and_carriage_return_in_headers(self):
+        eq = self.assertEqual
+        # For [ 1002475 ] email message parser doesn't handle \r\n correctly
+        value1 = 'text'
+        value2 = 'more text'
+        m = 'Header: %s\r\nNext-Header: %s\r\n\r\nBody\r\n\r\n' % (
+            value1, value2)
+        msg = email.message_from_string(m)
+        eq(msg.get('Header'), value1)
+        eq(msg.get('Next-Header'), value2)
+
+    def test_rfc2822_header_syntax(self):
+        eq = self.assertEqual
+        m = '>From: foo\nFrom: bar\n!"#QUX;~: zoo\n\nbody'
+        msg = email.message_from_string(m)
+        eq(len(msg.keys()), 3)
+        keys = msg.keys()
+        keys.sort()
+        eq(keys, ['!"#QUX;~', '>From', 'From'])
+        eq(msg.get_payload(), 'body')
+
+    def test_rfc2822_space_not_allowed_in_header(self):
+        eq = self.assertEqual
+        m = '>From foo@example.com 11:25:53\nFrom: bar\n!"#QUX;~: zoo\n\nbody'
+        msg = email.message_from_string(m)
+        eq(len(msg.keys()), 0)
+
+    def test_rfc2822_one_character_header(self):
+        eq = self.assertEqual
+        m = 'A: first header\nB: second header\nCC: third header\n\nbody'
+        msg = email.message_from_string(m)
+        headers = msg.keys()
+        headers.sort()
+        eq(headers, ['A', 'B', 'CC'])
+        eq(msg.get_payload(), 'body')
+
+
+
+class TestBase64(unittest.TestCase):
+    def test_len(self):
+        eq = self.assertEqual
+        eq(base64mime.base64_len('hello'),
+           len(base64mime.encode('hello', eol='')))
+        for size in range(15):
+            if   size == 0 : bsize = 0
+            elif size <= 3 : bsize = 4
+            elif size <= 6 : bsize = 8
+            elif size <= 9 : bsize = 12
+            elif size <= 12: bsize = 16
+            else           : bsize = 20
+            eq(base64mime.base64_len('x'*size), bsize)
+
+    def test_decode(self):
+        eq = self.assertEqual
+        eq(base64mime.decode(''), '')
+        eq(base64mime.decode('aGVsbG8='), 'hello')
+        eq(base64mime.decode('aGVsbG8=', 'X'), 'hello')
+        eq(base64mime.decode('aGVsbG8NCndvcmxk\n', 'X'), 'helloXworld')
+
+    def test_encode(self):
+        eq = self.assertEqual
+        eq(base64mime.encode(''), '')
+        eq(base64mime.encode('hello'), 'aGVsbG8=\n')
+        # Test the binary flag
+        eq(base64mime.encode('hello\n'), 'aGVsbG8K\n')
+        eq(base64mime.encode('hello\n', 0), 'aGVsbG8NCg==\n')
+        # Test the maxlinelen arg
+        eq(base64mime.encode('xxxx ' * 20, maxlinelen=40), """\
+eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg
+eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg
+eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg
+eHh4eCB4eHh4IA==
+""")
+        # Test the eol argument
+        eq(base64mime.encode('xxxx ' * 20, maxlinelen=40, eol='\r\n'), """\
+eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg\r
+eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg\r
+eHh4eCB4eHh4IHh4eHggeHh4eCB4eHh4IHh4eHgg\r
+eHh4eCB4eHh4IA==\r
+""")
+
+    def test_header_encode(self):
+        eq = self.assertEqual
+        he = base64mime.header_encode
+        eq(he('hello'), '=?iso-8859-1?b?aGVsbG8=?=')
+        eq(he('hello\nworld'), '=?iso-8859-1?b?aGVsbG8NCndvcmxk?=')
+        # Test the charset option
+        eq(he('hello', charset='iso-8859-2'), '=?iso-8859-2?b?aGVsbG8=?=')
+        # Test the keep_eols flag
+        eq(he('hello\nworld', keep_eols=True),
+           '=?iso-8859-1?b?aGVsbG8Kd29ybGQ=?=')
+        # Test the maxlinelen argument
+        eq(he('xxxx ' * 20, maxlinelen=40), """\
+=?iso-8859-1?b?eHh4eCB4eHh4IHh4eHggeHg=?=
+ =?iso-8859-1?b?eHggeHh4eCB4eHh4IHh4eHg=?=
+ =?iso-8859-1?b?IHh4eHggeHh4eCB4eHh4IHg=?=
+ =?iso-8859-1?b?eHh4IHh4eHggeHh4eCB4eHg=?=
+ =?iso-8859-1?b?eCB4eHh4IHh4eHggeHh4eCA=?=
+ =?iso-8859-1?b?eHh4eCB4eHh4IHh4eHgg?=""")
+        # Test the eol argument
+        eq(he('xxxx ' * 20, maxlinelen=40, eol='\r\n'), """\
+=?iso-8859-1?b?eHh4eCB4eHh4IHh4eHggeHg=?=\r
+ =?iso-8859-1?b?eHggeHh4eCB4eHh4IHh4eHg=?=\r
+ =?iso-8859-1?b?IHh4eHggeHh4eCB4eHh4IHg=?=\r
+ =?iso-8859-1?b?eHh4IHh4eHggeHh4eCB4eHg=?=\r
+ =?iso-8859-1?b?eCB4eHh4IHh4eHggeHh4eCA=?=\r
+ =?iso-8859-1?b?eHh4eCB4eHh4IHh4eHgg?=""")
+
+
+
+class TestQuopri(unittest.TestCase):
+    def setUp(self):
+        self.hlit = [chr(x) for x in range(ord('a'), ord('z')+1)] + \
+                    [chr(x) for x in range(ord('A'), ord('Z')+1)] + \
+                    [chr(x) for x in range(ord('0'), ord('9')+1)] + \
+                    ['!', '*', '+', '-', '/', ' ']
+        self.hnon = [chr(x) for x in range(256) if chr(x) not in self.hlit]
+        assert len(self.hlit) + len(self.hnon) == 256
+        self.blit = [chr(x) for x in range(ord(' '), ord('~')+1)] + ['\t']
+        self.blit.remove('=')
+        self.bnon = [chr(x) for x in range(256) if chr(x) not in self.blit]
+        assert len(self.blit) + len(self.bnon) == 256
+
+    def test_header_quopri_check(self):
+        for c in self.hlit:
+            self.failIf(quoprimime.header_quopri_check(c))
+        for c in self.hnon:
+            self.failUnless(quoprimime.header_quopri_check(c))
+
+    def test_body_quopri_check(self):
+        for c in self.blit:
+            self.failIf(quoprimime.body_quopri_check(c))
+        for c in self.bnon:
+            self.failUnless(quoprimime.body_quopri_check(c))
+
+    def test_header_quopri_len(self):
+        eq = self.assertEqual
+        hql = quoprimime.header_quopri_len
+        enc = quoprimime.header_encode
+        for s in ('hello', 'h@e@l@l@o@'):
+            # Empty charset and no line-endings.  7 == RFC chrome
+            eq(hql(s), len(enc(s, charset='', eol=''))-7)
+        for c in self.hlit:
+            eq(hql(c), 1)
+        for c in self.hnon:
+            eq(hql(c), 3)
+
+    def test_body_quopri_len(self):
+        eq = self.assertEqual
+        bql = quoprimime.body_quopri_len
+        for c in self.blit:
+            eq(bql(c), 1)
+        for c in self.bnon:
+            eq(bql(c), 3)
+
+    def test_quote_unquote_idempotent(self):
+        for x in range(256):
+            c = chr(x)
+            self.assertEqual(quoprimime.unquote(quoprimime.quote(c)), c)
+
+    def test_header_encode(self):
+        eq = self.assertEqual
+        he = quoprimime.header_encode
+        eq(he('hello'), '=?iso-8859-1?q?hello?=')
+        eq(he('hello\nworld'), '=?iso-8859-1?q?hello=0D=0Aworld?=')
+        # Test the charset option
+        eq(he('hello', charset='iso-8859-2'), '=?iso-8859-2?q?hello?=')
+        # Test the keep_eols flag
+        eq(he('hello\nworld', keep_eols=True), '=?iso-8859-1?q?hello=0Aworld?=')
+        # Test a non-ASCII character
+        eq(he('hello\xc7there'), '=?iso-8859-1?q?hello=C7there?=')
+        # Test the maxlinelen argument
+        eq(he('xxxx ' * 20, maxlinelen=40), """\
+=?iso-8859-1?q?xxxx_xxxx_xxxx_xxxx_xx?=
+ =?iso-8859-1?q?xx_xxxx_xxxx_xxxx_xxxx?=
+ =?iso-8859-1?q?_xxxx_xxxx_xxxx_xxxx_x?=
+ =?iso-8859-1?q?xxx_xxxx_xxxx_xxxx_xxx?=
+ =?iso-8859-1?q?x_xxxx_xxxx_?=""")
+        # Test the eol argument
+        eq(he('xxxx ' * 20, maxlinelen=40, eol='\r\n'), """\
+=?iso-8859-1?q?xxxx_xxxx_xxxx_xxxx_xx?=\r
+ =?iso-8859-1?q?xx_xxxx_xxxx_xxxx_xxxx?=\r
+ =?iso-8859-1?q?_xxxx_xxxx_xxxx_xxxx_x?=\r
+ =?iso-8859-1?q?xxx_xxxx_xxxx_xxxx_xxx?=\r
+ =?iso-8859-1?q?x_xxxx_xxxx_?=""")
+
+    def test_decode(self):
+        eq = self.assertEqual
+        eq(quoprimime.decode(''), '')
+        eq(quoprimime.decode('hello'), 'hello')
+        eq(quoprimime.decode('hello', 'X'), 'hello')
+        eq(quoprimime.decode('hello\nworld', 'X'), 'helloXworld')
+
+    def test_encode(self):
+        eq = self.assertEqual
+        eq(quoprimime.encode(''), '')
+        eq(quoprimime.encode('hello'), 'hello')
+        # Test the binary flag
+        eq(quoprimime.encode('hello\r\nworld'), 'hello\nworld')
+        eq(quoprimime.encode('hello\r\nworld', 0), 'hello\nworld')
+        # Test the maxlinelen arg
+        eq(quoprimime.encode('xxxx ' * 20, maxlinelen=40), """\
+xxxx xxxx xxxx xxxx xxxx xxxx xxxx xxxx=
+ xxxx xxxx xxxx xxxx xxxx xxxx xxxx xxx=
+x xxxx xxxx xxxx xxxx=20""")
+        # Test the eol argument
+        eq(quoprimime.encode('xxxx ' * 20, maxlinelen=40, eol='\r\n'), """\
+xxxx xxxx xxxx xxxx xxxx xxxx xxxx xxxx=\r
+ xxxx xxxx xxxx xxxx xxxx xxxx xxxx xxx=\r
+x xxxx xxxx xxxx xxxx=20""")
+        eq(quoprimime.encode("""\
+one line
+
+two line"""), """\
+one line
+
+two line""")
+
+
+
+# Test the Charset class
+class TestCharset(unittest.TestCase):
+    def tearDown(self):
+        from email import charset as CharsetModule
+        try:
+            del CharsetModule.CHARSETS['fake']
+        except KeyError:
+            pass
+
+    def test_idempotent(self):
+        eq = self.assertEqual
+        # Make sure us-ascii = no Unicode conversion
+        c = Charset('us-ascii')
+        s = 'Hello World!'
+        sp = c.to_splittable(s)
+        eq(s, c.from_splittable(sp))
+        # test 8-bit idempotency with us-ascii
+        s = '\xa4\xa2\xa4\xa4\xa4\xa6\xa4\xa8\xa4\xaa'
+        sp = c.to_splittable(s)
+        eq(s, c.from_splittable(sp))
+
+    def test_body_encode(self):
+        eq = self.assertEqual
+        # Try a charset with QP body encoding
+        c = Charset('iso-8859-1')
+        eq('hello w=F6rld', c.body_encode('hello w\xf6rld'))
+        # Try a charset with Base64 body encoding
+        c = Charset('utf-8')
+        eq('aGVsbG8gd29ybGQ=\n', c.body_encode('hello world'))
+        # Try a charset with None body encoding
+        c = Charset('us-ascii')
+        eq('hello world', c.body_encode('hello world'))
+        # Try the convert argument, where input codec <> output codec
+        c = Charset('euc-jp')
+        # With apologies to Tokio Kikuchi ;)
+        try:
+            eq('\x1b$B5FCO;~IW\x1b(B',
+               c.body_encode('\xb5\xc6\xc3\xcf\xbb\xfe\xc9\xd7'))
+            eq('\xb5\xc6\xc3\xcf\xbb\xfe\xc9\xd7',
+               c.body_encode('\xb5\xc6\xc3\xcf\xbb\xfe\xc9\xd7', False))
+        except LookupError:
+            # We probably don't have the Japanese codecs installed
+            pass
+        # Testing SF bug #625509, which we have to fake, since there are no
+        # built-in encodings where the header encoding is QP but the body
+        # encoding is not.
+        from email import charset as CharsetModule
+        CharsetModule.add_charset('fake', CharsetModule.QP, None)
+        c = Charset('fake')
+        eq('hello w\xf6rld', c.body_encode('hello w\xf6rld'))
+
+    def test_unicode_charset_name(self):
+        charset = Charset(u'us-ascii')
+        self.assertEqual(str(charset), 'us-ascii')
+        self.assertRaises(errors.CharsetError, Charset, 'asc\xffii')
+
+
+
+# Test multilingual MIME headers.
+class TestHeader(TestEmailBase):
+    def test_simple(self):
+        eq = self.ndiffAssertEqual
+        h = Header('Hello World!')
+        eq(h.encode(), 'Hello World!')
+        h.append(' Goodbye World!')
+        eq(h.encode(), 'Hello World!  Goodbye World!')
+
+    def test_simple_surprise(self):
+        eq = self.ndiffAssertEqual
+        h = Header('Hello World!')
+        eq(h.encode(), 'Hello World!')
+        h.append('Goodbye World!')
+        eq(h.encode(), 'Hello World! Goodbye World!')
+
+    def test_header_needs_no_decoding(self):
+        h = 'no decoding needed'
+        self.assertEqual(decode_header(h), [(h, None)])
+
+    def test_long(self):
+        h = Header("I am the very model of a modern Major-General; I've information vegetable, animal, and mineral; I know the kings of England, and I quote the fights historical from Marathon to Waterloo, in order categorical; I'm very well acquainted, too, with matters mathematical; I understand equations, both the simple and quadratical; about binomial theorem I'm teeming with a lot o' news, with many cheerful facts about the square of the hypotenuse.",
+                   maxlinelen=76)
+        for l in h.encode(splitchars=' ').split('\n '):
+            self.failUnless(len(l) <= 76)
+
+    def test_multilingual(self):
+        eq = self.ndiffAssertEqual
+        g = Charset("iso-8859-1")
+        cz = Charset("iso-8859-2")
+        utf8 = Charset("utf-8")
+        g_head = "Die Mieter treten hier ein werden mit einem Foerderband komfortabel den Korridor entlang, an s\xfcdl\xfcndischen Wandgem\xe4lden vorbei, gegen die rotierenden Klingen bef\xf6rdert. "
+        cz_head = "Finan\xe8ni metropole se hroutily pod tlakem jejich d\xf9vtipu.. "
+        utf8_head = u"\u6b63\u78ba\u306b\u8a00\u3046\u3068\u7ffb\u8a33\u306f\u3055\u308c\u3066\u3044\u307e\u305b\u3093\u3002\u4e00\u90e8\u306f\u30c9\u30a4\u30c4\u8a9e\u3067\u3059\u304c\u3001\u3042\u3068\u306f\u3067\u305f\u3089\u3081\u3067\u3059\u3002\u5b9f\u969b\u306b\u306f\u300cWenn ist das Nunstuck git und Slotermeyer? Ja! Beiherhund das Oder die Flipperwaldt gersput.\u300d\u3068\u8a00\u3063\u3066\u3044\u307e\u3059\u3002".encode("utf-8")
+        h = Header(g_head, g)
+        h.append(cz_head, cz)
+        h.append(utf8_head, utf8)
+        enc = h.encode()
+        eq(enc, """\
+=?iso-8859-1?q?Die_Mieter_treten_hier_ein_werden_mit_einem_Foerderband_ko?=
+ =?iso-8859-1?q?mfortabel_den_Korridor_entlang=2C_an_s=FCdl=FCndischen_Wan?=
+ =?iso-8859-1?q?dgem=E4lden_vorbei=2C_gegen_die_rotierenden_Klingen_bef=F6?=
+ =?iso-8859-1?q?rdert=2E_?= =?iso-8859-2?q?Finan=E8ni_metropole_se_hroutily?=
+ =?iso-8859-2?q?_pod_tlakem_jejich_d=F9vtipu=2E=2E_?= =?utf-8?b?5q2j56K6?=
+ =?utf-8?b?44Gr6KiA44GG44Go57+76Kiz44Gv44GV44KM44Gm44GE44G+44Gb44KT44CC?=
+ =?utf-8?b?5LiA6YOo44Gv44OJ44Kk44OE6Kqe44Gn44GZ44GM44CB44GC44Go44Gv44Gn?=
+ =?utf-8?b?44Gf44KJ44KB44Gn44GZ44CC5a6f6Zqb44Gr44Gv44CMV2VubiBpc3QgZGFz?=
+ =?utf-8?q?_Nunstuck_git_und_Slotermeyer=3F_Ja!_Beiherhund_das_Oder_die_Fl?=
+ =?utf-8?b?aXBwZXJ3YWxkdCBnZXJzcHV0LuOAjeOBqOiogOOBo+OBpuOBhOOBvuOBmQ==?=
+ =?utf-8?b?44CC?=""")
+        eq(decode_header(enc),
+           [(g_head, "iso-8859-1"), (cz_head, "iso-8859-2"),
+            (utf8_head, "utf-8")])
+        ustr = unicode(h)
+        eq(ustr.encode('utf-8'),
+           'Die Mieter treten hier ein werden mit einem Foerderband '
+           'komfortabel den Korridor entlang, an s\xc3\xbcdl\xc3\xbcndischen '
+           'Wandgem\xc3\xa4lden vorbei, gegen die rotierenden Klingen '
+           'bef\xc3\xb6rdert. Finan\xc4\x8dni metropole se hroutily pod '
+           'tlakem jejich d\xc5\xafvtipu.. \xe6\xad\xa3\xe7\xa2\xba\xe3\x81'
+           '\xab\xe8\xa8\x80\xe3\x81\x86\xe3\x81\xa8\xe7\xbf\xbb\xe8\xa8\xb3'
+           '\xe3\x81\xaf\xe3\x81\x95\xe3\x82\x8c\xe3\x81\xa6\xe3\x81\x84\xe3'
+           '\x81\xbe\xe3\x81\x9b\xe3\x82\x93\xe3\x80\x82\xe4\xb8\x80\xe9\x83'
+           '\xa8\xe3\x81\xaf\xe3\x83\x89\xe3\x82\xa4\xe3\x83\x84\xe8\xaa\x9e'
+           '\xe3\x81\xa7\xe3\x81\x99\xe3\x81\x8c\xe3\x80\x81\xe3\x81\x82\xe3'
+           '\x81\xa8\xe3\x81\xaf\xe3\x81\xa7\xe3\x81\x9f\xe3\x82\x89\xe3\x82'
+           '\x81\xe3\x81\xa7\xe3\x81\x99\xe3\x80\x82\xe5\xae\x9f\xe9\x9a\x9b'
+           '\xe3\x81\xab\xe3\x81\xaf\xe3\x80\x8cWenn ist das Nunstuck git '
+           'und Slotermeyer? Ja! Beiherhund das Oder die Flipperwaldt '
+           'gersput.\xe3\x80\x8d\xe3\x81\xa8\xe8\xa8\x80\xe3\x81\xa3\xe3\x81'
+           '\xa6\xe3\x81\x84\xe3\x81\xbe\xe3\x81\x99\xe3\x80\x82')
+        # Test make_header()
+        newh = make_header(decode_header(enc))
+        eq(newh, enc)
+
+    def test_header_ctor_default_args(self):
+        eq = self.ndiffAssertEqual
+        h = Header()
+        eq(h, '')
+        h.append('foo', Charset('iso-8859-1'))
+        eq(h, '=?iso-8859-1?q?foo?=')
+
+    def test_explicit_maxlinelen(self):
+        eq = self.ndiffAssertEqual
+        hstr = 'A very long line that must get split to something other than at the 76th character boundary to test the non-default behavior'
+        h = Header(hstr)
+        eq(h.encode(), '''\
+A very long line that must get split to something other than at the 76th
+ character boundary to test the non-default behavior''')
+        h = Header(hstr, header_name='Subject')
+        eq(h.encode(), '''\
+A very long line that must get split to something other than at the
+ 76th character boundary to test the non-default behavior''')
+        h = Header(hstr, maxlinelen=1024, header_name='Subject')
+        eq(h.encode(), hstr)
+
+    def test_us_ascii_header(self):
+        eq = self.assertEqual
+        s = 'hello'
+        x = decode_header(s)
+        eq(x, [('hello', None)])
+        h = make_header(x)
+        eq(s, h.encode())
+
+    def test_string_charset(self):
+        eq = self.assertEqual
+        h = Header()
+        h.append('hello', 'iso-8859-1')
+        eq(h, '=?iso-8859-1?q?hello?=')
+
+##    def test_unicode_error(self):
+##        raises = self.assertRaises
+##        raises(UnicodeError, Header, u'[P\xf6stal]', 'us-ascii')
+##        raises(UnicodeError, Header, '[P\xf6stal]', 'us-ascii')
+##        h = Header()
+##        raises(UnicodeError, h.append, u'[P\xf6stal]', 'us-ascii')
+##        raises(UnicodeError, h.append, '[P\xf6stal]', 'us-ascii')
+##        raises(UnicodeError, Header, u'\u83ca\u5730\u6642\u592b', 'iso-8859-1')
+
+    def test_utf8_shortest(self):
+        eq = self.assertEqual
+        h = Header(u'p\xf6stal', 'utf-8')
+        eq(h.encode(), '=?utf-8?q?p=C3=B6stal?=')
+        h = Header(u'\u83ca\u5730\u6642\u592b', 'utf-8')
+        eq(h.encode(), '=?utf-8?b?6I+K5Zyw5pmC5aSr?=')
+
+    def test_bad_8bit_header(self):
+        raises = self.assertRaises
+        eq = self.assertEqual
+        x = 'Ynwp4dUEbay Auction Semiar- No Charge \x96 Earn Big'
+        raises(UnicodeError, Header, x)
+        h = Header()
+        raises(UnicodeError, h.append, x)
+        eq(str(Header(x, errors='replace')), x)
+        h.append(x, errors='replace')
+        eq(str(h), x)
+
+    def test_encoded_adjacent_nonencoded(self):
+        eq = self.assertEqual
+        h = Header()
+        h.append('hello', 'iso-8859-1')
+        h.append('world')
+        s = h.encode()
+        eq(s, '=?iso-8859-1?q?hello?= world')
+        h = make_header(decode_header(s))
+        eq(h.encode(), s)
+
+    def test_whitespace_eater(self):
+        eq = self.assertEqual
+        s = 'Subject: =?koi8-r?b?8NLP18XSy8EgzsEgxsnOwczYztk=?= =?koi8-r?q?=CA?= zz.'
+        parts = decode_header(s)
+        eq(parts, [('Subject:', None), ('\xf0\xd2\xcf\xd7\xc5\xd2\xcb\xc1 \xce\xc1 \xc6\xc9\xce\xc1\xcc\xd8\xce\xd9\xca', 'koi8-r'), ('zz.', None)])
+        hdr = make_header(parts)
+        eq(hdr.encode(),
+           'Subject: =?koi8-r?b?8NLP18XSy8EgzsEgxsnOwczYztnK?= zz.')
+
+    def test_broken_base64_header(self):
+        raises = self.assertRaises
+        s = 'Subject: =?EUC-KR?B?CSixpLDtKSC/7Liuvsax4iC6uLmwMcijIKHaILzSwd/H0SC8+LCjwLsgv7W/+Mj3IQ?='
+        raises(errors.HeaderParseError, decode_header, s)
+
+
+
+# Test RFC 2231 header parameters (en/de)coding
+class TestRFC2231(TestEmailBase):
+    def test_get_param(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_29.txt')
+        eq(msg.get_param('title'),
+           ('us-ascii', 'en', 'This is even more ***fun*** isn\'t it!'))
+        eq(msg.get_param('title', unquote=False),
+           ('us-ascii', 'en', '"This is even more ***fun*** isn\'t it!"'))
+
+    def test_set_param(self):
+        eq = self.assertEqual
+        msg = Message()
+        msg.set_param('title', 'This is even more ***fun*** isn\'t it!',
+                      charset='us-ascii')
+        eq(msg.get_param('title'),
+           ('us-ascii', '', 'This is even more ***fun*** isn\'t it!'))
+        msg.set_param('title', 'This is even more ***fun*** isn\'t it!',
+                      charset='us-ascii', language='en')
+        eq(msg.get_param('title'),
+           ('us-ascii', 'en', 'This is even more ***fun*** isn\'t it!'))
+        msg = self._msgobj('msg_01.txt')
+        msg.set_param('title', 'This is even more ***fun*** isn\'t it!',
+                      charset='us-ascii', language='en')
+        eq(msg.as_string(), """\
+Return-Path: <bbb@zzz.org>
+Delivered-To: bbb@zzz.org
+Received: by mail.zzz.org (Postfix, from userid 889)
+\tid 27CEAD38CC; Fri,  4 May 2001 14:05:44 -0400 (EDT)
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+Message-ID: <15090.61304.110929.45684@aaa.zzz.org>
+From: bbb@ddd.com (John X. Doe)
+To: bbb@zzz.org
+Subject: This is a test message
+Date: Fri, 4 May 2001 14:05:44 -0400
+Content-Type: text/plain; charset=us-ascii;
+\ttitle*="us-ascii'en'This%20is%20even%20more%20%2A%2A%2Afun%2A%2A%2A%20isn%27t%20it%21"
+
+
+Hi,
+
+Do you like this message?
+
+-Me
+""")
+
+    def test_del_param(self):
+        eq = self.ndiffAssertEqual
+        msg = self._msgobj('msg_01.txt')
+        msg.set_param('foo', 'bar', charset='us-ascii', language='en')
+        msg.set_param('title', 'This is even more ***fun*** isn\'t it!',
+            charset='us-ascii', language='en')
+        msg.del_param('foo', header='Content-Type')
+        eq(msg.as_string(), """\
+Return-Path: <bbb@zzz.org>
+Delivered-To: bbb@zzz.org
+Received: by mail.zzz.org (Postfix, from userid 889)
+\tid 27CEAD38CC; Fri,  4 May 2001 14:05:44 -0400 (EDT)
+MIME-Version: 1.0
+Content-Transfer-Encoding: 7bit
+Message-ID: <15090.61304.110929.45684@aaa.zzz.org>
+From: bbb@ddd.com (John X. Doe)
+To: bbb@zzz.org
+Subject: This is a test message
+Date: Fri, 4 May 2001 14:05:44 -0400
+Content-Type: text/plain; charset="us-ascii";
+\ttitle*="us-ascii'en'This%20is%20even%20more%20%2A%2A%2Afun%2A%2A%2A%20isn%27t%20it%21"
+
+
+Hi,
+
+Do you like this message?
+
+-Me
+""")
+
+    def test_rfc2231_get_content_charset(self):
+        eq = self.assertEqual
+        msg = self._msgobj('msg_32.txt')
+        eq(msg.get_content_charset(), 'us-ascii')
+
+    def test_rfc2231_no_language_or_charset(self):
+        m = '''\
+Content-Transfer-Encoding: 8bit
+Content-Disposition: inline; filename="file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOCAL_20SETTINGS_TEMP_nsmail.htm"
+Content-Type: text/html; NAME*0=file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOCAL_20SETTINGS_TEM; NAME*1=P_nsmail.htm
+
+'''
+        msg = email.message_from_string(m)
+        self.assertEqual(msg.get_param('NAME'),
+                         (None, None, 'file____C__DOCUMENTS_20AND_20SETTINGS_FABIEN_LOCAL_20SETTINGS_TEMP_nsmail.htm'))
+
+    def test_rfc2231_no_language_or_charset_in_filename(self):
+        m = '''\
+Content-Disposition: inline;
+\tfilename*0="This%20is%20even%20more%20";
+\tfilename*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tfilename*2="is it not.pdf"
+
+'''
+        msg = email.message_from_string(m)
+        self.assertEqual(msg.get_filename(),
+                         'This is even more ***fun*** is it not.pdf')
+
+    def test_rfc2231_no_language_or_charset_in_boundary(self):
+        m = '''\
+Content-Type: multipart/alternative;
+\tboundary*0="This%20is%20even%20more%20";
+\tboundary*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tboundary*2="is it not.pdf"
+
+'''
+        msg = email.message_from_string(m)
+        self.assertEqual(msg.get_boundary(),
+                         'This is even more ***fun*** is it not.pdf')
+
+    def test_rfc2231_no_language_or_charset_in_charset(self):
+        # This is a nonsensical charset value, but tests the code anyway
+        m = '''\
+Content-Type: text/plain;
+\tcharset*0="This%20is%20even%20more%20";
+\tcharset*1="%2A%2A%2Afun%2A%2A%2A%20";
+\tcharset*2="is it not.pdf"
+
+'''
+        msg = email.message_from_string(m)
+        self.assertEqual(msg.get_content_charset(),
+                         'this is even more ***fun*** is it not.pdf')
+
+    def test_rfc2231_unknown_encoding(self):
+        m = """\
+Content-Transfer-Encoding: 8bit
+Content-Disposition: inline; filename*0=X-UNKNOWN''myfile.txt
+
+"""
+        msg = email.message_from_string(m)
+        self.assertEqual(msg.get_filename(), 'myfile.txt')
+
+
+
+def _testclasses():
+    mod = sys.modules[__name__]
+    return [getattr(mod, name) for name in dir(mod) if name.startswith('Test')]
+
+
+def suite():
+    suite = unittest.TestSuite()
+    for testclass in _testclasses():
+        suite.addTest(unittest.makeSuite(testclass))
+    return suite
+
+
+def test_main():
+    for testclass in _testclasses():
+        run_unittest(testclass)
+
+
+
+if __name__ == '__main__':
+    unittest.main(defaultTest='suite')
diff --git a/Lib/email/Utils.py b/Lib/email/utils.py
similarity index 96%
rename from Lib/email/Utils.py
rename to Lib/email/utils.py
index 9ba7601..250eb19 100644
--- a/Lib/email/Utils.py
+++ b/Lib/email/utils.py
@@ -1,9 +1,24 @@
-# Copyright (C) 2001-2004 Python Software Foundation
+# Copyright (C) 2001-2006 Python Software Foundation
 # Author: Barry Warsaw
 # Contact: email-sig@python.org
 
 """Miscellaneous utilities."""
 
+__all__ = [
+    'collapse_rfc2231_value',
+    'decode_params',
+    'decode_rfc2231',
+    'encode_rfc2231',
+    'formataddr',
+    'formatdate',
+    'getaddresses',
+    'make_msgid',
+    'parseaddr',
+    'parsedate',
+    'parsedate_tz',
+    'unquote',
+    ]
+
 import os
 import re
 import time
@@ -24,7 +39,7 @@
 from quopri import decodestring as _qdecode
 
 # Intrapackage imports
-from email.Encoders import _bencode, _qencode
+from email.encoders import _bencode, _qencode
 
 COMMASPACE = ', '
 EMPTYSTRING = ''
diff --git a/Lib/encodings/big5.py b/Lib/encodings/big5.py
index d56aa1b..7adeb0e 100644
--- a/Lib/encodings/big5.py
+++ b/Lib/encodings/big5.py
@@ -2,10 +2,10 @@
 # big5.py: Python Unicode Codec for BIG5
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: big5.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_tw, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_tw.getcodec('big5')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='big5',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/big5hkscs.py b/Lib/encodings/big5hkscs.py
index 443997f..350df37 100644
--- a/Lib/encodings/big5hkscs.py
+++ b/Lib/encodings/big5hkscs.py
@@ -2,10 +2,10 @@
 # big5hkscs.py: Python Unicode Codec for BIG5HKSCS
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: big5hkscs.py,v 1.1 2004/06/29 05:14:27 perky Exp $
 #
 
 import _codecs_hk, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_hk.getcodec('big5hkscs')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='big5hkscs',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/cp932.py b/Lib/encodings/cp932.py
index 38937f5..e01f59b 100644
--- a/Lib/encodings/cp932.py
+++ b/Lib/encodings/cp932.py
@@ -2,10 +2,10 @@
 # cp932.py: Python Unicode Codec for CP932
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: cp932.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_jp, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_jp.getcodec('cp932')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='cp932',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/cp949.py b/Lib/encodings/cp949.py
index 0f3c847..627c871 100644
--- a/Lib/encodings/cp949.py
+++ b/Lib/encodings/cp949.py
@@ -2,10 +2,10 @@
 # cp949.py: Python Unicode Codec for CP949
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: cp949.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_kr, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_kr.getcodec('cp949')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='cp949',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/cp950.py b/Lib/encodings/cp950.py
index dab3e28..39eec5e 100644
--- a/Lib/encodings/cp950.py
+++ b/Lib/encodings/cp950.py
@@ -2,10 +2,10 @@
 # cp950.py: Python Unicode Codec for CP950
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: cp950.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_tw, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_tw.getcodec('cp950')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='cp950',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/euc_jis_2004.py b/Lib/encodings/euc_jis_2004.py
index 02d55ca..72b87ae 100644
--- a/Lib/encodings/euc_jis_2004.py
+++ b/Lib/encodings/euc_jis_2004.py
@@ -2,10 +2,10 @@
 # euc_jis_2004.py: Python Unicode Codec for EUC_JIS_2004
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: euc_jis_2004.py,v 1.1 2004/07/07 16:18:25 perky Exp $
 #
 
 import _codecs_jp, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_jp.getcodec('euc_jis_2004')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='euc_jis_2004',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/euc_jisx0213.py b/Lib/encodings/euc_jisx0213.py
index 30f173e..cc47d04 100644
--- a/Lib/encodings/euc_jisx0213.py
+++ b/Lib/encodings/euc_jisx0213.py
@@ -2,10 +2,10 @@
 # euc_jisx0213.py: Python Unicode Codec for EUC_JISX0213
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: euc_jisx0213.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_jp, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_jp.getcodec('euc_jisx0213')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='euc_jisx0213',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/euc_jp.py b/Lib/encodings/euc_jp.py
index a3947a3..7bcbe41 100644
--- a/Lib/encodings/euc_jp.py
+++ b/Lib/encodings/euc_jp.py
@@ -2,10 +2,10 @@
 # euc_jp.py: Python Unicode Codec for EUC_JP
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: euc_jp.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_jp, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_jp.getcodec('euc_jp')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='euc_jp',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/euc_kr.py b/Lib/encodings/euc_kr.py
index bbebee8..c1fb126 100644
--- a/Lib/encodings/euc_kr.py
+++ b/Lib/encodings/euc_kr.py
@@ -2,10 +2,10 @@
 # euc_kr.py: Python Unicode Codec for EUC_KR
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: euc_kr.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_kr, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_kr.getcodec('euc_kr')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='euc_kr',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/gb18030.py b/Lib/encodings/gb18030.py
index 7eca319..34fb6c3 100644
--- a/Lib/encodings/gb18030.py
+++ b/Lib/encodings/gb18030.py
@@ -2,10 +2,10 @@
 # gb18030.py: Python Unicode Codec for GB18030
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: gb18030.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_cn, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_cn.getcodec('gb18030')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='gb18030',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/gb2312.py b/Lib/encodings/gb2312.py
index 5130efa..3c3b837 100644
--- a/Lib/encodings/gb2312.py
+++ b/Lib/encodings/gb2312.py
@@ -2,10 +2,10 @@
 # gb2312.py: Python Unicode Codec for GB2312
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: gb2312.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_cn, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_cn.getcodec('gb2312')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='gb2312',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/gbk.py b/Lib/encodings/gbk.py
index 67854bc..1b45db8 100644
--- a/Lib/encodings/gbk.py
+++ b/Lib/encodings/gbk.py
@@ -2,10 +2,10 @@
 # gbk.py: Python Unicode Codec for GBK
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: gbk.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_cn, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_cn.getcodec('gbk')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='gbk',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/hz.py b/Lib/encodings/hz.py
index 3940894..383442a 100644
--- a/Lib/encodings/hz.py
+++ b/Lib/encodings/hz.py
@@ -2,10 +2,10 @@
 # hz.py: Python Unicode Codec for HZ
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: hz.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_cn, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_cn.getcodec('hz')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='hz',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/idna.py b/Lib/encodings/idna.py
index 8bdae32..ea90d67 100644
--- a/Lib/encodings/idna.py
+++ b/Lib/encodings/idna.py
@@ -35,7 +35,7 @@
            stringprep.in_table_c7(c) or \
            stringprep.in_table_c8(c) or \
            stringprep.in_table_c9(c):
-            raise UnicodeError, "Invalid character %s" % repr(c)
+            raise UnicodeError("Invalid character %r" % c)
 
     # Check bidi
     RandAL = map(stringprep.in_table_d1, label)
@@ -48,14 +48,14 @@
             # 2) If a string contains any RandALCat character, the string
             # MUST NOT contain any LCat character.
             if filter(stringprep.in_table_d2, label):
-                raise UnicodeError, "Violation of BIDI requirement 2"
+                raise UnicodeError("Violation of BIDI requirement 2")
 
             # 3) If a string contains any RandALCat character, a
             # RandALCat character MUST be the first character of the
             # string, and a RandALCat character MUST be the last
             # character of the string.
             if not RandAL[0] or not RandAL[-1]:
-                raise UnicodeError, "Violation of BIDI requirement 3"
+                raise UnicodeError("Violation of BIDI requirement 3")
 
     return label
 
@@ -70,7 +70,7 @@
         # Skip to step 8.
         if 0 < len(label) < 64:
             return label
-        raise UnicodeError, "label too long"
+        raise UnicodeError("label empty or too long")
 
     # Step 2: nameprep
     label = nameprep(label)
@@ -85,11 +85,11 @@
         # Skip to step 8.
         if 0 < len(label) < 64:
             return label
-        raise UnicodeError, "label too long"
+        raise UnicodeError("label empty or too long")
 
     # Step 5: Check ACE prefix
     if label.startswith(uace_prefix):
-        raise UnicodeError, "Label starts with ACE prefix"
+        raise UnicodeError("Label starts with ACE prefix")
 
     # Step 6: Encode with PUNYCODE
     label = label.encode("punycode")
@@ -100,7 +100,7 @@
     # Step 8: Check size
     if 0 < len(label) < 64:
         return label
-    raise UnicodeError, "label too long"
+    raise UnicodeError("label empty or too long")
 
 def ToUnicode(label):
     # Step 1: Check for ASCII
@@ -119,7 +119,7 @@
         try:
             label = label.encode("ascii")
         except UnicodeError:
-            raise UnicodeError, "Invalid character in IDN label"
+            raise UnicodeError("Invalid character in IDN label")
     # Step 3: Check for ACE prefix
     if not label.startswith(ace_prefix):
         return unicode(label, "ascii")
@@ -136,7 +136,7 @@
     # Step 7: Compare the result of step 6 with the one of step 3
     # label2 will already be in lower case.
     if label.lower() != label2:
-        raise UnicodeError, ("IDNA does not round-trip", label, label2)
+        raise UnicodeError("IDNA does not round-trip", label, label2)
 
     # Step 8: return the result of step 5
     return result
@@ -148,7 +148,7 @@
 
         if errors != 'strict':
             # IDNA is quite clear that implementations must be strict
-            raise UnicodeError, "unsupported error handling "+errors
+            raise UnicodeError("unsupported error handling "+errors)
 
         if not input:
             return "", 0
@@ -168,7 +168,7 @@
     def decode(self,input,errors='strict'):
 
         if errors != 'strict':
-            raise UnicodeError, "Unsupported error handling "+errors
+            raise UnicodeError("Unsupported error handling "+errors)
 
         if not input:
             return u"", 0
@@ -194,13 +194,79 @@
 
         return u".".join(result)+trailing_dot, len(input)
 
-class IncrementalEncoder(codecs.IncrementalEncoder):
-    def encode(self, input, final=False):
-        return Codec().encode(input, self.errors)[0]
+class IncrementalEncoder(codecs.BufferedIncrementalEncoder):
+    def _buffer_encode(self, input, errors, final):
+        if errors != 'strict':
+            # IDNA is quite clear that implementations must be strict
+            raise UnicodeError("unsupported error handling "+errors)
 
-class IncrementalDecoder(codecs.IncrementalDecoder):
-    def decode(self, input, final=False):
-        return Codec().decode(input, self.errors)[0]
+        if not input:
+            return ("", 0)
+
+        labels = dots.split(input)
+        trailing_dot = u''
+        if labels:
+            if not labels[-1]:
+                trailing_dot = '.'
+                del labels[-1]
+            elif not final:
+                # Keep potentially unfinished label until the next call
+                del labels[-1]
+                if labels:
+                    trailing_dot = '.'
+
+        result = []
+        size = 0
+        for label in labels:
+            result.append(ToASCII(label))
+            if size:
+                size += 1
+            size += len(label)
+
+        # Join with U+002E
+        result = ".".join(result) + trailing_dot
+        size += len(trailing_dot)
+        return (result, size)
+
+class IncrementalDecoder(codecs.BufferedIncrementalDecoder):
+    def _buffer_decode(self, input, errors, final):
+        if errors != 'strict':
+            raise UnicodeError("Unsupported error handling "+errors)
+
+        if not input:
+            return (u"", 0)
+
+        # IDNA allows decoding to operate on Unicode strings, too.
+        if isinstance(input, unicode):
+            labels = dots.split(input)
+        else:
+            # Must be ASCII string
+            input = str(input)
+            unicode(input, "ascii")
+            labels = input.split(".")
+
+        trailing_dot = u''
+        if labels:
+            if not labels[-1]:
+                trailing_dot = u'.'
+                del labels[-1]
+            elif not final:
+                # Keep potentially unfinished label until the next call
+                del labels[-1]
+                if labels:
+                    trailing_dot = u'.'
+
+        result = []
+        size = 0
+        for label in labels:
+            result.append(ToUnicode(label))
+            if size:
+                size += 1
+            size += len(label)
+
+        result = u".".join(result) + trailing_dot
+        size += len(trailing_dot)
+        return (result, size)
 
 class StreamWriter(Codec,codecs.StreamWriter):
     pass
diff --git a/Lib/encodings/iso2022_jp.py b/Lib/encodings/iso2022_jp.py
index 109658b..ab04060 100644
--- a/Lib/encodings/iso2022_jp.py
+++ b/Lib/encodings/iso2022_jp.py
@@ -2,10 +2,10 @@
 # iso2022_jp.py: Python Unicode Codec for ISO2022_JP
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: iso2022_jp.py,v 1.2 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_iso2022, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_iso2022.getcodec('iso2022_jp')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='iso2022_jp',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/iso2022_jp_1.py b/Lib/encodings/iso2022_jp_1.py
index 201bd28..997044d 100644
--- a/Lib/encodings/iso2022_jp_1.py
+++ b/Lib/encodings/iso2022_jp_1.py
@@ -2,10 +2,10 @@
 # iso2022_jp_1.py: Python Unicode Codec for ISO2022_JP_1
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: iso2022_jp_1.py,v 1.2 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_iso2022, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_iso2022.getcodec('iso2022_jp_1')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='iso2022_jp_1',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/iso2022_jp_2.py b/Lib/encodings/iso2022_jp_2.py
index 7a61018..9106bf7 100644
--- a/Lib/encodings/iso2022_jp_2.py
+++ b/Lib/encodings/iso2022_jp_2.py
@@ -2,10 +2,10 @@
 # iso2022_jp_2.py: Python Unicode Codec for ISO2022_JP_2
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: iso2022_jp_2.py,v 1.2 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_iso2022, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_iso2022.getcodec('iso2022_jp_2')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='iso2022_jp_2',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/iso2022_jp_2004.py b/Lib/encodings/iso2022_jp_2004.py
index 2497124..40198bf 100644
--- a/Lib/encodings/iso2022_jp_2004.py
+++ b/Lib/encodings/iso2022_jp_2004.py
@@ -2,10 +2,10 @@
 # iso2022_jp_2004.py: Python Unicode Codec for ISO2022_JP_2004
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: iso2022_jp_2004.py,v 1.1 2004/07/07 16:18:25 perky Exp $
 #
 
 import _codecs_iso2022, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_iso2022.getcodec('iso2022_jp_2004')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='iso2022_jp_2004',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/iso2022_jp_3.py b/Lib/encodings/iso2022_jp_3.py
index 8b2ed00..346e08b 100644
--- a/Lib/encodings/iso2022_jp_3.py
+++ b/Lib/encodings/iso2022_jp_3.py
@@ -2,10 +2,10 @@
 # iso2022_jp_3.py: Python Unicode Codec for ISO2022_JP_3
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: iso2022_jp_3.py,v 1.2 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_iso2022, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_iso2022.getcodec('iso2022_jp_3')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='iso2022_jp_3',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/iso2022_jp_ext.py b/Lib/encodings/iso2022_jp_ext.py
index 97cb4e7..752bab9 100644
--- a/Lib/encodings/iso2022_jp_ext.py
+++ b/Lib/encodings/iso2022_jp_ext.py
@@ -2,10 +2,10 @@
 # iso2022_jp_ext.py: Python Unicode Codec for ISO2022_JP_EXT
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: iso2022_jp_ext.py,v 1.2 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_iso2022, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_iso2022.getcodec('iso2022_jp_ext')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='iso2022_jp_ext',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/iso2022_kr.py b/Lib/encodings/iso2022_kr.py
index f5549ca..bf70187 100644
--- a/Lib/encodings/iso2022_kr.py
+++ b/Lib/encodings/iso2022_kr.py
@@ -2,10 +2,10 @@
 # iso2022_kr.py: Python Unicode Codec for ISO2022_KR
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: iso2022_kr.py,v 1.2 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_iso2022, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_iso2022.getcodec('iso2022_kr')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='iso2022_kr',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/johab.py b/Lib/encodings/johab.py
index b6a87d7..512aeeb 100644
--- a/Lib/encodings/johab.py
+++ b/Lib/encodings/johab.py
@@ -2,10 +2,10 @@
 # johab.py: Python Unicode Codec for JOHAB
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: johab.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_kr, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_kr.getcodec('johab')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='johab',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/shift_jis.py b/Lib/encodings/shift_jis.py
index ec5e517..8338117 100644
--- a/Lib/encodings/shift_jis.py
+++ b/Lib/encodings/shift_jis.py
@@ -2,10 +2,10 @@
 # shift_jis.py: Python Unicode Codec for SHIFT_JIS
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: shift_jis.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_jp, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_jp.getcodec('shift_jis')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='shift_jis',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/shift_jis_2004.py b/Lib/encodings/shift_jis_2004.py
index 446cd7c..161b1e8 100644
--- a/Lib/encodings/shift_jis_2004.py
+++ b/Lib/encodings/shift_jis_2004.py
@@ -2,10 +2,10 @@
 # shift_jis_2004.py: Python Unicode Codec for SHIFT_JIS_2004
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: shift_jis_2004.py,v 1.1 2004/07/07 16:18:25 perky Exp $
 #
 
 import _codecs_jp, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_jp.getcodec('shift_jis_2004')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='shift_jis_2004',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/encodings/shift_jisx0213.py b/Lib/encodings/shift_jisx0213.py
index 495468b..cb653f5 100644
--- a/Lib/encodings/shift_jisx0213.py
+++ b/Lib/encodings/shift_jisx0213.py
@@ -2,10 +2,10 @@
 # shift_jisx0213.py: Python Unicode Codec for SHIFT_JISX0213
 #
 # Written by Hye-Shik Chang <perky@FreeBSD.org>
-# $CJKCodecs: shift_jisx0213.py,v 1.8 2004/06/28 18:16:03 perky Exp $
 #
 
 import _codecs_jp, codecs
+import _multibytecodec as mbc
 
 codec = _codecs_jp.getcodec('shift_jisx0213')
 
@@ -13,22 +13,27 @@
     encode = codec.encode
     decode = codec.decode
 
-class StreamReader(Codec, codecs.StreamReader):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamReader.__init__(self, stream, errors)
-        __codec = codec.StreamReader(stream, errors)
-        self.read = __codec.read
-        self.readline = __codec.readline
-        self.readlines = __codec.readlines
-        self.reset = __codec.reset
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
 
-class StreamWriter(Codec, codecs.StreamWriter):
-    def __init__(self, stream, errors='strict'):
-        codecs.StreamWriter.__init__(self, stream, errors)
-        __codec = codec.StreamWriter(stream, errors)
-        self.write = __codec.write
-        self.writelines = __codec.writelines
-        self.reset = __codec.reset
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
 
 def getregentry():
-    return (codec.encode, codec.decode, StreamReader, StreamWriter)
+    return codecs.CodecInfo(
+        name='shift_jisx0213',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
diff --git a/Lib/getpass.py b/Lib/getpass.py
index a30d3a1..8204a47 100644
--- a/Lib/getpass.py
+++ b/Lib/getpass.py
@@ -15,11 +15,14 @@
 
 __all__ = ["getpass","getuser"]
 
-def unix_getpass(prompt='Password: '):
+def unix_getpass(prompt='Password: ', stream=None):
     """Prompt for a password, with echo turned off.
+    The prompt is written on stream, by default stdout.
 
     Restore terminal settings at end.
     """
+    if stream is None:
+        stream = sys.stdout
 
     try:
         fd = sys.stdin.fileno()
@@ -32,18 +35,18 @@
     new[3] = new[3] & ~termios.ECHO # 3 == 'lflags'
     try:
         termios.tcsetattr(fd, termios.TCSADRAIN, new)
-        passwd = _raw_input(prompt)
+        passwd = _raw_input(prompt, stream)
     finally:
         termios.tcsetattr(fd, termios.TCSADRAIN, old)
 
-    sys.stdout.write('\n')
+    stream.write('\n')
     return passwd
 
 
-def win_getpass(prompt='Password: '):
+def win_getpass(prompt='Password: ', stream=None):
     """Prompt for password with echo off, using Windows getch()."""
     if sys.stdin is not sys.__stdin__:
-        return default_getpass(prompt)
+        return default_getpass(prompt, stream)
     import msvcrt
     for c in prompt:
         msvcrt.putch(c)
@@ -63,16 +66,18 @@
     return pw
 
 
-def default_getpass(prompt='Password: '):
-    print "Warning: Problem with getpass. Passwords may be echoed."
-    return _raw_input(prompt)
+def default_getpass(prompt='Password: ', stream=None):
+    print >>sys.stderr, "Warning: Problem with getpass. Passwords may be echoed."
+    return _raw_input(prompt, stream)
 
 
-def _raw_input(prompt=""):
+def _raw_input(prompt="", stream=None):
     # This doesn't save the string in the GNU readline history.
+    if stream is None:
+        stream = sys.stdout
     prompt = str(prompt)
     if prompt:
-        sys.stdout.write(prompt)
+        stream.write(prompt)
     line = sys.stdin.readline()
     if not line:
         raise EOFError
diff --git a/Lib/glob.py b/Lib/glob.py
index ecc6d25..95656cc 100644
--- a/Lib/glob.py
+++ b/Lib/glob.py
@@ -60,7 +60,7 @@
     if basename == '':
         # `os.path.split()` returns an empty basename for paths ending with a
         # directory separator.  'q*x/' should match only directories.
-        if os.isdir(dirname):
+        if os.path.isdir(dirname):
             return [basename]
     else:
         if os.path.lexists(os.path.join(dirname, basename)):
diff --git a/Lib/idlelib/IOBinding.py b/Lib/idlelib/IOBinding.py
index ce1fd2a..deeb5c5 100644
--- a/Lib/idlelib/IOBinding.py
+++ b/Lib/idlelib/IOBinding.py
@@ -377,6 +377,7 @@
         try:
             f = open(filename, "wb")
             f.write(chars)
+            f.flush()
             f.close()
             return True
         except IOError, msg:
diff --git a/Lib/idlelib/NEWS.txt b/Lib/idlelib/NEWS.txt
index bbe9878..8163330 100644
--- a/Lib/idlelib/NEWS.txt
+++ b/Lib/idlelib/NEWS.txt
@@ -1,7 +1,10 @@
-What's New in IDLE 1.2a0?
-=======================
+What's New in IDLE 1.2a1?
+=========================
 
-*Release date: XX-XXX-2006*
+*Release date: 05-APR-2006*
+
+- Source file f.flush() after writing; trying to avoid lossage if user
+  kills GUI.
 
 - Options / Keys / Advanced dialog made functional.  Also, allow binding
   of 'movement' keys.
@@ -70,7 +73,7 @@
 
 - Improve error handling when .idlerc can't be created (warn and exit).
 
-- The GUI was hanging if the shell window was closed while a raw_input() 
+- The GUI was hanging if the shell window was closed while a raw_input()
   was pending.  Restored the quit() of the readline() mainloop().
   http://mail.python.org/pipermail/idle-dev/2004-December/002307.html
 
diff --git a/Lib/idlelib/idlever.py b/Lib/idlelib/idlever.py
index eef2885..fbde56c 100644
--- a/Lib/idlelib/idlever.py
+++ b/Lib/idlelib/idlever.py
@@ -1 +1 @@
-IDLE_VERSION = "1.2a0"
+IDLE_VERSION = "1.2a1"
diff --git a/Lib/inspect.py b/Lib/inspect.py
index 57bf18c..2e4d987 100644
--- a/Lib/inspect.py
+++ b/Lib/inspect.py
@@ -353,7 +353,7 @@
         if 'b' in mode and string.lower(filename[-len(suffix):]) == suffix:
             # Looks like a binary file.  We want to only return a text file.
             return None
-    if os.path.exists(filename):
+    if os.path.exists(filename) or hasattr(getmodule(object), '__loader__'):
         return filename
 
 def getabsfile(object):
@@ -379,7 +379,7 @@
     if file in modulesbyfile:
         return sys.modules.get(modulesbyfile[file])
     for module in sys.modules.values():
-        if hasattr(module, '__file__'):
+        if ismodule(module) and hasattr(module, '__file__'):
             modulesbyfile[
                 os.path.realpath(
                         getabsfile(module))] = module.__name__
@@ -406,7 +406,7 @@
     in the file and the line number indexes a line in that list.  An IOError
     is raised if the source code cannot be retrieved."""
     file = getsourcefile(object) or getfile(object)
-    lines = linecache.getlines(file)
+    lines = linecache.getlines(file, getmodule(object).__dict__)
     if not lines:
         raise IOError('could not get source code')
 
diff --git a/Lib/lib-old/Para.py b/Lib/lib-old/Para.py
deleted file mode 100644
index 2fd8dc6..0000000
--- a/Lib/lib-old/Para.py
+++ /dev/null
@@ -1,343 +0,0 @@
-# Text formatting abstractions
-# Note -- this module is obsolete, it's too slow anyway
-
-
-# Oft-used type object
-Int = type(0)
-
-
-# Represent a paragraph.  This is a list of words with associated
-# font and size information, plus indents and justification for the
-# entire paragraph.
-# Once the words have been added to a paragraph, it can be laid out
-# for different line widths.  Once laid out, it can be rendered at
-# different screen locations.  Once rendered, it can be queried
-# for mouse hits, and parts of the text can be highlighted
-class Para:
-    #
-    def __init__(self):
-        self.words = [] # The words
-        self.just = 'l' # Justification: 'l', 'r', 'lr' or 'c'
-        self.indent_left = self.indent_right = self.indent_hang = 0
-        # Final lay-out parameters, may change
-        self.left = self.top = self.right = self.bottom = \
-                self.width = self.height = self.lines = None
-    #
-    # Add a word, computing size information for it.
-    # Words may also be added manually by appending to self.words
-    # Each word should be a 7-tuple:
-    # (font, text, width, space, stretch, ascent, descent)
-    def addword(self, d, font, text, space, stretch):
-        if font is not None:
-            d.setfont(font)
-        width = d.textwidth(text)
-        ascent = d.baseline()
-        descent = d.lineheight() - ascent
-        spw = d.textwidth(' ')
-        space = space * spw
-        stretch = stretch * spw
-        tuple = (font, text, width, space, stretch, ascent, descent)
-        self.words.append(tuple)
-    #
-    # Hooks to begin and end anchors -- insert numbers in the word list!
-    def bgn_anchor(self, id):
-        self.words.append(id)
-    #
-    def end_anchor(self, id):
-        self.words.append(0)
-    #
-    # Return the total length (width) of the text added so far, in pixels
-    def getlength(self):
-        total = 0
-        for word in self.words:
-            if type(word) is not Int:
-                total = total + word[2] + word[3]
-        return total
-    #
-    # Tab to a given position (relative to the current left indent):
-    # remove all stretch, add fixed space up to the new indent.
-    # If the current position is already at the tab stop,
-    # don't add any new space (but still remove the stretch)
-    def tabto(self, tab):
-        total = 0
-        as, de = 1, 0
-        for i in range(len(self.words)):
-            word = self.words[i]
-            if type(word) is Int: continue
-            (fo, te, wi, sp, st, as, de) = word
-            self.words[i] = (fo, te, wi, sp, 0, as, de)
-            total = total + wi + sp
-        if total < tab:
-            self.words.append((None, '', 0, tab-total, 0, as, de))
-    #
-    # Make a hanging tag: tab to hang, increment indent_left by hang,
-    # and reset indent_hang to -hang
-    def makehangingtag(self, hang):
-        self.tabto(hang)
-        self.indent_left = self.indent_left + hang
-        self.indent_hang = -hang
-    #
-    # Decide where the line breaks will be given some screen width
-    def layout(self, linewidth):
-        self.width = linewidth
-        height = 0
-        self.lines = lines = []
-        avail1 = self.width - self.indent_left - self.indent_right
-        avail = avail1 - self.indent_hang
-        words = self.words
-        i = 0
-        n = len(words)
-        lastfont = None
-        while i < n:
-            firstfont = lastfont
-            charcount = 0
-            width = 0
-            stretch = 0
-            ascent = 0
-            descent = 0
-            lsp = 0
-            j = i
-            while i < n:
-                word = words[i]
-                if type(word) is Int:
-                    if word > 0 and width >= avail:
-                        break
-                    i = i+1
-                    continue
-                fo, te, wi, sp, st, as, de = word
-                if width + wi > avail and width > 0 and wi > 0:
-                    break
-                if fo is not None:
-                    lastfont = fo
-                    if width == 0:
-                        firstfont = fo
-                charcount = charcount + len(te) + (sp > 0)
-                width = width + wi + sp
-                lsp = sp
-                stretch = stretch + st
-                lst = st
-                ascent = max(ascent, as)
-                descent = max(descent, de)
-                i = i+1
-            while i > j and type(words[i-1]) is Int and \
-                    words[i-1] > 0: i = i-1
-            width = width - lsp
-            if i < n:
-                stretch = stretch - lst
-            else:
-                stretch = 0
-            tuple = i-j, firstfont, charcount, width, stretch, \
-                    ascent, descent
-            lines.append(tuple)
-            height = height + ascent + descent
-            avail = avail1
-        self.height = height
-    #
-    # Call a function for all words in a line
-    def visit(self, wordfunc, anchorfunc):
-        avail1 = self.width - self.indent_left - self.indent_right
-        avail = avail1 - self.indent_hang
-        v = self.top
-        i = 0
-        for tuple in self.lines:
-            wordcount, firstfont, charcount, width, stretch, \
-                    ascent, descent = tuple
-            h = self.left + self.indent_left
-            if i == 0: h = h + self.indent_hang
-            extra = 0
-            if self.just == 'r': h = h + avail - width
-            elif self.just == 'c': h = h + (avail - width) / 2
-            elif self.just == 'lr' and stretch > 0:
-                extra = avail - width
-            v2 = v + ascent + descent
-            for j in range(i, i+wordcount):
-                word = self.words[j]
-                if type(word) is Int:
-                    ok = anchorfunc(self, tuple, word, \
-                                    h, v)
-                    if ok is not None: return ok
-                    continue
-                fo, te, wi, sp, st, as, de = word
-                if extra > 0 and stretch > 0:
-                    ex = extra * st / stretch
-                    extra = extra - ex
-                    stretch = stretch - st
-                else:
-                    ex = 0
-                h2 = h + wi + sp + ex
-                ok = wordfunc(self, tuple, word, h, v, \
-                        h2, v2, (j==i), (j==i+wordcount-1))
-                if ok is not None: return ok
-                h = h2
-            v = v2
-            i = i + wordcount
-            avail = avail1
-    #
-    # Render a paragraph in "drawing object" d, using the rectangle
-    # given by (left, top, right) with an unspecified bottom.
-    # Return the computed bottom of the text.
-    def render(self, d, left, top, right):
-        if self.width != right-left:
-            self.layout(right-left)
-        self.left = left
-        self.top = top
-        self.right = right
-        self.bottom = self.top + self.height
-        self.anchorid = 0
-        try:
-            self.d = d
-            self.visit(self.__class__._renderword, \
-                       self.__class__._renderanchor)
-        finally:
-            self.d = None
-        return self.bottom
-    #
-    def _renderword(self, tuple, word, h, v, h2, v2, isfirst, islast):
-        if word[0] is not None: self.d.setfont(word[0])
-        baseline = v + tuple[5]
-        self.d.text((h, baseline - word[5]), word[1])
-        if self.anchorid > 0:
-            self.d.line((h, baseline+2), (h2, baseline+2))
-    #
-    def _renderanchor(self, tuple, word, h, v):
-        self.anchorid = word
-    #
-    # Return which anchor(s) was hit by the mouse
-    def hitcheck(self, mouseh, mousev):
-        self.mouseh = mouseh
-        self.mousev = mousev
-        self.anchorid = 0
-        self.hits = []
-        self.visit(self.__class__._hitcheckword, \
-                   self.__class__._hitcheckanchor)
-        return self.hits
-    #
-    def _hitcheckword(self, tuple, word, h, v, h2, v2, isfirst, islast):
-        if self.anchorid > 0 and h <= self.mouseh <= h2 and \
-                v <= self.mousev <= v2:
-            self.hits.append(self.anchorid)
-    #
-    def _hitcheckanchor(self, tuple, word, h, v):
-        self.anchorid = word
-    #
-    # Return whether the given anchor id is present
-    def hasanchor(self, id):
-        return id in self.words or -id in self.words
-    #
-    # Extract the raw text from the word list, substituting one space
-    # for non-empty inter-word space, and terminating with '\n'
-    def extract(self):
-        text = ''
-        for w in self.words:
-            if type(w) is not Int:
-                word = w[1]
-                if w[3]: word = word + ' '
-                text = text + word
-        return text + '\n'
-    #
-    # Return which character position was hit by the mouse, as
-    # an offset in the entire text as returned by extract().
-    # Return None if the mouse was not in this paragraph
-    def whereis(self, d, mouseh, mousev):
-        if mousev < self.top or mousev > self.bottom:
-            return None
-        self.mouseh = mouseh
-        self.mousev = mousev
-        self.lastfont = None
-        self.charcount = 0
-        try:
-            self.d = d
-            return self.visit(self.__class__._whereisword, \
-                              self.__class__._whereisanchor)
-        finally:
-            self.d = None
-    #
-    def _whereisword(self, tuple, word, h1, v1, h2, v2, isfirst, islast):
-        fo, te, wi, sp, st, as, de = word
-        if fo is not None: self.lastfont = fo
-        h = h1
-        if isfirst: h1 = 0
-        if islast: h2 = 999999
-        if not (v1 <= self.mousev <= v2 and h1 <= self.mouseh <= h2):
-            self.charcount = self.charcount + len(te) + (sp > 0)
-            return
-        if self.lastfont is not None:
-            self.d.setfont(self.lastfont)
-        cc = 0
-        for c in te:
-            cw = self.d.textwidth(c)
-            if self.mouseh <= h + cw/2:
-                return self.charcount + cc
-            cc = cc+1
-            h = h+cw
-        self.charcount = self.charcount + cc
-        if self.mouseh <= (h+h2) / 2:
-            return self.charcount
-        else:
-            return self.charcount + 1
-    #
-    def _whereisanchor(self, tuple, word, h, v):
-        pass
-    #
-    # Return screen position corresponding to position in paragraph.
-    # Return tuple (h, vtop, vbaseline, vbottom).
-    # This is more or less the inverse of whereis()
-    def screenpos(self, d, pos):
-        if pos < 0:
-            ascent, descent = self.lines[0][5:7]
-            return self.left, self.top, self.top + ascent, \
-                    self.top + ascent + descent
-        self.pos = pos
-        self.lastfont = None
-        try:
-            self.d = d
-            ok = self.visit(self.__class__._screenposword, \
-                            self.__class__._screenposanchor)
-        finally:
-            self.d = None
-        if ok is None:
-            ascent, descent = self.lines[-1][5:7]
-            ok = self.right, self.bottom - ascent - descent, \
-                    self.bottom - descent, self.bottom
-        return ok
-    #
-    def _screenposword(self, tuple, word, h1, v1, h2, v2, isfirst, islast):
-        fo, te, wi, sp, st, as, de = word
-        if fo is not None: self.lastfont = fo
-        cc = len(te) + (sp > 0)
-        if self.pos > cc:
-            self.pos = self.pos - cc
-            return
-        if self.pos < cc:
-            self.d.setfont(self.lastfont)
-            h = h1 + self.d.textwidth(te[:self.pos])
-        else:
-            h = h2
-        ascent, descent = tuple[5:7]
-        return h, v1, v1+ascent, v2
-    #
-    def _screenposanchor(self, tuple, word, h, v):
-        pass
-    #
-    # Invert the stretch of text between pos1 and pos2.
-    # If pos1 is None, the beginning is implied;
-    # if pos2 is None, the end is implied.
-    # Undoes its own effect when called again with the same arguments
-    def invert(self, d, pos1, pos2):
-        if pos1 is None:
-            pos1 = self.left, self.top, self.top, self.top
-        else:
-            pos1 = self.screenpos(d, pos1)
-        if pos2 is None:
-            pos2 = self.right, self.bottom,self.bottom,self.bottom
-        else:
-            pos2 = self.screenpos(d, pos2)
-        h1, top1, baseline1, bottom1 = pos1
-        h2, top2, baseline2, bottom2 = pos2
-        if bottom1 <= top2:
-            d.invert((h1, top1), (self.right, bottom1))
-            h1 = self.left
-            if bottom1 < top2:
-                d.invert((h1, bottom1), (self.right, top2))
-            top1, bottom1 = top2, bottom2
-        d.invert((h1, top1), (h2, bottom2))
diff --git a/Lib/lib-old/addpack.py b/Lib/lib-old/addpack.py
deleted file mode 100644
index 2fb2601..0000000
--- a/Lib/lib-old/addpack.py
+++ /dev/null
@@ -1,67 +0,0 @@
-# This module provides standard support for "packages".
-#
-# The idea is that large groups of related modules can be placed in
-# their own subdirectory, which can be added to the Python search path
-# in a relatively easy way.
-#
-# The current version takes a package name and searches the Python
-# search path for a directory by that name, and if found adds it to
-# the module search path (sys.path).  It maintains a list of packages
-# that have already been added so adding the same package many times
-# is OK.
-#
-# It is intended to be used in a fairly stylized manner: each module
-# that wants to use a particular package, say 'Foo', is supposed to
-# contain the following code:
-#
-#   from addpack import addpack
-#   addpack('Foo')
-#   <import modules from package Foo>
-#
-# Additional arguments, when present, provide additional places where
-# to look for the package before trying sys.path (these may be either
-# strings or lists/tuples of strings).  Also, if the package name is a
-# full pathname, first the last component is tried in the usual way,
-# then the full pathname is tried last.  If the package name is a
-# *relative* pathname (UNIX: contains a slash but doesn't start with
-# one), then nothing special is done.  The packages "/foo/bar/bletch"
-# and "bletch" are considered the same, but unrelated to "bar/bletch".
-#
-# If the algorithm finds more than one suitable subdirectory, all are
-# added to the search path -- this makes it possible to override part
-# of a package.  The same path will not be added more than once.
-#
-# If no directory is found, ImportError is raised.
-
-_packs = {}                             # {pack: [pathname, ...], ...}
-
-def addpack(pack, *locations):
-    import os
-    if os.path.isabs(pack):
-        base = os.path.basename(pack)
-    else:
-        base = pack
-    if _packs.has_key(base):
-        return
-    import sys
-    path = []
-    for loc in _flatten(locations) + sys.path:
-        fn = os.path.join(loc, base)
-        if fn not in path and os.path.isdir(fn):
-            path.append(fn)
-    if pack != base and pack not in path and os.path.isdir(pack):
-        path.append(pack)
-    if not path: raise ImportError, 'package ' + pack + ' not found'
-    _packs[base] = path
-    for fn in path:
-        if fn not in sys.path:
-            sys.path.append(fn)
-
-def _flatten(locations):
-    locs = []
-    for loc in locations:
-        if type(loc) == type(''):
-            locs.append(loc)
-        else:
-            locs = locs + _flatten(loc)
-    return locs
diff --git a/Lib/lib-old/cmp.py b/Lib/lib-old/cmp.py
deleted file mode 100644
index 1146a25..0000000
--- a/Lib/lib-old/cmp.py
+++ /dev/null
@@ -1,63 +0,0 @@
-"""Efficiently compare files, boolean outcome only (equal / not equal).
-
-Tricks (used in this order):
-    - Files with identical type, size & mtime are assumed to be clones
-    - Files with different type or size cannot be identical
-    - We keep a cache of outcomes of earlier comparisons
-    - We don't fork a process to run 'cmp' but read the files ourselves
-"""
-
-import os
-
-cache = {}
-
-def cmp(f1, f2, shallow=1):
-    """Compare two files, use the cache if possible.
-    Return 1 for identical files, 0 for different.
-    Raise exceptions if either file could not be statted, read, etc."""
-    s1, s2 = sig(os.stat(f1)), sig(os.stat(f2))
-    if s1[0] != 8 or s2[0] != 8:
-        # Either is a not a plain file -- always report as different
-        return 0
-    if shallow and s1 == s2:
-        # type, size & mtime match -- report same
-        return 1
-    if s1[:2] != s2[:2]: # Types or sizes differ, don't bother
-        # types or sizes differ -- report different
-        return 0
-    # same type and size -- look in the cache
-    key = (f1, f2)
-    try:
-        cs1, cs2, outcome = cache[key]
-        # cache hit
-        if s1 == cs1 and s2 == cs2:
-            # cached signatures match
-            return outcome
-        # stale cached signature(s)
-    except KeyError:
-        # cache miss
-        pass
-    # really compare
-    outcome = do_cmp(f1, f2)
-    cache[key] = s1, s2, outcome
-    return outcome
-
-def sig(st):
-    """Return signature (i.e., type, size, mtime) from raw stat data
-    0-5: st_mode, st_ino, st_dev, st_nlink, st_uid, st_gid
-    6-9: st_size, st_atime, st_mtime, st_ctime"""
-    type = st[0] / 4096
-    size = st[6]
-    mtime = st[8]
-    return type, size, mtime
-
-def do_cmp(f1, f2):
-    """Compare two files, really."""
-    bufsize = 8*1024 # Could be tuned
-    fp1 = open(f1, 'rb')
-    fp2 = open(f2, 'rb')
-    while 1:
-        b1 = fp1.read(bufsize)
-        b2 = fp2.read(bufsize)
-        if b1 != b2: return 0
-        if not b1: return 1
diff --git a/Lib/lib-old/cmpcache.py b/Lib/lib-old/cmpcache.py
deleted file mode 100644
index 11540f8..0000000
--- a/Lib/lib-old/cmpcache.py
+++ /dev/null
@@ -1,64 +0,0 @@
-"""Efficiently compare files, boolean outcome only (equal / not equal).
-
-Tricks (used in this order):
-    - Use the statcache module to avoid statting files more than once
-    - Files with identical type, size & mtime are assumed to be clones
-    - Files with different type or size cannot be identical
-    - We keep a cache of outcomes of earlier comparisons
-    - We don't fork a process to run 'cmp' but read the files ourselves
-"""
-
-import os
-from stat import *
-import statcache
-
-
-# The cache.
-#
-cache = {}
-
-
-def cmp(f1, f2, shallow=1):
-    """Compare two files, use the cache if possible.
-    May raise os.error if a stat or open of either fails.
-    Return 1 for identical files, 0 for different.
-    Raise exceptions if either file could not be statted, read, etc."""
-    s1, s2 = sig(statcache.stat(f1)), sig(statcache.stat(f2))
-    if not S_ISREG(s1[0]) or not S_ISREG(s2[0]):
-        # Either is a not a plain file -- always report as different
-        return 0
-    if shallow and s1 == s2:
-        # type, size & mtime match -- report same
-        return 1
-    if s1[:2] != s2[:2]: # Types or sizes differ, don't bother
-        # types or sizes differ -- report different
-        return 0
-    # same type and size -- look in the cache
-    key = f1 + ' ' + f2
-    if cache.has_key(key):
-        cs1, cs2, outcome = cache[key]
-        # cache hit
-        if s1 == cs1 and s2 == cs2:
-            # cached signatures match
-            return outcome
-        # stale cached signature(s)
-    # really compare
-    outcome = do_cmp(f1, f2)
-    cache[key] = s1, s2, outcome
-    return outcome
-
-def sig(st):
-    """Return signature (i.e., type, size, mtime) from raw stat data."""
-    return S_IFMT(st[ST_MODE]), st[ST_SIZE], st[ST_MTIME]
-
-def do_cmp(f1, f2):
-    """Compare two files, really."""
-    #print '    cmp', f1, f2 # XXX remove when debugged
-    bufsize = 8*1024 # Could be tuned
-    fp1 = open(f1, 'rb')
-    fp2 = open(f2, 'rb')
-    while 1:
-        b1 = fp1.read(bufsize)
-        b2 = fp2.read(bufsize)
-        if b1 != b2: return 0
-        if not b1: return 1
diff --git a/Lib/lib-old/codehack.py b/Lib/lib-old/codehack.py
deleted file mode 100644
index 0b5e3a1..0000000
--- a/Lib/lib-old/codehack.py
+++ /dev/null
@@ -1,81 +0,0 @@
-# A subroutine for extracting a function name from a code object
-# (with cache)
-
-import sys
-from stat import *
-import string
-import os
-import linecache
-
-# XXX The functions getcodename() and getfuncname() are now obsolete
-# XXX as code and function objects now have a name attribute --
-# XXX co.co_name and f.func_name.
-# XXX getlineno() is now also obsolete because of the new attribute
-# XXX of code objects, co.co_firstlineno.
-
-# Extract the function or class name from a code object.
-# This is a bit of a hack, since a code object doesn't contain
-# the name directly.  So what do we do:
-# - get the filename (which *is* in the code object)
-# - look in the code string to find the first SET_LINENO instruction
-#   (this must be the first instruction)
-# - get the line from the file
-# - if the line starts with 'class' or 'def' (after possible whitespace),
-#   extract the following identifier
-#
-# This breaks apart when the function was read from <stdin>
-# or constructed by exec(), when the file is not accessible,
-# and also when the file has been modified or when a line is
-# continued with a backslash before the function or class name.
-#
-# Because this is a pretty expensive hack, a cache is kept.
-
-SET_LINENO = 127 # The opcode (see "opcode.h" in the Python source)
-identchars = string.ascii_letters + string.digits + '_' # Identifier characters
-
-_namecache = {} # The cache
-
-def getcodename(co):
-    try:
-        return co.co_name
-    except AttributeError:
-        pass
-    key = `co` # arbitrary but uniquely identifying string
-    if _namecache.has_key(key): return _namecache[key]
-    filename = co.co_filename
-    code = co.co_code
-    name = ''
-    if ord(code[0]) == SET_LINENO:
-        lineno = ord(code[1]) | ord(code[2]) << 8
-        line = linecache.getline(filename, lineno)
-        words = line.split()
-        if len(words) >= 2 and words[0] in ('def', 'class'):
-            name = words[1]
-            for i in range(len(name)):
-                if name[i] not in identchars:
-                    name = name[:i]
-                    break
-    _namecache[key] = name
-    return name
-
-# Use the above routine to find a function's name.
-
-def getfuncname(func):
-    try:
-        return func.func_name
-    except AttributeError:
-        pass
-    return getcodename(func.func_code)
-
-# A part of the above code to extract just the line number from a code object.
-
-def getlineno(co):
-    try:
-        return co.co_firstlineno
-    except AttributeError:
-        pass
-    code = co.co_code
-    if ord(code[0]) == SET_LINENO:
-        return ord(code[1]) | ord(code[2]) << 8
-    else:
-        return -1
diff --git a/Lib/lib-old/dircmp.py b/Lib/lib-old/dircmp.py
deleted file mode 100644
index 1e7bf2a..0000000
--- a/Lib/lib-old/dircmp.py
+++ /dev/null
@@ -1,202 +0,0 @@
-"""A class to build directory diff tools on."""
-
-import os
-
-import dircache
-import cmpcache
-import statcache
-from stat import *
-
-class dircmp:
-    """Directory comparison class."""
-
-    def new(self, a, b):
-        """Initialize."""
-        self.a = a
-        self.b = b
-        # Properties that caller may change before calling self.run():
-        self.hide = [os.curdir, os.pardir] # Names never to be shown
-        self.ignore = ['RCS', 'tags'] # Names ignored in comparison
-
-        return self
-
-    def run(self):
-        """Compare everything except common subdirectories."""
-        self.a_list = filter(dircache.listdir(self.a), self.hide)
-        self.b_list = filter(dircache.listdir(self.b), self.hide)
-        self.a_list.sort()
-        self.b_list.sort()
-        self.phase1()
-        self.phase2()
-        self.phase3()
-
-    def phase1(self):
-        """Compute common names."""
-        self.a_only = []
-        self.common = []
-        for x in self.a_list:
-            if x in self.b_list:
-                self.common.append(x)
-            else:
-                self.a_only.append(x)
-
-        self.b_only = []
-        for x in self.b_list:
-            if x not in self.common:
-                self.b_only.append(x)
-
-    def phase2(self):
-        """Distinguish files, directories, funnies."""
-        self.common_dirs = []
-        self.common_files = []
-        self.common_funny = []
-
-        for x in self.common:
-            a_path = os.path.join(self.a, x)
-            b_path = os.path.join(self.b, x)
-
-            ok = 1
-            try:
-                a_stat = statcache.stat(a_path)
-            except os.error, why:
-                # print 'Can\'t stat', a_path, ':', why[1]
-                ok = 0
-            try:
-                b_stat = statcache.stat(b_path)
-            except os.error, why:
-                # print 'Can\'t stat', b_path, ':', why[1]
-                ok = 0
-
-            if ok:
-                a_type = S_IFMT(a_stat[ST_MODE])
-                b_type = S_IFMT(b_stat[ST_MODE])
-                if a_type != b_type:
-                    self.common_funny.append(x)
-                elif S_ISDIR(a_type):
-                    self.common_dirs.append(x)
-                elif S_ISREG(a_type):
-                    self.common_files.append(x)
-                else:
-                    self.common_funny.append(x)
-            else:
-                self.common_funny.append(x)
-
-    def phase3(self):
-        """Find out differences between common files."""
-        xx = cmpfiles(self.a, self.b, self.common_files)
-        self.same_files, self.diff_files, self.funny_files = xx
-
-    def phase4(self):
-        """Find out differences between common subdirectories.
-        A new dircmp object is created for each common subdirectory,
-        these are stored in a dictionary indexed by filename.
-        The hide and ignore properties are inherited from the parent."""
-        self.subdirs = {}
-        for x in self.common_dirs:
-            a_x = os.path.join(self.a, x)
-            b_x = os.path.join(self.b, x)
-            self.subdirs[x] = newdd = dircmp().new(a_x, b_x)
-            newdd.hide = self.hide
-            newdd.ignore = self.ignore
-            newdd.run()
-
-    def phase4_closure(self):
-        """Recursively call phase4() on subdirectories."""
-        self.phase4()
-        for x in self.subdirs.keys():
-            self.subdirs[x].phase4_closure()
-
-    def report(self):
-        """Print a report on the differences between a and b."""
-        # Assume that phases 1 to 3 have been executed
-        # Output format is purposely lousy
-        print 'diff', self.a, self.b
-        if self.a_only:
-            print 'Only in', self.a, ':', self.a_only
-        if self.b_only:
-            print 'Only in', self.b, ':', self.b_only
-        if self.same_files:
-            print 'Identical files :', self.same_files
-        if self.diff_files:
-            print 'Differing files :', self.diff_files
-        if self.funny_files:
-            print 'Trouble with common files :', self.funny_files
-        if self.common_dirs:
-            print 'Common subdirectories :', self.common_dirs
-        if self.common_funny:
-            print 'Common funny cases :', self.common_funny
-
-    def report_closure(self):
-        """Print reports on self and on subdirs.
-        If phase 4 hasn't been done, no subdir reports are printed."""
-        self.report()
-        try:
-            x = self.subdirs
-        except AttributeError:
-            return # No subdirectories computed
-        for x in self.subdirs.keys():
-            print
-            self.subdirs[x].report_closure()
-
-    def report_phase4_closure(self):
-        """Report and do phase 4 recursively."""
-        self.report()
-        self.phase4()
-        for x in self.subdirs.keys():
-            print
-            self.subdirs[x].report_phase4_closure()
-
-
-def cmpfiles(a, b, common):
-    """Compare common files in two directories.
-    Return:
-        - files that compare equal
-        - files that compare different
-        - funny cases (can't stat etc.)"""
-
-    res = ([], [], [])
-    for x in common:
-        res[cmp(os.path.join(a, x), os.path.join(b, x))].append(x)
-    return res
-
-
-def cmp(a, b):
-    """Compare two files.
-    Return:
-        0 for equal
-        1 for different
-        2 for funny cases (can't stat, etc.)"""
-
-    try:
-        if cmpcache.cmp(a, b): return 0
-        return 1
-    except os.error:
-        return 2
-
-
-def filter(list, skip):
-    """Return a copy with items that occur in skip removed."""
-
-    result = []
-    for item in list:
-        if item not in skip: result.append(item)
-    return result
-
-
-def demo():
-    """Demonstration and testing."""
-
-    import sys
-    import getopt
-    options, args = getopt.getopt(sys.argv[1:], 'r')
-    if len(args) != 2:
-        raise getopt.error, 'need exactly two args'
-    dd = dircmp().new(args[0], args[1])
-    dd.run()
-    if ('-r', '') in options:
-        dd.report_phase4_closure()
-    else:
-        dd.report()
-
-if __name__ == "__main__":
-    demo()
diff --git a/Lib/lib-old/dump.py b/Lib/lib-old/dump.py
deleted file mode 100644
index 60bdba8..0000000
--- a/Lib/lib-old/dump.py
+++ /dev/null
@@ -1,63 +0,0 @@
-# Module 'dump'
-#
-# Print python code that reconstructs a variable.
-# This only works in certain cases.
-#
-# It works fine for:
-# - ints and floats (except NaNs and other weird things)
-# - strings
-# - compounds and lists, provided it works for all their elements
-# - imported modules, provided their name is the module name
-#
-# It works for top-level dictionaries but not for dictionaries
-# contained in other objects (could be made to work with some hassle
-# though).
-#
-# It does not work for functions (all sorts), classes, class objects,
-# windows, files etc.
-#
-# Finally, objects referenced by more than one name or contained in more
-# than one other object lose their sharing property (this is bad for
-# strings used as exception identifiers, for instance).
-
-# Dump a whole symbol table
-#
-def dumpsymtab(dict):
-    for key in dict.keys():
-        dumpvar(key, dict[key])
-
-# Dump a single variable
-#
-def dumpvar(name, x):
-    import sys
-    t = type(x)
-    if t == type({}):
-        print name, '= {}'
-        for key in x.keys():
-            item = x[key]
-            if not printable(item):
-                print '#',
-            print name, '[', `key`, '] =', `item`
-    elif t in (type(''), type(0), type(0.0), type([]), type(())):
-        if not printable(x):
-            print '#',
-        print name, '=', `x`
-    elif t == type(sys):
-        print 'import', name, '#', x
-    else:
-        print '#', name, '=', x
-
-# check if a value is printable in a way that can be read back with input()
-#
-def printable(x):
-    t = type(x)
-    if t in (type(''), type(0), type(0.0)):
-        return 1
-    if t in (type([]), type(())):
-        for item in x:
-            if not printable(item):
-                return 0
-        return 1
-    if x == {}:
-        return 1
-    return 0
diff --git a/Lib/lib-old/find.py b/Lib/lib-old/find.py
deleted file mode 100644
index 39ad771..0000000
--- a/Lib/lib-old/find.py
+++ /dev/null
@@ -1,26 +0,0 @@
-import fnmatch
-import os
-
-_debug = 0
-
-_prune = ['(*)']
-
-def find(pattern, dir = os.curdir):
-    list = []
-    names = os.listdir(dir)
-    names.sort()
-    for name in names:
-        if name in (os.curdir, os.pardir):
-            continue
-        fullname = os.path.join(dir, name)
-        if fnmatch.fnmatch(name, pattern):
-            list.append(fullname)
-        if os.path.isdir(fullname) and not os.path.islink(fullname):
-            for p in _prune:
-                if fnmatch.fnmatch(name, p):
-                    if _debug: print "skip", `fullname`
-                    break
-            else:
-                if _debug: print "descend into", `fullname`
-                list = list + find(pattern, fullname)
-    return list
diff --git a/Lib/lib-old/fmt.py b/Lib/lib-old/fmt.py
deleted file mode 100644
index 997d37a..0000000
--- a/Lib/lib-old/fmt.py
+++ /dev/null
@@ -1,623 +0,0 @@
-# Text formatting abstractions
-# Note -- this module is obsolete, it's too slow anyway
-
-
-import string
-import Para
-
-
-# A formatter back-end object has one method that is called by the formatter:
-# addpara(p), where p is a paragraph object.  For example:
-
-
-# Formatter back-end to do nothing at all with the paragraphs
-class NullBackEnd:
-    #
-    def __init__(self):
-        pass
-    #
-    def addpara(self, p):
-        pass
-    #
-    def bgn_anchor(self, id):
-        pass
-    #
-    def end_anchor(self, id):
-        pass
-
-
-# Formatter back-end to collect the paragraphs in a list
-class SavingBackEnd(NullBackEnd):
-    #
-    def __init__(self):
-        self.paralist = []
-    #
-    def addpara(self, p):
-        self.paralist.append(p)
-    #
-    def hitcheck(self, h, v):
-        hits = []
-        for p in self.paralist:
-            if p.top <= v <= p.bottom:
-                for id in p.hitcheck(h, v):
-                    if id not in hits:
-                        hits.append(id)
-        return hits
-    #
-    def extract(self):
-        text = ''
-        for p in self.paralist:
-            text = text + (p.extract())
-        return text
-    #
-    def extractpart(self, long1, long2):
-        if long1 > long2: long1, long2 = long2, long1
-        para1, pos1 = long1
-        para2, pos2 = long2
-        text = ''
-        while para1 < para2:
-            ptext = self.paralist[para1].extract()
-            text = text + ptext[pos1:]
-            pos1 = 0
-            para1 = para1 + 1
-        ptext = self.paralist[para2].extract()
-        return text + ptext[pos1:pos2]
-    #
-    def whereis(self, d, h, v):
-        total = 0
-        for i in range(len(self.paralist)):
-            p = self.paralist[i]
-            result = p.whereis(d, h, v)
-            if result is not None:
-                return i, result
-        return None
-    #
-    def roundtowords(self, long1, long2):
-        i, offset = long1
-        text = self.paralist[i].extract()
-        while offset > 0 and text[offset-1] != ' ': offset = offset-1
-        long1 = i, offset
-        #
-        i, offset = long2
-        text = self.paralist[i].extract()
-        n = len(text)
-        while offset < n-1 and text[offset] != ' ': offset = offset+1
-        long2 = i, offset
-        #
-        return long1, long2
-    #
-    def roundtoparagraphs(self, long1, long2):
-        long1 = long1[0], 0
-        long2 = long2[0], len(self.paralist[long2[0]].extract())
-        return long1, long2
-
-
-# Formatter back-end to send the text directly to the drawing object
-class WritingBackEnd(NullBackEnd):
-    #
-    def __init__(self, d, width):
-        self.d = d
-        self.width = width
-        self.lineno = 0
-    #
-    def addpara(self, p):
-        self.lineno = p.render(self.d, 0, self.lineno, self.width)
-
-
-# A formatter receives a stream of formatting instructions and assembles
-# these into a stream of paragraphs on to a back-end.  The assembly is
-# parametrized by a text measurement object, which must match the output
-# operations of the back-end.  The back-end is responsible for splitting
-# paragraphs up in lines of a given maximum width.  (This is done because
-# in a windowing environment, when the window size changes, there is no
-# need to redo the assembly into paragraphs, but the splitting into lines
-# must be done taking the new window size into account.)
-
-
-# Formatter base class.  Initialize it with a text measurement object,
-# which is used for text measurements, and a back-end object,
-# which receives the completed paragraphs.  The formatting methods are:
-# setfont(font)
-# setleftindent(nspaces)
-# setjust(type) where type is 'l', 'c', 'r', or 'lr'
-# flush()
-# vspace(nlines)
-# needvspace(nlines)
-# addword(word, nspaces)
-class BaseFormatter:
-    #
-    def __init__(self, d, b):
-        # Drawing object used for text measurements
-        self.d = d
-        #
-        # BackEnd object receiving completed paragraphs
-        self.b = b
-        #
-        # Parameters of the formatting model
-        self.leftindent = 0
-        self.just = 'l'
-        self.font = None
-        self.blanklines = 0
-        #
-        # Parameters derived from the current font
-        self.space = d.textwidth(' ')
-        self.line = d.lineheight()
-        self.ascent = d.baseline()
-        self.descent = self.line - self.ascent
-        #
-        # Parameter derived from the default font
-        self.n_space = self.space
-        #
-        # Current paragraph being built
-        self.para = None
-        self.nospace = 1
-        #
-        # Font to set on the next word
-        self.nextfont = None
-    #
-    def newpara(self):
-        return Para.Para()
-    #
-    def setfont(self, font):
-        if font is None: return
-        self.font = self.nextfont = font
-        d = self.d
-        d.setfont(font)
-        self.space = d.textwidth(' ')
-        self.line = d.lineheight()
-        self.ascent = d.baseline()
-        self.descent = self.line - self.ascent
-    #
-    def setleftindent(self, nspaces):
-        self.leftindent = int(self.n_space * nspaces)
-        if self.para:
-            hang = self.leftindent - self.para.indent_left
-            if hang > 0 and self.para.getlength() <= hang:
-                self.para.makehangingtag(hang)
-                self.nospace = 1
-            else:
-                self.flush()
-    #
-    def setrightindent(self, nspaces):
-        self.rightindent = int(self.n_space * nspaces)
-        if self.para:
-            self.para.indent_right = self.rightindent
-            self.flush()
-    #
-    def setjust(self, just):
-        self.just = just
-        if self.para:
-            self.para.just = self.just
-    #
-    def flush(self):
-        if self.para:
-            self.b.addpara(self.para)
-            self.para = None
-            if self.font is not None:
-                self.d.setfont(self.font)
-        self.nospace = 1
-    #
-    def vspace(self, nlines):
-        self.flush()
-        if nlines > 0:
-            self.para = self.newpara()
-            tuple = None, '', 0, 0, 0, int(nlines*self.line), 0
-            self.para.words.append(tuple)
-            self.flush()
-            self.blanklines = self.blanklines + nlines
-    #
-    def needvspace(self, nlines):
-        self.flush() # Just to be sure
-        if nlines > self.blanklines:
-            self.vspace(nlines - self.blanklines)
-    #
-    def addword(self, text, space):
-        if self.nospace and not text:
-            return
-        self.nospace = 0
-        self.blanklines = 0
-        if not self.para:
-            self.para = self.newpara()
-            self.para.indent_left = self.leftindent
-            self.para.just = self.just
-            self.nextfont = self.font
-        space = int(space * self.space)
-        self.para.words.append((self.nextfont, text,
-                self.d.textwidth(text), space, space,
-                self.ascent, self.descent))
-        self.nextfont = None
-    #
-    def bgn_anchor(self, id):
-        if not self.para:
-            self.nospace = 0
-            self.addword('', 0)
-        self.para.bgn_anchor(id)
-    #
-    def end_anchor(self, id):
-        if not self.para:
-            self.nospace = 0
-            self.addword('', 0)
-        self.para.end_anchor(id)
-
-
-# Measuring object for measuring text as viewed on a tty
-class NullMeasurer:
-    #
-    def __init__(self):
-        pass
-    #
-    def setfont(self, font):
-        pass
-    #
-    def textwidth(self, text):
-        return len(text)
-    #
-    def lineheight(self):
-        return 1
-    #
-    def baseline(self):
-        return 0
-
-
-# Drawing object for writing plain ASCII text to a file
-class FileWriter:
-    #
-    def __init__(self, fp):
-        self.fp = fp
-        self.lineno, self.colno = 0, 0
-    #
-    def setfont(self, font):
-        pass
-    #
-    def text(self, (h, v), str):
-        if not str: return
-        if '\n' in str:
-            raise ValueError, 'can\'t write \\n'
-        while self.lineno < v:
-            self.fp.write('\n')
-            self.colno, self.lineno = 0, self.lineno + 1
-        while self.lineno > v:
-            # XXX This should never happen...
-            self.fp.write('\033[A') # ANSI up arrow
-            self.lineno = self.lineno - 1
-        if self.colno < h:
-            self.fp.write(' ' * (h - self.colno))
-        elif self.colno > h:
-            self.fp.write('\b' * (self.colno - h))
-        self.colno = h
-        self.fp.write(str)
-        self.colno = h + len(str)
-
-
-# Formatting class to do nothing at all with the data
-class NullFormatter(BaseFormatter):
-    #
-    def __init__(self):
-        d = NullMeasurer()
-        b = NullBackEnd()
-        BaseFormatter.__init__(self, d, b)
-
-
-# Formatting class to write directly to a file
-class WritingFormatter(BaseFormatter):
-    #
-    def __init__(self, fp, width):
-        dm = NullMeasurer()
-        dw = FileWriter(fp)
-        b = WritingBackEnd(dw, width)
-        BaseFormatter.__init__(self, dm, b)
-        self.blanklines = 1
-    #
-    # Suppress multiple blank lines
-    def needvspace(self, nlines):
-        BaseFormatter.needvspace(self, min(1, nlines))
-
-
-# A "FunnyFormatter" writes ASCII text with a twist: *bold words*,
-# _italic text_ and _underlined words_, and `quoted text'.
-# It assumes that the fonts are 'r', 'i', 'b', 'u', 'q': (roman,
-# italic, bold, underline, quote).
-# Moreover, if the font is in upper case, the text is converted to
-# UPPER CASE.
-class FunnyFormatter(WritingFormatter):
-    #
-    def flush(self):
-        if self.para: finalize(self.para)
-        WritingFormatter.flush(self)
-
-
-# Surrounds *bold words* and _italic text_ in a paragraph with
-# appropriate markers, fixing the size (assuming these characters'
-# width is 1).
-openchar = \
-    {'b':'*', 'i':'_', 'u':'_', 'q':'`', 'B':'*', 'I':'_', 'U':'_', 'Q':'`'}
-closechar = \
-    {'b':'*', 'i':'_', 'u':'_', 'q':'\'', 'B':'*', 'I':'_', 'U':'_', 'Q':'\''}
-def finalize(para):
-    oldfont = curfont = 'r'
-    para.words.append(('r', '', 0, 0, 0, 0)) # temporary, deleted at end
-    for i in range(len(para.words)):
-        fo, te, wi = para.words[i][:3]
-        if fo is not None: curfont = fo
-        if curfont != oldfont:
-            if closechar.has_key(oldfont):
-                c = closechar[oldfont]
-                j = i-1
-                while j > 0 and para.words[j][1] == '': j = j-1
-                fo1, te1, wi1 = para.words[j][:3]
-                te1 = te1 + c
-                wi1 = wi1 + len(c)
-                para.words[j] = (fo1, te1, wi1) + \
-                        para.words[j][3:]
-            if openchar.has_key(curfont) and te:
-                c = openchar[curfont]
-                te = c + te
-                wi = len(c) + wi
-                para.words[i] = (fo, te, wi) + \
-                        para.words[i][3:]
-            if te: oldfont = curfont
-            else: oldfont = 'r'
-        if curfont in string.uppercase:
-            te = string.upper(te)
-            para.words[i] = (fo, te, wi) + para.words[i][3:]
-    del para.words[-1]
-
-
-# Formatter back-end to draw the text in a window.
-# This has an option to draw while the paragraphs are being added,
-# to minimize the delay before the user sees anything.
-# This manages the entire "document" of the window.
-class StdwinBackEnd(SavingBackEnd):
-    #
-    def __init__(self, window, drawnow):
-        self.window = window
-        self.drawnow = drawnow
-        self.width = window.getwinsize()[0]
-        self.selection = None
-        self.height = 0
-        window.setorigin(0, 0)
-        window.setdocsize(0, 0)
-        self.d = window.begindrawing()
-        SavingBackEnd.__init__(self)
-    #
-    def finish(self):
-        self.d.close()
-        self.d = None
-        self.window.setdocsize(0, self.height)
-    #
-    def addpara(self, p):
-        self.paralist.append(p)
-        if self.drawnow:
-            self.height = \
-                    p.render(self.d, 0, self.height, self.width)
-        else:
-            p.layout(self.width)
-            p.left = 0
-            p.top = self.height
-            p.right = self.width
-            p.bottom = self.height + p.height
-            self.height = p.bottom
-    #
-    def resize(self):
-        self.window.change((0, 0), (self.width, self.height))
-        self.width = self.window.getwinsize()[0]
-        self.height = 0
-        for p in self.paralist:
-            p.layout(self.width)
-            p.left = 0
-            p.top = self.height
-            p.right = self.width
-            p.bottom = self.height + p.height
-            self.height = p.bottom
-        self.window.change((0, 0), (self.width, self.height))
-        self.window.setdocsize(0, self.height)
-    #
-    def redraw(self, area):
-        d = self.window.begindrawing()
-        (left, top), (right, bottom) = area
-        d.erase(area)
-        d.cliprect(area)
-        for p in self.paralist:
-            if top < p.bottom and p.top < bottom:
-                v = p.render(d, p.left, p.top, p.right)
-        if self.selection:
-            self.invert(d, self.selection)
-        d.close()
-    #
-    def setselection(self, new):
-        if new:
-            long1, long2 = new
-            pos1 = long1[:3]
-            pos2 = long2[:3]
-            new = pos1, pos2
-        if new != self.selection:
-            d = self.window.begindrawing()
-            if self.selection:
-                self.invert(d, self.selection)
-            if new:
-                self.invert(d, new)
-            d.close()
-            self.selection = new
-    #
-    def getselection(self):
-        return self.selection
-    #
-    def extractselection(self):
-        if self.selection:
-            a, b = self.selection
-            return self.extractpart(a, b)
-        else:
-            return None
-    #
-    def invert(self, d, region):
-        long1, long2 = region
-        if long1 > long2: long1, long2 = long2, long1
-        para1, pos1 = long1
-        para2, pos2 = long2
-        while para1 < para2:
-            self.paralist[para1].invert(d, pos1, None)
-            pos1 = None
-            para1 = para1 + 1
-        self.paralist[para2].invert(d, pos1, pos2)
-    #
-    def search(self, prog):
-        import re, string
-        if type(prog) is type(''):
-            prog = re.compile(string.lower(prog))
-        if self.selection:
-            iold = self.selection[0][0]
-        else:
-            iold = -1
-        hit = None
-        for i in range(len(self.paralist)):
-            if i == iold or i < iold and hit:
-                continue
-            p = self.paralist[i]
-            text = string.lower(p.extract())
-            match = prog.search(text)
-            if match:
-                a, b = match.group(0)
-                long1 = i, a
-                long2 = i, b
-                hit = long1, long2
-                if i > iold:
-                    break
-        if hit:
-            self.setselection(hit)
-            i = hit[0][0]
-            p = self.paralist[i]
-            self.window.show((p.left, p.top), (p.right, p.bottom))
-            return 1
-        else:
-            return 0
-    #
-    def showanchor(self, id):
-        for i in range(len(self.paralist)):
-            p = self.paralist[i]
-            if p.hasanchor(id):
-                long1 = i, 0
-                long2 = i, len(p.extract())
-                hit = long1, long2
-                self.setselection(hit)
-                self.window.show(
-                        (p.left, p.top), (p.right, p.bottom))
-                break
-
-
-# GL extensions
-
-class GLFontCache:
-    #
-    def __init__(self):
-        self.reset()
-        self.setfont('')
-    #
-    def reset(self):
-        self.fontkey = None
-        self.fonthandle = None
-        self.fontinfo = None
-        self.fontcache = {}
-    #
-    def close(self):
-        self.reset()
-    #
-    def setfont(self, fontkey):
-        if fontkey == '':
-            fontkey = 'Times-Roman 12'
-        elif ' ' not in fontkey:
-            fontkey = fontkey + ' 12'
-        if fontkey == self.fontkey:
-            return
-        if self.fontcache.has_key(fontkey):
-            handle = self.fontcache[fontkey]
-        else:
-            import string
-            i = string.index(fontkey, ' ')
-            name, sizestr = fontkey[:i], fontkey[i:]
-            size = eval(sizestr)
-            key1 = name + ' 1'
-            key = name + ' ' + `size`
-            # NB key may differ from fontkey!
-            if self.fontcache.has_key(key):
-                handle = self.fontcache[key]
-            else:
-                if self.fontcache.has_key(key1):
-                    handle = self.fontcache[key1]
-                else:
-                    import fm
-                    handle = fm.findfont(name)
-                    self.fontcache[key1] = handle
-                handle = handle.scalefont(size)
-                self.fontcache[fontkey] = \
-                        self.fontcache[key] = handle
-        self.fontkey = fontkey
-        if self.fonthandle != handle:
-            self.fonthandle = handle
-            self.fontinfo = handle.getfontinfo()
-            handle.setfont()
-
-
-class GLMeasurer(GLFontCache):
-    #
-    def textwidth(self, text):
-        return self.fonthandle.getstrwidth(text)
-    #
-    def baseline(self):
-        return self.fontinfo[6] - self.fontinfo[3]
-    #
-    def lineheight(self):
-        return self.fontinfo[6]
-
-
-class GLWriter(GLFontCache):
-    #
-    # NOTES:
-    # (1) Use gl.ortho2 to use X pixel coordinates!
-    #
-    def text(self, (h, v), text):
-        import gl, fm
-        gl.cmov2i(h, v + self.fontinfo[6] - self.fontinfo[3])
-        fm.prstr(text)
-    #
-    def setfont(self, fontkey):
-        oldhandle = self.fonthandle
-        GLFontCache.setfont(fontkey)
-        if self.fonthandle != oldhandle:
-            handle.setfont()
-
-
-class GLMeasurerWriter(GLMeasurer, GLWriter):
-    pass
-
-
-class GLBackEnd(SavingBackEnd):
-    #
-    def __init__(self, wid):
-        import gl
-        gl.winset(wid)
-        self.wid = wid
-        self.width = gl.getsize()[1]
-        self.height = 0
-        self.d = GLMeasurerWriter()
-        SavingBackEnd.__init__(self)
-    #
-    def finish(self):
-        pass
-    #
-    def addpara(self, p):
-        self.paralist.append(p)
-        self.height = p.render(self.d, 0, self.height, self.width)
-    #
-    def redraw(self):
-        import gl
-        gl.winset(self.wid)
-        width = gl.getsize()[1]
-        if width != self.width:
-            setdocsize = 1
-            self.width = width
-            for p in self.paralist:
-                p.top = p.bottom = None
-        d = self.d
-        v = 0
-        for p in self.paralist:
-            v = p.render(d, 0, v, width)
diff --git a/Lib/lib-old/grep.py b/Lib/lib-old/grep.py
deleted file mode 100644
index 2926746..0000000
--- a/Lib/lib-old/grep.py
+++ /dev/null
@@ -1,79 +0,0 @@
-# 'grep'
-
-import regex
-from regex_syntax import *
-
-opt_show_where = 0
-opt_show_filename = 0
-opt_show_lineno = 1
-
-def grep(pat, *files):
-    return ggrep(RE_SYNTAX_GREP, pat, files)
-
-def egrep(pat, *files):
-    return ggrep(RE_SYNTAX_EGREP, pat, files)
-
-def emgrep(pat, *files):
-    return ggrep(RE_SYNTAX_EMACS, pat, files)
-
-def ggrep(syntax, pat, files):
-    if len(files) == 1 and type(files[0]) == type([]):
-        files = files[0]
-    global opt_show_filename
-    opt_show_filename = (len(files) != 1)
-    syntax = regex.set_syntax(syntax)
-    try:
-        prog = regex.compile(pat)
-    finally:
-        syntax = regex.set_syntax(syntax)
-    for filename in files:
-        fp = open(filename, 'r')
-        lineno = 0
-        while 1:
-            line = fp.readline()
-            if not line: break
-            lineno = lineno + 1
-            if prog.search(line) >= 0:
-                showline(filename, lineno, line, prog)
-        fp.close()
-
-def pgrep(pat, *files):
-    if len(files) == 1 and type(files[0]) == type([]):
-        files = files[0]
-    global opt_show_filename
-    opt_show_filename = (len(files) != 1)
-    import re
-    prog = re.compile(pat)
-    for filename in files:
-        fp = open(filename, 'r')
-        lineno = 0
-        while 1:
-            line = fp.readline()
-            if not line: break
-            lineno = lineno + 1
-            if prog.search(line):
-                showline(filename, lineno, line, prog)
-        fp.close()
-
-def showline(filename, lineno, line, prog):
-    if line[-1:] == '\n': line = line[:-1]
-    if opt_show_lineno:
-        prefix = `lineno`.rjust(3) + ': '
-    else:
-        prefix = ''
-    if opt_show_filename:
-        prefix = filename + ': ' + prefix
-    print prefix + line
-    if opt_show_where:
-        start, end = prog.regs()[0]
-        line = line[:start]
-        if '\t' not in line:
-            prefix = ' ' * (len(prefix) + start)
-        else:
-            prefix = ' ' * len(prefix)
-            for c in line:
-                if c != '\t': c = ' '
-                prefix = prefix + c
-        if start == end: prefix = prefix + '\\'
-        else: prefix = prefix + '^'*(end-start)
-        print prefix
diff --git a/Lib/lib-old/lockfile.py b/Lib/lib-old/lockfile.py
deleted file mode 100644
index cde9b48..0000000
--- a/Lib/lib-old/lockfile.py
+++ /dev/null
@@ -1,15 +0,0 @@
-import struct, fcntl
-
-def writelock(f):
-    _lock(f, fcntl.F_WRLCK)
-
-def readlock(f):
-    _lock(f, fcntl.F_RDLCK)
-
-def unlock(f):
-    _lock(f, fcntl.F_UNLCK)
-
-def _lock(f, op):
-    dummy = fcntl.fcntl(f.fileno(), fcntl.F_SETLKW,
-                        struct.pack('2h8l', op,
-                                    0, 0, 0, 0, 0, 0, 0, 0, 0))
diff --git a/Lib/lib-old/newdir.py b/Lib/lib-old/newdir.py
deleted file mode 100644
index 356becc..0000000
--- a/Lib/lib-old/newdir.py
+++ /dev/null
@@ -1,73 +0,0 @@
-# New dir() function
-
-
-# This should be the new dir(), except that it should still list
-# the current local name space by default
-
-def listattrs(x):
-    try:
-        dictkeys = x.__dict__.keys()
-    except (AttributeError, TypeError):
-        dictkeys = []
-    #
-    try:
-        methods = x.__methods__
-    except (AttributeError, TypeError):
-        methods = []
-    #
-    try:
-        members = x.__members__
-    except (AttributeError, TypeError):
-        members = []
-    #
-    try:
-        the_class = x.__class__
-    except (AttributeError, TypeError):
-        the_class = None
-    #
-    try:
-        bases = x.__bases__
-    except (AttributeError, TypeError):
-        bases = ()
-    #
-    total = dictkeys + methods + members
-    if the_class:
-        # It's a class instace; add the class's attributes
-        # that are functions (methods)...
-        class_attrs = listattrs(the_class)
-        class_methods = []
-        for name in class_attrs:
-            if is_function(getattr(the_class, name)):
-                class_methods.append(name)
-        total = total + class_methods
-    elif bases:
-        # It's a derived class; add the base class attributes
-        for base in bases:
-            base_attrs = listattrs(base)
-            total = total + base_attrs
-    total.sort()
-    return total
-    i = 0
-    while i+1 < len(total):
-        if total[i] == total[i+1]:
-            del total[i+1]
-        else:
-            i = i+1
-    return total
-
-
-# Helper to recognize functions
-
-def is_function(x):
-    return type(x) == type(is_function)
-
-
-# Approximation of builtin dir(); but note that this lists the user's
-# variables by default, not the current local name space.
-
-def dir(x = None):
-    if x is not None:
-        return listattrs(x)
-    else:
-        import __main__
-        return listattrs(__main__)
diff --git a/Lib/lib-old/ni.py b/Lib/lib-old/ni.py
deleted file mode 100644
index 074f989..0000000
--- a/Lib/lib-old/ni.py
+++ /dev/null
@@ -1,433 +0,0 @@
-"""New import scheme with package support.
-
-Quick Reference
----------------
-
-- To enable package support, execute "import ni" before importing any
-  packages.  Importing this module automatically installs the relevant
-  import hooks.
-
-- To create a package named spam containing sub-modules ham, bacon and
-  eggs, create a directory spam somewhere on Python's module search
-  path (i.e. spam's parent directory must be one of the directories in
-  sys.path or $PYTHONPATH); then create files ham.py, bacon.py and
-  eggs.py inside spam.
-
-- To import module ham from package spam and use function hamneggs()
-  from that module, you can either do
-
-    import spam.ham             # *not* "import spam" !!!
-    spam.ham.hamneggs()
-
-  or
-
-    from spam import ham
-    ham.hamneggs()
-
-  or
-
-    from spam.ham import hamneggs
-    hamneggs()
-
-- Importing just "spam" does not do what you expect: it creates an
-  empty package named spam if one does not already exist, but it does
-  not import spam's submodules.  The only submodule that is guaranteed
-  to be imported is spam.__init__, if it exists.  Note that
-  spam.__init__ is a submodule of package spam.  It can reference to
-  spam's namespace via the '__.' prefix, for instance
-
-    __.spam_inited = 1          # Set a package-level variable
-
-
-
-Theory of Operation
--------------------
-
-A Package is a module that can contain other modules.  Packages can be
-nested.  Package introduce dotted names for modules, like P.Q.M, which
-could correspond to a file P/Q/M.py found somewhere on sys.path.  It
-is possible to import a package itself, though this makes little sense
-unless the package contains a module called __init__.
-
-A package has two variables that control the namespace used for
-packages and modules, both initialized to sensible defaults the first
-time the package is referenced.
-
-(1) A package's *module search path*, contained in the per-package
-variable __path__, defines a list of *directories* where submodules or
-subpackages of the package are searched.  It is initialized to the
-directory containing the package.  Setting this variable to None makes
-the module search path default to sys.path (this is not quite the same
-as setting it to sys.path, since the latter won't track later
-assignments to sys.path).
-
-(2) A package's *import domain*, contained in the per-package variable
-__domain__, defines a list of *packages* that are searched (using
-their respective module search paths) to satisfy imports.  It is
-initialized to the list consisting of the package itself, its parent
-package, its parent's parent, and so on, ending with the root package
-(the nameless package containing all top-level packages and modules,
-whose module search path is None, implying sys.path).
-
-The default domain implements a search algorithm called "expanding
-search".  An alternative search algorithm called "explicit search"
-fixes the import search path to contain only the root package,
-requiring the modules in the package to name all imported modules by
-their full name.  The convention of using '__' to refer to the current
-package (both as a per-module variable and in module names) can be
-used by packages using explicit search to refer to modules in the same
-package; this combination is known as "explicit-relative search".
-
-The PackageImporter and PackageLoader classes together implement the
-following policies:
-
-- There is a root package, whose name is ''.  It cannot be imported
-  directly but may be referenced, e.g. by using '__' from a top-level
-  module.
-
-- In each module or package, the variable '__' contains a reference to
-  the parent package; in the root package, '__' points to itself.
-
-- In the name for imported modules (e.g. M in "import M" or "from M
-  import ..."), a leading '__' refers to the current package (i.e.
-  the package containing the current module); leading '__.__' and so
-  on refer to the current package's parent, and so on.  The use of
-  '__' elsewhere in the module name is not supported.
-
-- Modules are searched using the "expanding search" algorithm by
-  virtue of the default value for __domain__.
-
-- If A.B.C is imported, A is searched using __domain__; then
-  subpackage B is searched in A using its __path__, and so on.
-
-- Built-in modules have priority: even if a file sys.py exists in a
-  package, "import sys" imports the built-in sys module.
-
-- The same holds for frozen modules, for better or for worse.
-
-- Submodules and subpackages are not automatically loaded when their
-  parent packages is loaded.
-
-- The construct "from package import *" is illegal.  (It can still be
-  used to import names from a module.)
-
-- When "from package import module1, module2, ..." is used, those
-    modules are explicitly loaded.
-
-- When a package is loaded, if it has a submodule __init__, that
-  module is loaded.  This is the place where required submodules can
-  be loaded, the __path__ variable extended, etc.  The __init__ module
-  is loaded even if the package was loaded only in order to create a
-  stub for a sub-package: if "import P.Q.R" is the first reference to
-  P, and P has a submodule __init__, P.__init__ is loaded before P.Q
-  is even searched.
-
-Caveats:
-
-- It is possible to import a package that has no __init__ submodule;
-  this is not particularly useful but there may be useful applications
-  for it (e.g. to manipulate its search paths from the outside!).
-
-- There are no special provisions for os.chdir().  If you plan to use
-  os.chdir() before you have imported all your modules, it is better
-  not to have relative pathnames in sys.path.  (This could actually be
-  fixed by changing the implementation of path_join() in the hook to
-  absolutize paths.)
-
-- Packages and modules are introduced in sys.modules as soon as their
-  loading is started.  When the loading is terminated by an exception,
-  the sys.modules entries remain around.
-
-- There are no special measures to support mutually recursive modules,
-  but it will work under the same conditions where it works in the
-  flat module space system.
-
-- Sometimes dummy entries (whose value is None) are entered in
-  sys.modules, to indicate that a particular module does not exist --
-  this is done to speed up the expanding search algorithm when a
-  module residing at a higher level is repeatedly imported (Python
-  promises that importing a previously imported module is cheap!)
-
-- Although dynamically loaded extensions are allowed inside packages,
-  the current implementation (hardcoded in the interpreter) of their
-  initialization may cause problems if an extension invokes the
-  interpreter during its initialization.
-
-- reload() may find another version of the module only if it occurs on
-  the package search path.  Thus, it keeps the connection to the
-  package to which the module belongs, but may find a different file.
-
-XXX Need to have an explicit name for '', e.g. '__root__'.
-
-"""
-
-
-import imp
-import sys
-import __builtin__
-
-import ihooks
-from ihooks import ModuleLoader, ModuleImporter
-
-
-class PackageLoader(ModuleLoader):
-
-    """A subclass of ModuleLoader with package support.
-
-    find_module_in_dir() will succeed if there's a subdirectory with
-    the given name; load_module() will create a stub for a package and
-    load its __init__ module if it exists.
-
-    """
-
-    def find_module_in_dir(self, name, dir):
-        if dir is not None:
-            dirname = self.hooks.path_join(dir, name)
-            if self.hooks.path_isdir(dirname):
-                return None, dirname, ('', '', 'PACKAGE')
-        return ModuleLoader.find_module_in_dir(self, name, dir)
-
-    def load_module(self, name, stuff):
-        file, filename, info = stuff
-        suff, mode, type = info
-        if type == 'PACKAGE':
-            return self.load_package(name, stuff)
-        if sys.modules.has_key(name):
-            m = sys.modules[name]
-        else:
-            sys.modules[name] = m = imp.new_module(name)
-        self.set_parent(m)
-        if type == imp.C_EXTENSION and '.' in name:
-            return self.load_dynamic(name, stuff)
-        else:
-            return ModuleLoader.load_module(self, name, stuff)
-
-    def load_dynamic(self, name, stuff):
-        file, filename, (suff, mode, type) = stuff
-        # Hack around restriction in imp.load_dynamic()
-        i = name.rfind('.')
-        tail = name[i+1:]
-        if sys.modules.has_key(tail):
-            save = sys.modules[tail]
-        else:
-            save = None
-        sys.modules[tail] = imp.new_module(name)
-        try:
-            m = imp.load_dynamic(tail, filename, file)
-        finally:
-            if save:
-                sys.modules[tail] = save
-            else:
-                del sys.modules[tail]
-        sys.modules[name] = m
-        return m
-
-    def load_package(self, name, stuff):
-        file, filename, info = stuff
-        if sys.modules.has_key(name):
-            package = sys.modules[name]
-        else:
-            sys.modules[name] = package = imp.new_module(name)
-        package.__path__ = [filename]
-        self.init_package(package)
-        return package
-
-    def init_package(self, package):
-        self.set_parent(package)
-        self.set_domain(package)
-        self.call_init_module(package)
-
-    def set_parent(self, m):
-        name = m.__name__
-        if '.' in name:
-            name = name[:name.rfind('.')]
-        else:
-            name = ''
-        m.__ = sys.modules[name]
-
-    def set_domain(self, package):
-        name = package.__name__
-        package.__domain__ = domain = [name]
-        while '.' in name:
-            name = name[:name.rfind('.')]
-            domain.append(name)
-        if name:
-            domain.append('')
-
-    def call_init_module(self, package):
-        stuff = self.find_module('__init__', package.__path__)
-        if stuff:
-            m = self.load_module(package.__name__ + '.__init__', stuff)
-            package.__init__ = m
-
-
-class PackageImporter(ModuleImporter):
-
-    """Importer that understands packages and '__'."""
-
-    def __init__(self, loader = None, verbose = 0):
-        ModuleImporter.__init__(self,
-        loader or PackageLoader(None, verbose), verbose)
-
-    def import_module(self, name, globals={}, locals={}, fromlist=[]):
-        if globals.has_key('__'):
-            package = globals['__']
-        else:
-            # No calling context, assume in root package
-            package = sys.modules['']
-        if name[:3] in ('__.', '__'):
-            p = package
-            name = name[3:]
-            while name[:3] in ('__.', '__'):
-                p = p.__
-                name = name[3:]
-            if not name:
-                return self.finish(package, p, '', fromlist)
-            if '.' in name:
-                i = name.find('.')
-                name, tail = name[:i], name[i:]
-            else:
-                tail = ''
-            mname = p.__name__ and p.__name__+'.'+name or name
-            m = self.get1(mname)
-            return self.finish(package, m, tail, fromlist)
-        if '.' in name:
-            i = name.find('.')
-            name, tail = name[:i], name[i:]
-        else:
-            tail = ''
-        for pname in package.__domain__:
-            mname = pname and pname+'.'+name or name
-            m = self.get0(mname)
-            if m: break
-        else:
-            raise ImportError, "No such module %s" % name
-        return self.finish(m, m, tail, fromlist)
-
-    def finish(self, module, m, tail, fromlist):
-        # Got ....A; now get ....A.B.C.D
-        yname = m.__name__
-        if tail and sys.modules.has_key(yname + tail): # Fast path
-            yname, tail = yname + tail, ''
-            m = self.get1(yname)
-        while tail:
-            i = tail.find('.', 1)
-            if i > 0:
-                head, tail = tail[:i], tail[i:]
-            else:
-                head, tail = tail, ''
-            yname = yname + head
-            m = self.get1(yname)
-
-        # Got ....A.B.C.D; now finalize things depending on fromlist
-        if not fromlist:
-            return module
-        if '__' in fromlist:
-            raise ImportError, "Can't import __ from anywhere"
-        if not hasattr(m, '__path__'): return m
-        if '*' in fromlist:
-            raise ImportError, "Can't import * from a package"
-        for f in fromlist:
-            if hasattr(m, f): continue
-            fname = yname + '.' + f
-            self.get1(fname)
-        return m
-
-    def get1(self, name):
-        m = self.get(name)
-        if not m:
-            raise ImportError, "No module named %s" % name
-        return m
-
-    def get0(self, name):
-        m = self.get(name)
-        if not m:
-            sys.modules[name] = None
-        return m
-
-    def get(self, name):
-        # Internal routine to get or load a module when its parent exists
-        if sys.modules.has_key(name):
-            return sys.modules[name]
-        if '.' in name:
-            i = name.rfind('.')
-            head, tail = name[:i], name[i+1:]
-        else:
-            head, tail = '', name
-        path = sys.modules[head].__path__
-        stuff = self.loader.find_module(tail, path)
-        if not stuff:
-            return None
-        sys.modules[name] = m = self.loader.load_module(name, stuff)
-        if head:
-            setattr(sys.modules[head], tail, m)
-        return m
-
-    def reload(self, module):
-        name = module.__name__
-        if '.' in name:
-            i = name.rfind('.')
-            head, tail = name[:i], name[i+1:]
-            path = sys.modules[head].__path__
-        else:
-            tail = name
-            path = sys.modules[''].__path__
-        stuff = self.loader.find_module(tail, path)
-        if not stuff:
-            raise ImportError, "No module named %s" % name
-        return self.loader.load_module(name, stuff)
-
-    def unload(self, module):
-        if hasattr(module, '__path__'):
-            raise ImportError, "don't know how to unload packages yet"
-        PackageImporter.unload(self, module)
-
-    def install(self):
-        if not sys.modules.has_key(''):
-            sys.modules[''] = package = imp.new_module('')
-            package.__path__ = None
-            self.loader.init_package(package)
-            for m in sys.modules.values():
-                if not m: continue
-                if not hasattr(m, '__'):
-                    self.loader.set_parent(m)
-        ModuleImporter.install(self)
-
-
-def install(v = 0):
-    ihooks.install(PackageImporter(None, v))
-
-def uninstall():
-    ihooks.uninstall()
-
-def ni(v = 0):
-    install(v)
-
-def no():
-    uninstall()
-
-def test():
-    import pdb
-    try:
-        testproper()
-    except:
-        sys.last_type, sys.last_value, sys.last_traceback = sys.exc_info()
-        print
-        print sys.last_type, ':', sys.last_value
-        print
-        pdb.pm()
-
-def testproper():
-    install(1)
-    try:
-        import mactest
-        print dir(mactest)
-        raw_input('OK?')
-    finally:
-        uninstall()
-
-
-if __name__ == '__main__':
-    test()
-else:
-    install()
diff --git a/Lib/lib-old/packmail.py b/Lib/lib-old/packmail.py
deleted file mode 100644
index e569108..0000000
--- a/Lib/lib-old/packmail.py
+++ /dev/null
@@ -1,111 +0,0 @@
-# Module 'packmail' -- create a self-unpacking shell archive.
-
-# This module works on UNIX and on the Mac; the archives can unpack
-# themselves only on UNIX.
-
-import os
-from stat import ST_MTIME
-
-# Print help
-def help():
-    print 'All fns have a file open for writing as first parameter'
-    print 'pack(f, fullname, name): pack fullname as name'
-    print 'packsome(f, directory, namelist): selected files from directory'
-    print 'packall(f, directory): pack all files from directory'
-    print 'packnotolder(f, directory, name): pack all files from directory'
-    print '                        that are not older than a file there'
-    print 'packtree(f, directory): pack entire directory tree'
-
-# Pack one file
-def pack(outfp, file, name):
-    fp = open(file, 'r')
-    outfp.write('echo ' + name + '\n')
-    outfp.write('sed "s/^X//" >"' + name + '" <<"!"\n')
-    while 1:
-        line = fp.readline()
-        if not line: break
-        if line[-1:] != '\n':
-            line = line + '\n'
-        outfp.write('X' + line)
-    outfp.write('!\n')
-    fp.close()
-
-# Pack some files from a directory
-def packsome(outfp, dirname, names):
-    for name in names:
-        print name
-        file = os.path.join(dirname, name)
-        pack(outfp, file, name)
-
-# Pack all files from a directory
-def packall(outfp, dirname):
-    names = os.listdir(dirname)
-    try:
-        names.remove('.')
-    except:
-        pass
-    try:
-        names.remove('..')
-    except:
-        pass
-    names.sort()
-    packsome(outfp, dirname, names)
-
-# Pack all files from a directory that are not older than a give one
-def packnotolder(outfp, dirname, oldest):
-    names = os.listdir(dirname)
-    try:
-        names.remove('.')
-    except:
-        pass
-    try:
-        names.remove('..')
-    except:
-        pass
-    oldest = os.path.join(dirname, oldest)
-    st = os.stat(oldest)
-    mtime = st[ST_MTIME]
-    todo = []
-    for name in names:
-        print name, '...',
-        st = os.stat(os.path.join(dirname, name))
-        if st[ST_MTIME] >= mtime:
-            print 'Yes.'
-            todo.append(name)
-        else:
-            print 'No.'
-    todo.sort()
-    packsome(outfp, dirname, todo)
-
-# Pack a whole tree (no exceptions)
-def packtree(outfp, dirname):
-    print 'packtree', dirname
-    outfp.write('mkdir ' + unixfix(dirname) + '\n')
-    names = os.listdir(dirname)
-    try:
-        names.remove('.')
-    except:
-        pass
-    try:
-        names.remove('..')
-    except:
-        pass
-    subdirs = []
-    for name in names:
-        fullname = os.path.join(dirname, name)
-        if os.path.isdir(fullname):
-            subdirs.append(fullname)
-        else:
-            print 'pack', fullname
-            pack(outfp, fullname, unixfix(fullname))
-    for subdirname in subdirs:
-        packtree(outfp, subdirname)
-
-def unixfix(name):
-    comps = name.split(os.sep)
-    res = ''
-    for comp in comps:
-        if comp:
-            if res: res = res + '/'
-            res = res + comp
-    return res
diff --git a/Lib/lib-old/poly.py b/Lib/lib-old/poly.py
deleted file mode 100644
index fe6a1dc..0000000
--- a/Lib/lib-old/poly.py
+++ /dev/null
@@ -1,52 +0,0 @@
-# module 'poly' -- Polynomials
-
-# A polynomial is represented by a list of coefficients, e.g.,
-# [1, 10, 5] represents 1*x**0 + 10*x**1 + 5*x**2 (or 1 + 10x + 5x**2).
-# There is no way to suppress internal zeros; trailing zeros are
-# taken out by normalize().
-
-def normalize(p): # Strip unnecessary zero coefficients
-    n = len(p)
-    while n:
-        if p[n-1]: return p[:n]
-        n = n-1
-    return []
-
-def plus(a, b):
-    if len(a) < len(b): a, b = b, a # make sure a is the longest
-    res = a[:] # make a copy
-    for i in range(len(b)):
-        res[i] = res[i] + b[i]
-    return normalize(res)
-
-def minus(a, b):
-    neg_b = map(lambda x: -x, b[:])
-    return plus(a, neg_b)
-
-def one(power, coeff): # Representation of coeff * x**power
-    res = []
-    for i in range(power): res.append(0)
-    return res + [coeff]
-
-def times(a, b):
-    res = []
-    for i in range(len(a)):
-        for j in range(len(b)):
-            res = plus(res, one(i+j, a[i]*b[j]))
-    return res
-
-def power(a, n): # Raise polynomial a to the positive integral power n
-    if n == 0: return [1]
-    if n == 1: return a
-    if n/2*2 == n:
-        b = power(a, n/2)
-        return times(b, b)
-    return times(power(a, n-1), a)
-
-def der(a): # First derivative
-    res = a[1:]
-    for i in range(len(res)):
-        res[i] = res[i] * (i+1)
-    return res
-
-# Computing a primitive function would require rational arithmetic...
diff --git a/Lib/lib-old/rand.py b/Lib/lib-old/rand.py
deleted file mode 100644
index a557b69..0000000
--- a/Lib/lib-old/rand.py
+++ /dev/null
@@ -1,13 +0,0 @@
-# Module 'rand'
-# Don't use unless you want compatibility with C's rand()!
-
-import whrandom
-
-def srand(seed):
-    whrandom.seed(seed%256, seed/256%256, seed/65536%256)
-
-def rand():
-    return int(whrandom.random() * 32768.0) % 32768
-
-def choice(seq):
-    return seq[rand() % len(seq)]
diff --git a/Lib/lib-old/statcache.py b/Lib/lib-old/statcache.py
deleted file mode 100644
index d478393..0000000
--- a/Lib/lib-old/statcache.py
+++ /dev/null
@@ -1,82 +0,0 @@
-"""Maintain a cache of stat() information on files.
-
-There are functions to reset the cache or to selectively remove items.
-"""
-
-import warnings
-warnings.warn("The statcache module is obsolete.  Use os.stat() instead.",
-              DeprecationWarning)
-del warnings
-
-import os as _os
-from stat import *
-
-__all__ = ["stat","reset","forget","forget_prefix","forget_dir",
-           "forget_except_prefix","isdir"]
-
-# The cache.  Keys are pathnames, values are os.stat outcomes.
-# Remember that multiple threads may be calling this!  So, e.g., that
-# path in cache returns 1 doesn't mean the cache will still contain
-# path on the next line.  Code defensively.
-
-cache = {}
-
-def stat(path):
-    """Stat a file, possibly out of the cache."""
-    ret = cache.get(path, None)
-    if ret is None:
-        cache[path] = ret = _os.stat(path)
-    return ret
-
-def reset():
-    """Clear the cache."""
-    cache.clear()
-
-# For thread saftey, always use forget() internally too.
-def forget(path):
-    """Remove a given item from the cache, if it exists."""
-    try:
-        del cache[path]
-    except KeyError:
-        pass
-
-def forget_prefix(prefix):
-    """Remove all pathnames with a given prefix."""
-    for path in cache.keys():
-        if path.startswith(prefix):
-            forget(path)
-
-def forget_dir(prefix):
-    """Forget a directory and all entries except for entries in subdirs."""
-
-    # Remove trailing separator, if any.  This is tricky to do in a
-    # x-platform way.  For example, Windows accepts both / and \ as
-    # separators, and if there's nothing *but* a separator we want to
-    # preserve that this is the root.  Only os.path has the platform
-    # knowledge we need.
-    from os.path import split, join
-    prefix = split(join(prefix, "xxx"))[0]
-    forget(prefix)
-    for path in cache.keys():
-        # First check that the path at least starts with the prefix, so
-        # that when it doesn't we can avoid paying for split().
-        if path.startswith(prefix) and split(path)[0] == prefix:
-            forget(path)
-
-def forget_except_prefix(prefix):
-    """Remove all pathnames except with a given prefix.
-
-    Normally used with prefix = '/' after a chdir().
-    """
-
-    for path in cache.keys():
-        if not path.startswith(prefix):
-            forget(path)
-
-def isdir(path):
-    """Return True if directory, else False."""
-    try:
-        st = stat(path)
-    except _os.error:
-        return False
-    return S_ISDIR(st.st_mode)
diff --git a/Lib/lib-old/tb.py b/Lib/lib-old/tb.py
deleted file mode 100644
index 9063559..0000000
--- a/Lib/lib-old/tb.py
+++ /dev/null
@@ -1,177 +0,0 @@
-# Print tracebacks, with a dump of local variables.
-# Also an interactive stack trace browser.
-# Note -- this module is obsolete -- use pdb.pm() instead.
-
-import sys
-import os
-from stat import *
-import linecache
-
-def br(): browser(sys.last_traceback)
-
-def tb(): printtb(sys.last_traceback)
-
-def browser(tb):
-    if not tb:
-        print 'No traceback.'
-        return
-    tblist = []
-    while tb:
-        tblist.append(tb)
-        tb = tb.tb_next
-    ptr = len(tblist)-1
-    tb = tblist[ptr]
-    while 1:
-        if tb != tblist[ptr]:
-            tb = tblist[ptr]
-            print `ptr` + ':',
-            printtbheader(tb)
-        try:
-            line = raw_input('TB: ')
-        except KeyboardInterrupt:
-            print '\n[Interrupted]'
-            break
-        except EOFError:
-            print '\n[EOF]'
-            break
-        cmd = line.strip()
-        if cmd:
-            if cmd == 'quit':
-                break
-            elif cmd == 'list':
-                browserlist(tb)
-            elif cmd == 'up':
-                if ptr-1 >= 0: ptr = ptr-1
-                else: print 'Bottom of stack.'
-            elif cmd == 'down':
-                if ptr+1 < len(tblist): ptr = ptr+1
-                else: print 'Top of stack.'
-            elif cmd == 'locals':
-                printsymbols(tb.tb_frame.f_locals)
-            elif cmd == 'globals':
-                printsymbols(tb.tb_frame.f_globals)
-            elif cmd in ('?', 'help'):
-                browserhelp()
-            else:
-                browserexec(tb, cmd)
-
-def browserlist(tb):
-    filename = tb.tb_frame.f_code.co_filename
-    lineno = tb.tb_lineno
-    last = lineno
-    first = max(1, last-10)
-    for i in range(first, last+1):
-        if i == lineno: prefix = '***' + `i`.rjust(4) + ':'
-        else: prefix = `i`.rjust(7) + ':'
-        line = linecache.getline(filename, i)
-        if line[-1:] == '\n': line = line[:-1]
-        print prefix + line
-
-def browserexec(tb, cmd):
-    locals = tb.tb_frame.f_locals
-    globals = tb.tb_frame.f_globals
-    try:
-        exec cmd+'\n' in globals, locals
-    except:
-        t, v = sys.exc_info()[:2]
-        print '*** Exception:',
-        if type(t) is type(''):
-            print t,
-        else:
-            print t.__name__,
-        if v is not None:
-            print ':', v,
-        print
-        print 'Type help to get help.'
-
-def browserhelp():
-    print
-    print '    This is the traceback browser.  Commands are:'
-    print '        up      : move one level up in the call stack'
-    print '        down    : move one level down in the call stack'
-    print '        locals  : print all local variables at this level'
-    print '        globals : print all global variables at this level'
-    print '        list    : list source code around the failure'
-    print '        help    : print help (what you are reading now)'
-    print '        quit    : back to command interpreter'
-    print '    Typing any other 1-line statement will execute it'
-    print '    using the current level\'s symbol tables'
-    print
-
-def printtb(tb):
-    while tb:
-        print1tb(tb)
-        tb = tb.tb_next
-
-def print1tb(tb):
-    printtbheader(tb)
-    if tb.tb_frame.f_locals is not tb.tb_frame.f_globals:
-        printsymbols(tb.tb_frame.f_locals)
-
-def printtbheader(tb):
-    filename = tb.tb_frame.f_code.co_filename
-    lineno = tb.tb_lineno
-    info = '"' + filename + '"(' + `lineno` + ')'
-    line = linecache.getline(filename, lineno)
-    if line:
-        info = info + ': ' + line.strip()
-    print info
-
-def printsymbols(d):
-    keys = d.keys()
-    keys.sort()
-    for name in keys:
-        print '  ' + name.ljust(12) + ':',
-        printobject(d[name], 4)
-        print
-
-def printobject(v, maxlevel):
-    if v is None:
-        print 'None',
-    elif type(v) in (type(0), type(0.0)):
-        print v,
-    elif type(v) is type(''):
-        if len(v) > 20:
-            print `v[:17] + '...'`,
-        else:
-            print `v`,
-    elif type(v) is type(()):
-        print '(',
-        printlist(v, maxlevel)
-        print ')',
-    elif type(v) is type([]):
-        print '[',
-        printlist(v, maxlevel)
-        print ']',
-    elif type(v) is type({}):
-        print '{',
-        printdict(v, maxlevel)
-        print '}',
-    else:
-        print v,
-
-def printlist(v, maxlevel):
-    n = len(v)
-    if n == 0: return
-    if maxlevel <= 0:
-        print '...',
-        return
-    for i in range(min(6, n)):
-        printobject(v[i], maxlevel-1)
-        if i+1 < n: print ',',
-    if n > 6: print '...',
-
-def printdict(v, maxlevel):
-    keys = v.keys()
-    n = len(keys)
-    if n == 0: return
-    if maxlevel <= 0:
-        print '...',
-        return
-    keys.sort()
-    for i in range(min(6, n)):
-        key = keys[i]
-        print `key` + ':',
-        printobject(v[key], maxlevel-1)
-        if i+1 < n: print ',',
-    if n > 6: print '...',
diff --git a/Lib/lib-old/tzparse.py b/Lib/lib-old/tzparse.py
deleted file mode 100644
index 12468b5..0000000
--- a/Lib/lib-old/tzparse.py
+++ /dev/null
@@ -1,98 +0,0 @@
-"""Parse a timezone specification."""
-
-# XXX Unfinished.
-# XXX Only the typical form "XXXhhYYY;ddd/hh,ddd/hh" is currently supported.
-
-import warnings
-warnings.warn(
-    "The tzparse module is obsolete and will disappear in the future",
-    DeprecationWarning)
-
-tzpat = ('^([A-Z][A-Z][A-Z])([-+]?[0-9]+)([A-Z][A-Z][A-Z]);'
-          '([0-9]+)/([0-9]+),([0-9]+)/([0-9]+)$')
-
-tzprog = None
-
-def tzparse(tzstr):
-    """Given a timezone spec, return a tuple of information
-    (tzname, delta, dstname, daystart, hourstart, dayend, hourend),
-    where 'tzname' is the name of the timezone, 'delta' is the offset
-    in hours from GMT, 'dstname' is the name of the daylight-saving
-    timezone, and 'daystart'/'hourstart' and 'dayend'/'hourend'
-    specify the starting and ending points for daylight saving time."""
-    global tzprog
-    if tzprog is None:
-        import re
-        tzprog = re.compile(tzpat)
-    match = tzprog.match(tzstr)
-    if not match:
-        raise ValueError, 'not the TZ syntax I understand'
-    subs = []
-    for i in range(1, 8):
-        subs.append(match.group(i))
-    for i in (1, 3, 4, 5, 6):
-        subs[i] = eval(subs[i])
-    [tzname, delta, dstname, daystart, hourstart, dayend, hourend] = subs
-    return (tzname, delta, dstname, daystart, hourstart, dayend, hourend)
-
-def tzlocaltime(secs, params):
-    """Given a Unix time in seconds and a tuple of information about
-    a timezone as returned by tzparse(), return the local time in the
-    form (year, month, day, hour, min, sec, yday, wday, tzname)."""
-    import time
-    (tzname, delta, dstname, daystart, hourstart, dayend, hourend) = params
-    year, month, days, hours, mins, secs, yday, wday, isdst = \
-            time.gmtime(secs - delta*3600)
-    if (daystart, hourstart) <= (yday+1, hours) < (dayend, hourend):
-        tzname = dstname
-        hours = hours + 1
-    return year, month, days, hours, mins, secs, yday, wday, tzname
-
-def tzset():
-    """Determine the current timezone from the "TZ" environment variable."""
-    global tzparams, timezone, altzone, daylight, tzname
-    import os
-    tzstr = os.environ['TZ']
-    tzparams = tzparse(tzstr)
-    timezone = tzparams[1] * 3600
-    altzone = timezone - 3600
-    daylight = 1
-    tzname = tzparams[0], tzparams[2]
-
-def isdst(secs):
-    """Return true if daylight-saving time is in effect for the given
-    Unix time in the current timezone."""
-    import time
-    (tzname, delta, dstname, daystart, hourstart, dayend, hourend) = \
-            tzparams
-    year, month, days, hours, mins, secs, yday, wday, isdst = \
-            time.gmtime(secs - delta*3600)
-    return (daystart, hourstart) <= (yday+1, hours) < (dayend, hourend)
-
-tzset()
-
-def localtime(secs):
-    """Get the local time in the current timezone."""
-    return tzlocaltime(secs, tzparams)
-
-def test():
-    from time import asctime, gmtime
-    import time, sys
-    now = time.time()
-    x = localtime(now)
-    tm = x[:-1] + (0,)
-    print 'now =', now, '=', asctime(tm), x[-1]
-    now = now - now % (24*3600)
-    if sys.argv[1:]: now = now + eval(sys.argv[1])
-    x = gmtime(now)
-    tm = x[:-1] + (0,)
-    print 'gmtime =', now, '=', asctime(tm), 'yday =', x[-2]
-    jan1 = now - x[-2]*24*3600
-    x = localtime(jan1)
-    tm = x[:-1] + (0,)
-    print 'jan1 =', jan1, '=', asctime(tm), x[-1]
-    for d in range(85, 95) + range(265, 275):
-        t = jan1 + d*24*3600
-        x = localtime(t)
-        tm = x[:-1] + (0,)
-        print 'd =', d, 't =', t, '=', asctime(tm), x[-1]
diff --git a/Lib/lib-old/util.py b/Lib/lib-old/util.py
deleted file mode 100644
index 104af1e..0000000
--- a/Lib/lib-old/util.py
+++ /dev/null
@@ -1,25 +0,0 @@
-# Module 'util' -- some useful functions that don't fit elsewhere
-
-# NB: These are now built-in functions, but this module is provided
-# for compatibility.  Don't use in new programs unless you need backward
-# compatibility (i.e. need to run with old interpreters).
-
-
-# Remove an item from a list.
-# No complaints if it isn't in the list at all.
-# If it occurs more than once, remove the first occurrence.
-#
-def remove(item, list):
-    if item in list: list.remove(item)
-
-
-# Return a string containing a file's contents.
-#
-def readfile(fn):
-    return readopenfile(open(fn, 'r'))
-
-
-# Read an open file until EOF.
-#
-def readopenfile(fp):
-    return fp.read()
diff --git a/Lib/lib-old/whatsound.py b/Lib/lib-old/whatsound.py
deleted file mode 100644
index 1b1df23..0000000
--- a/Lib/lib-old/whatsound.py
+++ /dev/null
@@ -1 +0,0 @@
-from sndhdr import *
diff --git a/Lib/lib-old/whrandom.py b/Lib/lib-old/whrandom.py
deleted file mode 100644
index bc0d1a4..0000000
--- a/Lib/lib-old/whrandom.py
+++ /dev/null
@@ -1,144 +0,0 @@
-"""Wichman-Hill random number generator.
-
-Wichmann, B. A. & Hill, I. D. (1982)
-Algorithm AS 183:
-An efficient and portable pseudo-random number generator
-Applied Statistics 31 (1982) 188-190
-
-see also:
-        Correction to Algorithm AS 183
-        Applied Statistics 33 (1984) 123
-
-        McLeod, A. I. (1985)
-        A remark on Algorithm AS 183
-        Applied Statistics 34 (1985),198-200
-
-
-USE:
-whrandom.random()       yields double precision random numbers
-                        uniformly distributed between 0 and 1.
-
-whrandom.seed(x, y, z)  must be called before whrandom.random()
-                        to seed the generator
-
-There is also an interface to create multiple independent
-random generators, and to choose from other ranges.
-
-
-
-Multi-threading note: the random number generator used here is not
-thread-safe; it is possible that nearly simultaneous calls in
-different theads return the same random value.  To avoid this, you
-have to use a lock around all calls.  (I didn't want to slow this
-down in the serial case by using a lock here.)
-"""
-
-import warnings
-warnings.warn("the whrandom module is deprecated; please use the random module",
-              DeprecationWarning)
-
-# Translated by Guido van Rossum from C source provided by
-# Adrian Baddeley.
-
-
-class whrandom:
-    def __init__(self, x = 0, y = 0, z = 0):
-        """Initialize an instance.
-        Without arguments, initialize from current time.
-        With arguments (x, y, z), initialize from them."""
-        self.seed(x, y, z)
-
-    def seed(self, x = 0, y = 0, z = 0):
-        """Set the seed from (x, y, z).
-        These must be integers in the range [0, 256)."""
-        if not type(x) == type(y) == type(z) == type(0):
-            raise TypeError, 'seeds must be integers'
-        if not (0 <= x < 256 and 0 <= y < 256 and 0 <= z < 256):
-            raise ValueError, 'seeds must be in range(0, 256)'
-        if 0 == x == y == z:
-            # Initialize from current time
-            import time
-            t = long(time.time() * 256)
-            t = int((t&0xffffff) ^ (t>>24))
-            t, x = divmod(t, 256)
-            t, y = divmod(t, 256)
-            t, z = divmod(t, 256)
-        # Zero is a poor seed, so substitute 1
-        self._seed = (x or 1, y or 1, z or 1)
-
-    def random(self):
-        """Get the next random number in the range [0.0, 1.0)."""
-        # This part is thread-unsafe:
-        # BEGIN CRITICAL SECTION
-        x, y, z = self._seed
-        #
-        x = (171 * x) % 30269
-        y = (172 * y) % 30307
-        z = (170 * z) % 30323
-        #
-        self._seed = x, y, z
-        # END CRITICAL SECTION
-        #
-        return (x/30269.0 + y/30307.0 + z/30323.0) % 1.0
-
-    def uniform(self, a, b):
-        """Get a random number in the range [a, b)."""
-        return a + (b-a) * self.random()
-
-    def randint(self, a, b):
-        """Get a random integer in the range [a, b] including
-        both end points.
-
-        (Deprecated; use randrange below.)"""
-        return self.randrange(a, b+1)
-
-    def choice(self, seq):
-        """Choose a random element from a non-empty sequence."""
-        return seq[int(self.random() * len(seq))]
-
-    def randrange(self, start, stop=None, step=1, int=int, default=None):
-        """Choose a random item from range(start, stop[, step]).
-
-        This fixes the problem with randint() which includes the
-        endpoint; in Python this is usually not what you want.
-        Do not supply the 'int' and 'default' arguments."""
-        # This code is a bit messy to make it fast for the
-        # common case while still doing adequate error checking
-        istart = int(start)
-        if istart != start:
-            raise ValueError, "non-integer arg 1 for randrange()"
-        if stop is default:
-            if istart > 0:
-                return int(self.random() * istart)
-            raise ValueError, "empty range for randrange()"
-        istop = int(stop)
-        if istop != stop:
-            raise ValueError, "non-integer stop for randrange()"
-        if step == 1:
-            if istart < istop:
-                return istart + int(self.random() *
-                                   (istop - istart))
-            raise ValueError, "empty range for randrange()"
-        istep = int(step)
-        if istep != step:
-            raise ValueError, "non-integer step for randrange()"
-        if istep > 0:
-            n = (istop - istart + istep - 1) / istep
-        elif istep < 0:
-            n = (istop - istart + istep + 1) / istep
-        else:
-            raise ValueError, "zero step for randrange()"
-
-        if n <= 0:
-            raise ValueError, "empty range for randrange()"
-        return istart + istep*int(self.random() * n)
-
-
-# Initialize from the current time
-_inst = whrandom()
-seed = _inst.seed
-random = _inst.random
-uniform = _inst.uniform
-randint = _inst.randint
-choice = _inst.choice
-randrange = _inst.randrange
diff --git a/Lib/lib-old/zmod.py b/Lib/lib-old/zmod.py
deleted file mode 100644
index 55f49df..0000000
--- a/Lib/lib-old/zmod.py
+++ /dev/null
@@ -1,94 +0,0 @@
-# module 'zmod'
-
-# Compute properties of mathematical "fields" formed by taking
-# Z/n (the whole numbers modulo some whole number n) and an
-# irreducible polynomial (i.e., a polynomial with only complex zeros),
-# e.g., Z/5 and X**2 + 2.
-#
-# The field is formed by taking all possible linear combinations of
-# a set of d base vectors (where d is the degree of the polynomial).
-#
-# Note that this procedure doesn't yield a field for all combinations
-# of n and p: it may well be that some numbers have more than one
-# inverse and others have none.  This is what we check.
-#
-# Remember that a field is a ring where each element has an inverse.
-# A ring has commutative addition and multiplication, a zero and a one:
-# 0*x = x*0 = 0, 0+x = x+0 = x, 1*x = x*1 = x.  Also, the distributive
-# property holds: a*(b+c) = a*b + b*c.
-# (XXX I forget if this is an axiom or follows from the rules.)
-
-import poly
-
-
-# Example N and polynomial
-
-N = 5
-P = poly.plus(poly.one(0, 2), poly.one(2, 1)) # 2 + x**2
-
-
-# Return x modulo y.  Returns >= 0 even if x < 0.
-
-def mod(x, y):
-    return divmod(x, y)[1]
-
-
-# Normalize a polynomial modulo n and modulo p.
-
-def norm(a, n, p):
-    a = poly.modulo(a, p)
-    a = a[:]
-    for i in range(len(a)): a[i] = mod(a[i], n)
-    a = poly.normalize(a)
-    return a
-
-
-# Make a list of all n^d elements of the proposed field.
-
-def make_all(mat):
-    all = []
-    for row in mat:
-        for a in row:
-            all.append(a)
-    return all
-
-def make_elements(n, d):
-    if d == 0: return [poly.one(0, 0)]
-    sub = make_elements(n, d-1)
-    all = []
-    for a in sub:
-        for i in range(n):
-            all.append(poly.plus(a, poly.one(d-1, i)))
-    return all
-
-def make_inv(all, n, p):
-    x = poly.one(1, 1)
-    inv = []
-    for a in all:
-        inv.append(norm(poly.times(a, x), n, p))
-    return inv
-
-def checkfield(n, p):
-    all = make_elements(n, len(p)-1)
-    inv = make_inv(all, n, p)
-    all1 = all[:]
-    inv1 = inv[:]
-    all1.sort()
-    inv1.sort()
-    if all1 == inv1: print 'BINGO!'
-    else:
-        print 'Sorry:', n, p
-        print all
-        print inv
-
-def rj(s, width):
-    if type(s) is not type(''): s = `s`
-    n = len(s)
-    if n >= width: return s
-    return ' '*(width - n) + s
-
-def lj(s, width):
-    if type(s) is not type(''): s = `s`
-    n = len(s)
-    if n >= width: return s
-    return s + ' '*(width - n)
diff --git a/Lib/lib-tk/Tix.py b/Lib/lib-tk/Tix.py
index 2fb1307..14c3c24 100755
--- a/Lib/lib-tk/Tix.py
+++ b/Lib/lib-tk/Tix.py
@@ -1541,8 +1541,8 @@
         '''This command is used to indicate whether the entry given by
      entryPath has children entries and whether the children are visible. mode
      must be one of open, close or none. If mode is set to open, a (+)
-     indicator is drawn next to the entry. If mode is set to close, a (-)
-     indicator is drawn next to the entry. If mode is set to none, no
+     indicator is drawn next the the entry. If mode is set to close, a (-)
+     indicator is drawn next the the entry. If mode is set to none, no
      indicators will be drawn for this entry. The default mode is none. The
      open mode indicates the entry has hidden children and this entry can be
      opened by the user. The close mode indicates that all the children of the
@@ -1773,6 +1773,7 @@
     # FIXME: It should inherit -superclass tixScrolledWidget
     pass
 
+
 class Grid(TixWidget):
     '''The Tix Grid command creates a new window  and makes it into a
     tixGrid widget. Additional options, may be specified on the command
@@ -1787,26 +1788,101 @@
     border.
 
     Subwidgets - None'''
-    pass
+    # valid specific resources as of Tk 8.4
+    # editdonecmd, editnotifycmd, floatingcols, floatingrows, formatcmd,
+    # highlightbackground, highlightcolor, leftmargin, itemtype, selectmode,
+    # selectunit, topmargin,
+    def __init__(self, master=None, cnf={}, **kw):
+        static= []
+        self.cnf= cnf
+        TixWidget.__init__(self, master, 'tixGrid', static, cnf, kw)
 
+    # valid options as of Tk 8.4
+    # anchor, bdtype, cget, configure, delete, dragsite, dropsite, entrycget, edit
+    # entryconfigure, format, geometryinfo, info, index, move, nearest, selection
+    # set, size, unset, xview, yview
     # def anchor option ?args ...?
+    def anchor_get(self):
+        "Get the (x,y) coordinate of the current anchor cell"
+        return self._getints(self.tk.call(self, 'anchor', 'get'))
+
     # def bdtype
     # def delete dim from ?to?
+    def delete_row(self, from_, to=None):
+        """Delete rows between from_ and to inclusive.
+        If to is not provided,  delete only row at from_"""
+        if to is None:
+            self.tk.call(self, 'delete', 'row', from_)
+        else:
+            self.tk.call(self, 'delete', 'row', from_, to)
+    def delete_column(self, from_, to=None):
+        """Delete columns between from_ and to inclusive.
+        If to is not provided,  delete only column at from_"""
+        if to is None:
+            self.tk.call(self, 'delete', 'column', from_)
+        else:
+            self.tk.call(self, 'delete', 'column', from_, to)
     # def edit apply
     # def edit set x y
-    # def entrycget x y option
-    # def entryconfigure x y ?option? ?value option value ...?
+
+    def entrycget(self, x, y, option):
+        "Get the option value for cell at (x,y)"
+        return self.tk.call(self, 'entrycget', x, y, option)
+
+    def entryconfigure(self, x, y, **kw):
+        return self.tk.call(self, 'entryconfigure', x, y, *self._options(None, kw))
     # def format
     # def index
+
+    def info_exists(self, x, y):
+        "Return True if display item exists at (x,y)"
+        return bool(int(self.tk.call(self, 'info', 'exists', x, y)))
+
+    def info_bbox(self, x, y):
+        # This seems to always return '', at least for 'text' displayitems
+        return self.tk.call(self, 'info', 'bbox', x, y)
+
+    def nearest(self, x, y):
+        "Return coordinate of cell nearest pixel coordinate (x,y)"
+        return self._getints(self.tk.call(self, 'nearest', x, y))
+
+    # def selection adjust
+    # def selection clear
+    # def selection includes
+    # def selection set
+    # def selection toggle
     # def move dim from to offset
-    # def set x y ?-itemtype type? ?option value...?
+
+    def set(self, x, y, itemtype=None, **kw):
+        args= self._options(self.cnf, kw)
+        if itemtype is not None:
+            args= ('-itemtype', itemtype) + args
+        self.tk.call(self, 'set', x, y, *args)
+
     # def size dim index ?option value ...?
     # def unset x y
-    # def xview
-    # def yview
 
-class ScrolledGrid(TixWidget):
+    def xview(self):
+        return self._getdoubles(self.tk.call(self, 'xview'))
+    def xview_moveto(self, fraction):
+        self.tk.call(self,'xview', 'moveto', fraction)
+    def xview_scroll(self, count, what="units"):
+        "Scroll right (count>0) or left <count> of units|pages"
+        self.tk.call(self, 'xview', 'scroll', count, what)
+
+    def yview(self):
+        return self._getdoubles(self.tk.call(self, 'yview'))
+    def yview_moveto(self, fraction):
+        self.tk.call(self,'ysview', 'moveto', fraction)
+    def yview_scroll(self, count, what="units"):
+        "Scroll down (count>0) or up <count> of units|pages"
+        self.tk.call(self, 'yview', 'scroll', count, what)
+
+class ScrolledGrid(Grid):
     '''Scrolled Grid widgets'''
 
     # FIXME: It should inherit -superclass tixScrolledWidget
-    pass
+    def __init__(self, master=None, cnf={}, **kw):
+        static= []
+        self.cnf= cnf
+        TixWidget.__init__(self, master, 'tixScrolledGrid', static, cnf, kw)
diff --git a/Lib/lib-tk/Tkinter.py b/Lib/lib-tk/Tkinter.py
index d600cd7..0ba954e 100644
--- a/Lib/lib-tk/Tkinter.py
+++ b/Lib/lib-tk/Tkinter.py
@@ -449,18 +449,15 @@
             # I'd rather use time.sleep(ms*0.001)
             self.tk.call('after', ms)
         else:
-            # XXX Disgusting hack to clean up after calling func
-            tmp = []
-            def callit(func=func, args=args, self=self, tmp=tmp):
+            def callit():
                 try:
                     func(*args)
                 finally:
                     try:
-                        self.deletecommand(tmp[0])
+                        self.deletecommand(name)
                     except TclError:
                         pass
             name = self._register(callit)
-            tmp.append(name)
             return self.tk.call('after', ms, name)
     def after_idle(self, func, *args):
         """Call FUNC once if the Tcl main loop has no event to
@@ -486,7 +483,24 @@
     def bell(self, displayof=0):
         """Ring a display's bell."""
         self.tk.call(('bell',) + self._displayof(displayof))
+
     # Clipboard handling:
+    def clipboard_get(self, **kw):
+        """Retrieve data from the clipboard on window's display.
+
+        The window keyword defaults to the root window of the Tkinter
+        application.
+
+        The type keyword specifies the form in which the data is
+        to be returned and should be an atom name such as STRING
+        or FILE_NAME.  Type defaults to STRING.
+
+        This command is equivalent to:
+
+        selection_get(CLIPBOARD)
+        """
+        return self.tk.call(('clipboard', 'get') + self._options(kw))
+
     def clipboard_clear(self, **kw):
         """Clear the data in the Tk clipboard.
 
diff --git a/Lib/lib-tk/tkFont.py b/Lib/lib-tk/tkFont.py
index 5b5a6ba..15dea2e 100644
--- a/Lib/lib-tk/tkFont.py
+++ b/Lib/lib-tk/tkFont.py
@@ -108,7 +108,9 @@
         try:
             if self.delete_font:
                 self._call("font", "delete", self.name)
-        except (AttributeError, Tkinter.TclError):
+        except (KeyboardInterrupt, SystemExit):
+            raise
+        except Exception:
             pass
 
     def copy(self):
diff --git a/Lib/linecache.py b/Lib/linecache.py
index 2ccc6c6..f49695a 100644
--- a/Lib/linecache.py
+++ b/Lib/linecache.py
@@ -10,8 +10,8 @@
 
 __all__ = ["getline", "clearcache", "checkcache"]
 
-def getline(filename, lineno):
-    lines = getlines(filename)
+def getline(filename, lineno, module_globals=None):
+    lines = getlines(filename, module_globals)
     if 1 <= lineno <= len(lines):
         return lines[lineno-1]
     else:
@@ -30,14 +30,14 @@
     cache = {}
 
 
-def getlines(filename):
+def getlines(filename, module_globals=None):
     """Get the lines for a file from the cache.
     Update the cache if it doesn't contain an entry for this file already."""
 
     if filename in cache:
         return cache[filename][2]
     else:
-        return updatecache(filename)
+        return updatecache(filename, module_globals)
 
 
 def checkcache(filename=None):
@@ -54,6 +54,8 @@
 
     for filename in filenames:
         size, mtime, lines, fullname = cache[filename]
+        if mtime is None:
+            continue   # no-op for files loaded via a __loader__
         try:
             stat = os.stat(fullname)
         except os.error:
@@ -63,7 +65,7 @@
             del cache[filename]
 
 
-def updatecache(filename):
+def updatecache(filename, module_globals=None):
     """Update a cache entry and return its list of lines.
     If something's wrong, print a message, discard the cache entry,
     and return an empty list."""
@@ -72,12 +74,34 @@
         del cache[filename]
     if not filename or filename[0] + filename[-1] == '<>':
         return []
+
     fullname = filename
     try:
         stat = os.stat(fullname)
     except os.error, msg:
-        # Try looking through the module search path.
         basename = os.path.split(filename)[1]
+
+        # Try for a __loader__, if available
+        if module_globals and '__loader__' in module_globals:
+            name = module_globals.get('__name__')
+            loader = module_globals['__loader__']
+            get_source = getattr(loader, 'get_source', None)
+
+            if name and get_source:
+                if basename.startswith(name.split('.')[-1]+'.'):
+                    try:
+                        data = get_source(name)
+                    except (ImportError, IOError):
+                        pass
+                    else:
+                        cache[filename] = (
+                            len(data), None,
+                            [line+'\n' for line in data.splitlines()], fullname
+                        )
+                        return cache[filename][2]
+
+        # Try looking through the module search path.
+
         for dirname in sys.path:
             # When using imputil, sys.path may contain things other than
             # strings; ignore them when it happens.
diff --git a/Lib/logging/__init__.py b/Lib/logging/__init__.py
index 7db0dab..9798931 100644
--- a/Lib/logging/__init__.py
+++ b/Lib/logging/__init__.py
@@ -719,7 +719,7 @@
         If strm is not specified, sys.stderr is used.
         """
         Handler.__init__(self)
-        if not strm:
+        if strm is None:
             strm = sys.stderr
         self.stream = strm
         self.formatter = None
diff --git a/Lib/mimetools.py b/Lib/mimetools.py
index 0b698ac..8c1cc19 100644
--- a/Lib/mimetools.py
+++ b/Lib/mimetools.py
@@ -127,7 +127,10 @@
     import time
     if _prefix is None:
         import socket
-        hostid = socket.gethostbyname(socket.gethostname())
+        try:
+            hostid = socket.gethostbyname(socket.gethostname())
+        except socket.gaierror:
+            hostid = '127.0.0.1'
         try:
             uid = repr(os.getuid())
         except AttributeError:
diff --git a/Lib/mimetypes.py b/Lib/mimetypes.py
index 7a8b765..bee2ff7 100644
--- a/Lib/mimetypes.py
+++ b/Lib/mimetypes.py
@@ -315,162 +315,171 @@
     return db.types_map[True]
 
 
-suffix_map = {
-    '.tgz': '.tar.gz',
-    '.taz': '.tar.gz',
-    '.tz': '.tar.gz',
-    }
+def _default_mime_types():
+    global suffix_map
+    global encodings_map
+    global types_map
+    global common_types
 
-encodings_map = {
-    '.gz': 'gzip',
-    '.Z': 'compress',
-    }
+    suffix_map = {
+        '.tgz': '.tar.gz',
+        '.taz': '.tar.gz',
+        '.tz': '.tar.gz',
+        }
 
-# Before adding new types, make sure they are either registered with IANA, at
-# http://www.isi.edu/in-notes/iana/assignments/media-types
-# or extensions, i.e. using the x- prefix
+    encodings_map = {
+        '.gz': 'gzip',
+        '.Z': 'compress',
+        }
 
-# If you add to these, please keep them sorted!
-types_map = {
-    '.a'      : 'application/octet-stream',
-    '.ai'     : 'application/postscript',
-    '.aif'    : 'audio/x-aiff',
-    '.aifc'   : 'audio/x-aiff',
-    '.aiff'   : 'audio/x-aiff',
-    '.au'     : 'audio/basic',
-    '.avi'    : 'video/x-msvideo',
-    '.bat'    : 'text/plain',
-    '.bcpio'  : 'application/x-bcpio',
-    '.bin'    : 'application/octet-stream',
-    '.bmp'    : 'image/x-ms-bmp',
-    '.c'      : 'text/plain',
-    # Duplicates :(
-    '.cdf'    : 'application/x-cdf',
-    '.cdf'    : 'application/x-netcdf',
-    '.cpio'   : 'application/x-cpio',
-    '.csh'    : 'application/x-csh',
-    '.css'    : 'text/css',
-    '.dll'    : 'application/octet-stream',
-    '.doc'    : 'application/msword',
-    '.dot'    : 'application/msword',
-    '.dvi'    : 'application/x-dvi',
-    '.eml'    : 'message/rfc822',
-    '.eps'    : 'application/postscript',
-    '.etx'    : 'text/x-setext',
-    '.exe'    : 'application/octet-stream',
-    '.gif'    : 'image/gif',
-    '.gtar'   : 'application/x-gtar',
-    '.h'      : 'text/plain',
-    '.hdf'    : 'application/x-hdf',
-    '.htm'    : 'text/html',
-    '.html'   : 'text/html',
-    '.ief'    : 'image/ief',
-    '.jpe'    : 'image/jpeg',
-    '.jpeg'   : 'image/jpeg',
-    '.jpg'    : 'image/jpeg',
-    '.js'     : 'application/x-javascript',
-    '.ksh'    : 'text/plain',
-    '.latex'  : 'application/x-latex',
-    '.m1v'    : 'video/mpeg',
-    '.man'    : 'application/x-troff-man',
-    '.me'     : 'application/x-troff-me',
-    '.mht'    : 'message/rfc822',
-    '.mhtml'  : 'message/rfc822',
-    '.mif'    : 'application/x-mif',
-    '.mov'    : 'video/quicktime',
-    '.movie'  : 'video/x-sgi-movie',
-    '.mp2'    : 'audio/mpeg',
-    '.mp3'    : 'audio/mpeg',
-    '.mpa'    : 'video/mpeg',
-    '.mpe'    : 'video/mpeg',
-    '.mpeg'   : 'video/mpeg',
-    '.mpg'    : 'video/mpeg',
-    '.ms'     : 'application/x-troff-ms',
-    '.nc'     : 'application/x-netcdf',
-    '.nws'    : 'message/rfc822',
-    '.o'      : 'application/octet-stream',
-    '.obj'    : 'application/octet-stream',
-    '.oda'    : 'application/oda',
-    '.p12'    : 'application/x-pkcs12',
-    '.p7c'    : 'application/pkcs7-mime',
-    '.pbm'    : 'image/x-portable-bitmap',
-    '.pdf'    : 'application/pdf',
-    '.pfx'    : 'application/x-pkcs12',
-    '.pgm'    : 'image/x-portable-graymap',
-    '.pl'     : 'text/plain',
-    '.png'    : 'image/png',
-    '.pnm'    : 'image/x-portable-anymap',
-    '.pot'    : 'application/vnd.ms-powerpoint',
-    '.ppa'    : 'application/vnd.ms-powerpoint',
-    '.ppm'    : 'image/x-portable-pixmap',
-    '.pps'    : 'application/vnd.ms-powerpoint',
-    '.ppt'    : 'application/vnd.ms-powerpoint',
-    '.ps'     : 'application/postscript',
-    '.pwz'    : 'application/vnd.ms-powerpoint',
-    '.py'     : 'text/x-python',
-    '.pyc'    : 'application/x-python-code',
-    '.pyo'    : 'application/x-python-code',
-    '.qt'     : 'video/quicktime',
-    '.ra'     : 'audio/x-pn-realaudio',
-    '.ram'    : 'application/x-pn-realaudio',
-    '.ras'    : 'image/x-cmu-raster',
-    '.rdf'    : 'application/xml',
-    '.rgb'    : 'image/x-rgb',
-    '.roff'   : 'application/x-troff',
-    '.rtx'    : 'text/richtext',
-    '.sgm'    : 'text/x-sgml',
-    '.sgml'   : 'text/x-sgml',
-    '.sh'     : 'application/x-sh',
-    '.shar'   : 'application/x-shar',
-    '.snd'    : 'audio/basic',
-    '.so'     : 'application/octet-stream',
-    '.src'    : 'application/x-wais-source',
-    '.sv4cpio': 'application/x-sv4cpio',
-    '.sv4crc' : 'application/x-sv4crc',
-    '.swf'    : 'application/x-shockwave-flash',
-    '.t'      : 'application/x-troff',
-    '.tar'    : 'application/x-tar',
-    '.tcl'    : 'application/x-tcl',
-    '.tex'    : 'application/x-tex',
-    '.texi'   : 'application/x-texinfo',
-    '.texinfo': 'application/x-texinfo',
-    '.tif'    : 'image/tiff',
-    '.tiff'   : 'image/tiff',
-    '.tr'     : 'application/x-troff',
-    '.tsv'    : 'text/tab-separated-values',
-    '.txt'    : 'text/plain',
-    '.ustar'  : 'application/x-ustar',
-    '.vcf'    : 'text/x-vcard',
-    '.wav'    : 'audio/x-wav',
-    '.wiz'    : 'application/msword',
-    '.wsdl'   : 'application/xml',
-    '.xbm'    : 'image/x-xbitmap',
-    '.xlb'    : 'application/vnd.ms-excel',
-    # Duplicates :(
-    '.xls'    : 'application/excel',
-    '.xls'    : 'application/vnd.ms-excel',
-    '.xml'    : 'text/xml',
-    '.xpdl'   : 'application/xml',
-    '.xpm'    : 'image/x-xpixmap',
-    '.xsl'    : 'application/xml',
-    '.xwd'    : 'image/x-xwindowdump',
-    '.zip'    : 'application/zip',
-    }
+    # Before adding new types, make sure they are either registered with IANA,
+    # at http://www.isi.edu/in-notes/iana/assignments/media-types
+    # or extensions, i.e. using the x- prefix
 
-# These are non-standard types, commonly found in the wild.  They will only
-# match if strict=0 flag is given to the API methods.
+    # If you add to these, please keep them sorted!
+    types_map = {
+        '.a'      : 'application/octet-stream',
+        '.ai'     : 'application/postscript',
+        '.aif'    : 'audio/x-aiff',
+        '.aifc'   : 'audio/x-aiff',
+        '.aiff'   : 'audio/x-aiff',
+        '.au'     : 'audio/basic',
+        '.avi'    : 'video/x-msvideo',
+        '.bat'    : 'text/plain',
+        '.bcpio'  : 'application/x-bcpio',
+        '.bin'    : 'application/octet-stream',
+        '.bmp'    : 'image/x-ms-bmp',
+        '.c'      : 'text/plain',
+        # Duplicates :(
+        '.cdf'    : 'application/x-cdf',
+        '.cdf'    : 'application/x-netcdf',
+        '.cpio'   : 'application/x-cpio',
+        '.csh'    : 'application/x-csh',
+        '.css'    : 'text/css',
+        '.dll'    : 'application/octet-stream',
+        '.doc'    : 'application/msword',
+        '.dot'    : 'application/msword',
+        '.dvi'    : 'application/x-dvi',
+        '.eml'    : 'message/rfc822',
+        '.eps'    : 'application/postscript',
+        '.etx'    : 'text/x-setext',
+        '.exe'    : 'application/octet-stream',
+        '.gif'    : 'image/gif',
+        '.gtar'   : 'application/x-gtar',
+        '.h'      : 'text/plain',
+        '.hdf'    : 'application/x-hdf',
+        '.htm'    : 'text/html',
+        '.html'   : 'text/html',
+        '.ief'    : 'image/ief',
+        '.jpe'    : 'image/jpeg',
+        '.jpeg'   : 'image/jpeg',
+        '.jpg'    : 'image/jpeg',
+        '.js'     : 'application/x-javascript',
+        '.ksh'    : 'text/plain',
+        '.latex'  : 'application/x-latex',
+        '.m1v'    : 'video/mpeg',
+        '.man'    : 'application/x-troff-man',
+        '.me'     : 'application/x-troff-me',
+        '.mht'    : 'message/rfc822',
+        '.mhtml'  : 'message/rfc822',
+        '.mif'    : 'application/x-mif',
+        '.mov'    : 'video/quicktime',
+        '.movie'  : 'video/x-sgi-movie',
+        '.mp2'    : 'audio/mpeg',
+        '.mp3'    : 'audio/mpeg',
+        '.mpa'    : 'video/mpeg',
+        '.mpe'    : 'video/mpeg',
+        '.mpeg'   : 'video/mpeg',
+        '.mpg'    : 'video/mpeg',
+        '.ms'     : 'application/x-troff-ms',
+        '.nc'     : 'application/x-netcdf',
+        '.nws'    : 'message/rfc822',
+        '.o'      : 'application/octet-stream',
+        '.obj'    : 'application/octet-stream',
+        '.oda'    : 'application/oda',
+        '.p12'    : 'application/x-pkcs12',
+        '.p7c'    : 'application/pkcs7-mime',
+        '.pbm'    : 'image/x-portable-bitmap',
+        '.pdf'    : 'application/pdf',
+        '.pfx'    : 'application/x-pkcs12',
+        '.pgm'    : 'image/x-portable-graymap',
+        '.pl'     : 'text/plain',
+        '.png'    : 'image/png',
+        '.pnm'    : 'image/x-portable-anymap',
+        '.pot'    : 'application/vnd.ms-powerpoint',
+        '.ppa'    : 'application/vnd.ms-powerpoint',
+        '.ppm'    : 'image/x-portable-pixmap',
+        '.pps'    : 'application/vnd.ms-powerpoint',
+        '.ppt'    : 'application/vnd.ms-powerpoint',
+        '.ps'     : 'application/postscript',
+        '.pwz'    : 'application/vnd.ms-powerpoint',
+        '.py'     : 'text/x-python',
+        '.pyc'    : 'application/x-python-code',
+        '.pyo'    : 'application/x-python-code',
+        '.qt'     : 'video/quicktime',
+        '.ra'     : 'audio/x-pn-realaudio',
+        '.ram'    : 'application/x-pn-realaudio',
+        '.ras'    : 'image/x-cmu-raster',
+        '.rdf'    : 'application/xml',
+        '.rgb'    : 'image/x-rgb',
+        '.roff'   : 'application/x-troff',
+        '.rtx'    : 'text/richtext',
+        '.sgm'    : 'text/x-sgml',
+        '.sgml'   : 'text/x-sgml',
+        '.sh'     : 'application/x-sh',
+        '.shar'   : 'application/x-shar',
+        '.snd'    : 'audio/basic',
+        '.so'     : 'application/octet-stream',
+        '.src'    : 'application/x-wais-source',
+        '.sv4cpio': 'application/x-sv4cpio',
+        '.sv4crc' : 'application/x-sv4crc',
+        '.swf'    : 'application/x-shockwave-flash',
+        '.t'      : 'application/x-troff',
+        '.tar'    : 'application/x-tar',
+        '.tcl'    : 'application/x-tcl',
+        '.tex'    : 'application/x-tex',
+        '.texi'   : 'application/x-texinfo',
+        '.texinfo': 'application/x-texinfo',
+        '.tif'    : 'image/tiff',
+        '.tiff'   : 'image/tiff',
+        '.tr'     : 'application/x-troff',
+        '.tsv'    : 'text/tab-separated-values',
+        '.txt'    : 'text/plain',
+        '.ustar'  : 'application/x-ustar',
+        '.vcf'    : 'text/x-vcard',
+        '.wav'    : 'audio/x-wav',
+        '.wiz'    : 'application/msword',
+        '.wsdl'   : 'application/xml',
+        '.xbm'    : 'image/x-xbitmap',
+        '.xlb'    : 'application/vnd.ms-excel',
+        # Duplicates :(
+        '.xls'    : 'application/excel',
+        '.xls'    : 'application/vnd.ms-excel',
+        '.xml'    : 'text/xml',
+        '.xpdl'   : 'application/xml',
+        '.xpm'    : 'image/x-xpixmap',
+        '.xsl'    : 'application/xml',
+        '.xwd'    : 'image/x-xwindowdump',
+        '.zip'    : 'application/zip',
+        }
 
-# Please sort these too
-common_types = {
-    '.jpg' : 'image/jpg',
-    '.mid' : 'audio/midi',
-    '.midi': 'audio/midi',
-    '.pct' : 'image/pict',
-    '.pic' : 'image/pict',
-    '.pict': 'image/pict',
-    '.rtf' : 'application/rtf',
-    '.xul' : 'text/xul'
-    }
+    # These are non-standard types, commonly found in the wild.  They will
+    # only match if strict=0 flag is given to the API methods.
+
+    # Please sort these too
+    common_types = {
+        '.jpg' : 'image/jpg',
+        '.mid' : 'audio/midi',
+        '.midi': 'audio/midi',
+        '.pct' : 'image/pict',
+        '.pic' : 'image/pict',
+        '.pict': 'image/pict',
+        '.rtf' : 'application/rtf',
+        '.xul' : 'text/xul'
+        }
+
+
+_default_mime_types()
 
 
 if __name__ == '__main__':
diff --git a/Lib/pdb.py b/Lib/pdb.py
index 1aa2eae..5b7ea99 100755
--- a/Lib/pdb.py
+++ b/Lib/pdb.py
@@ -91,6 +91,12 @@
                 self.rcLines.append(line)
             rcFile.close()
 
+        self.commands = {} # associates a command list to breakpoint numbers
+        self.commands_doprompt = {} # for each bp num, tells if the prompt must be disp. after execing the cmd list
+        self.commands_silent = {} # for each bp num, tells if the stack trace must be disp. after execing the cmd list
+        self.commands_defining = False # True while in the process of defining a command list
+        self.commands_bnum = None # The breakpoint number for which we are defining a list
+
     def reset(self):
         bdb.Bdb.reset(self)
         self.forget()
@@ -137,7 +143,28 @@
                 or frame.f_lineno<= 0):
                 return
             self._wait_for_mainpyfile = 0
-        self.interaction(frame, None)
+        if self.bp_commands(frame):
+            self.interaction(frame, None)
+
+    def bp_commands(self,frame):
+        """ Call every command that was set for the current active breakpoint (if there is one)
+        Returns True if the normal interaction function must be called, False otherwise """
+        #self.currentbp is set in bdb.py in bdb.break_here if a breakpoint was hit
+        if getattr(self,"currentbp",False) and self.currentbp in self.commands:
+            currentbp = self.currentbp
+            self.currentbp = 0
+            lastcmd_back = self.lastcmd
+            self.setup(frame, None)
+            for line in self.commands[currentbp]:
+                self.onecmd(line)
+            self.lastcmd = lastcmd_back
+            if not self.commands_silent[currentbp]:
+                self.print_stack_entry(self.stack[self.curindex])
+            if self.commands_doprompt[currentbp]:
+                self.cmdloop()
+            self.forget()
+            return
+        return 1
 
     def user_return(self, frame, return_value):
         """This function is called when a return trap is set here."""
@@ -202,12 +229,70 @@
                 line = line[:marker].rstrip()
         return line
 
+    def onecmd(self, line):
+        """Interpret the argument as though it had been typed in response
+        to the prompt.
+
+        Checks wether  this line is typed in the normal prompt or in a breakpoint command list definition
+        """
+        if not self.commands_defining:
+            return cmd.Cmd.onecmd(self, line)
+        else:
+            return self.handle_command_def(line)
+
+    def handle_command_def(self,line):
+        """ Handles one command line during command list definition. """
+        cmd, arg, line = self.parseline(line)
+        if cmd == 'silent':
+            self.commands_silent[self.commands_bnum] = True
+            return # continue to handle other cmd def in the cmd list
+        elif cmd == 'end':
+            self.cmdqueue = []
+            return 1 # end of cmd list
+        cmdlist = self.commands[self.commands_bnum]
+        if (arg):
+            cmdlist.append(cmd+' '+arg)
+        else:
+            cmdlist.append(cmd)
+        # Determine if we must stop
+        try:
+            func = getattr(self, 'do_' + cmd)
+        except AttributeError:
+            func = self.default
+        if func.func_name in self.commands_resuming : # one of the resuming commands.
+            self.commands_doprompt[self.commands_bnum] = False
+            self.cmdqueue = []
+            return 1
+        return
+
     # Command definitions, called by cmdloop()
     # The argument is the remaining string on the command line
     # Return true to exit from the command loop
 
     do_h = cmd.Cmd.do_help
 
+    def do_commands(self, arg):
+        """Defines a list of commands associated to a breakpoint
+        Those commands will be executed whenever the breakpoint causes the program to stop execution."""
+        if not arg:
+            bnum = len(bdb.Breakpoint.bpbynumber)-1
+        else:
+            try:
+                bnum = int(arg)
+            except:
+                print "Usage : commands [bnum]\n        ...\n        end"
+                return
+        self.commands_bnum = bnum
+        self.commands[bnum] = []
+        self.commands_doprompt[bnum] = True
+        self.commands_silent[bnum] = False
+        prompt_back = self.prompt
+        self.prompt = '(com) '
+        self.commands_defining = True
+        self.cmdloop()
+        self.commands_defining = False
+        self.prompt = prompt_back
+
     def do_break(self, arg, temporary = 0):
         # break [ ([filename:]lineno | function) [, "condition"] ]
         if not arg:
@@ -691,6 +776,9 @@
         if args[0] in self.aliases:
             del self.aliases[args[0]]
 
+    #list of all the commands making the program resume execution.
+    commands_resuming = ['do_continue', 'do_step', 'do_next', 'do_return', 'do_quit', 'do_jump']
+
     # Print a traceback starting at the top stack frame.
     # The most recently entered frame is printed last;
     # this is different from dbx and gdb, but consistent with
@@ -944,6 +1032,41 @@
         print """unalias name
 Deletes the specified alias."""
 
+    def help_commands(self):
+        print """commands [bpnumber]
+(com) ...
+(com) end
+(Pdb)
+
+Specify a list of commands for breakpoint number bpnumber.  The
+commands themselves appear on the following lines.  Type a line
+containing just 'end' to terminate the commands.
+
+To remove all commands from a breakpoint, type commands and
+follow it immediately with  end; that is, give no commands.
+
+With no bpnumber argument, commands refers to the last
+breakpoint set.
+
+You can use breakpoint commands to start your program up again.
+Simply use the continue command, or step, or any other
+command that resumes execution.
+
+Specifying any command resuming execution (currently continue,
+step, next, return, jump, quit and their abbreviations) terminates
+the command list (as if that command was immediately followed by end).
+This is because any time you resume execution
+(even with a simple next or step), you may encounter
+another breakpoint--which could have its own command list, leading to
+ambiguities about which list to execute.
+
+   If you use the 'silent' command in the command list, the
+usual message about stopping at a breakpoint is not printed.  This may
+be desirable for breakpoints that are to print a specific message and
+then continue.  If none of the other commands print anything, you
+see no sign that the breakpoint was reached.
+"""
+
     def help_pdb(self):
         help()
 
diff --git a/Lib/pkg_resources.py b/Lib/pkg_resources.py
new file mode 100644
index 0000000..db6cc90
--- /dev/null
+++ b/Lib/pkg_resources.py
@@ -0,0 +1,2377 @@
+"""Package resource API
+--------------------
+
+A resource is a logical file contained within a package, or a logical
+subdirectory thereof.  The package resource API expects resource names
+to have their path parts separated with ``/``, *not* whatever the local
+path separator is.  Do not use os.path operations to manipulate resource
+names being passed into the API.
+
+The package resource API is designed to work with normal filesystem packages,
+.egg files, and unpacked .egg files.  It can also work in a limited way with
+.zip files and with custom PEP 302 loaders that support the ``get_data()``
+method.
+"""
+
+import sys, os, zipimport, time, re, imp, new, pkgutil  # XXX
+from sets import ImmutableSet
+from os import utime, rename, unlink    # capture these to bypass sandboxing
+from os import open as os_open
+
+def get_supported_platform():
+    """Return this platform's maximum compatible version.
+
+    distutils.util.get_platform() normally reports the minimum version
+    of Mac OS X that would be required to *use* extensions produced by
+    distutils.  But what we want when checking compatibility is to know the
+    version of Mac OS X that we are *running*.  To allow usage of packages that
+    explicitly require a newer version of Mac OS X, we must also know the
+    current version of the OS.
+
+    If this condition occurs for any other platform with a version in its
+    platform strings, this function should be extended accordingly.
+    """
+    plat = get_build_platform(); m = macosVersionString.match(plat)
+    if m is not None and sys.platform == "darwin":
+        try:
+            plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3))
+        except ValueError:
+            pass    # not Mac OS X
+    return plat
+
+__all__ = [
+    # Basic resource access and distribution/entry point discovery
+    'require', 'run_script', 'get_provider',  'get_distribution',
+    'load_entry_point', 'get_entry_map', 'get_entry_info', 'iter_entry_points',
+    'resource_string', 'resource_stream', 'resource_filename',
+    'resource_listdir', 'resource_exists', 'resource_isdir',
+
+    # Environmental control
+    'declare_namespace', 'working_set', 'add_activation_listener',
+    'find_distributions', 'set_extraction_path', 'cleanup_resources',
+    'get_default_cache',
+
+    # Primary implementation classes
+    'Environment', 'WorkingSet', 'ResourceManager',
+    'Distribution', 'Requirement', 'EntryPoint',
+
+    # Exceptions
+    'ResolutionError','VersionConflict','DistributionNotFound','UnknownExtra',
+    'ExtractionError',
+
+    # Parsing functions and string utilities
+    'parse_requirements', 'parse_version', 'safe_name', 'safe_version',
+    'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections',
+    'safe_extra', 'to_filename',
+
+    # filesystem utilities
+    'ensure_directory', 'normalize_path',
+
+    # Distribution "precedence" constants
+    'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST',
+
+    # "Provider" interfaces, implementations, and registration/lookup APIs
+    'IMetadataProvider', 'IResourceProvider', 'FileMetadata',
+    'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider',
+    'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider',
+    'register_finder', 'register_namespace_handler', 'register_loader_type',
+    'fixup_namespace_packages', 'get_importer',
+
+    # Deprecated/backward compatibility only
+    'run_main', 'AvailableDistributions',
+]
+class ResolutionError(Exception):
+    """Abstract base for dependency resolution errors"""
+    def __repr__(self):
+        return self.__class__.__name__+repr(self.args)
+
+class VersionConflict(ResolutionError):
+    """An already-installed version conflicts with the requested version"""
+
+class DistributionNotFound(ResolutionError):
+    """A requested distribution was not found"""
+
+class UnknownExtra(ResolutionError):
+    """Distribution doesn't have an "extra feature" of the given name"""
+
+_provider_factories = {}
+PY_MAJOR = sys.version[:3]
+EGG_DIST    = 3
+BINARY_DIST = 2
+SOURCE_DIST = 1
+CHECKOUT_DIST = 0
+DEVELOP_DIST = -1
+
+def register_loader_type(loader_type, provider_factory):
+    """Register `provider_factory` to make providers for `loader_type`
+
+    `loader_type` is the type or class of a PEP 302 ``module.__loader__``,
+    and `provider_factory` is a function that, passed a *module* object,
+    returns an ``IResourceProvider`` for that module.
+    """
+    _provider_factories[loader_type] = provider_factory
+
+def get_provider(moduleOrReq):
+    """Return an IResourceProvider for the named module or requirement"""
+    if isinstance(moduleOrReq,Requirement):
+        return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0]
+    try:
+        module = sys.modules[moduleOrReq]
+    except KeyError:
+        __import__(moduleOrReq)
+        module = sys.modules[moduleOrReq]
+    loader = getattr(module, '__loader__', None)
+    return _find_adapter(_provider_factories, loader)(module)
+
+def _macosx_vers(_cache=[]):
+    if not _cache:
+        info = os.popen('/usr/bin/sw_vers').read().splitlines()
+        for line in info:
+            key, value = line.split(None, 1)
+            if key == 'ProductVersion:':
+                _cache.append(value.strip().split("."))
+                break
+        else:
+            raise ValueError, "What?!"
+    return _cache[0]
+
+def _macosx_arch(machine):
+    return {'PowerPC':'ppc', 'Power_Macintosh':'ppc'}.get(machine,machine)
+
+def get_build_platform():
+    """Return this platform's string for platform-specific distributions
+
+    XXX Currently this is the same as ``distutils.util.get_platform()``, but it
+    needs some hacks for Linux and Mac OS X.
+    """
+    from distutils.util import get_platform
+    plat = get_platform()
+    if sys.platform == "darwin" and not plat.startswith('macosx-'):
+        try:
+            version = _macosx_vers()
+            machine = os.uname()[4].replace(" ", "_")
+            return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]),
+                _macosx_arch(machine))
+        except ValueError:
+            # if someone is running a non-Mac darwin system, this will fall
+            # through to the default implementation
+            pass
+    return plat
+
+macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)")
+darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)")
+get_platform = get_build_platform   # XXX backward compat
+
+def compatible_platforms(provided,required):
+    """Can code for the `provided` platform run on the `required` platform?
+
+    Returns true if either platform is ``None``, or the platforms are equal.
+
+    XXX Needs compatibility checks for Linux and other unixy OSes.
+    """
+    if provided is None or required is None or provided==required:
+        return True     # easy case
+
+    # Mac OS X special cases
+    reqMac = macosVersionString.match(required)
+    if reqMac:
+        provMac = macosVersionString.match(provided)
+
+        # is this a Mac package?
+        if not provMac:
+            # this is backwards compatibility for packages built before
+            # setuptools 0.6. All packages built after this point will
+            # use the new macosx designation.
+            provDarwin = darwinVersionString.match(provided)
+            if provDarwin:
+                dversion = int(provDarwin.group(1))
+                macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2))
+                if dversion == 7 and macosversion >= "10.3" or \
+                    dversion == 8 and macosversion >= "10.4":
+
+                    #import warnings
+                    #warnings.warn("Mac eggs should be rebuilt to "
+                    #    "use the macosx designation instead of darwin.",
+                    #    category=DeprecationWarning)
+                    return True
+            return False    # egg isn't macosx or legacy darwin
+
+        # are they the same major version and machine type?
+        if provMac.group(1) != reqMac.group(1) or \
+            provMac.group(3) != reqMac.group(3):
+            return False
+
+
+
+        # is the required OS major update >= the provided one?
+        if int(provMac.group(2)) > int(reqMac.group(2)):
+            return False
+
+        return True
+
+    # XXX Linux and other platforms' special cases should go here
+    return False
+
+
+def run_script(dist_spec, script_name):
+    """Locate distribution `dist_spec` and run its `script_name` script"""
+    ns = sys._getframe(1).f_globals
+    name = ns['__name__']
+    ns.clear()
+    ns['__name__'] = name
+    require(dist_spec)[0].run_script(script_name, ns)
+
+run_main = run_script   # backward compatibility
+
+def get_distribution(dist):
+    """Return a current distribution object for a Requirement or string"""
+    if isinstance(dist,basestring): dist = Requirement.parse(dist)
+    if isinstance(dist,Requirement): dist = get_provider(dist)
+    if not isinstance(dist,Distribution):
+        raise TypeError("Expected string, Requirement, or Distribution", dist)
+    return dist
+
+def load_entry_point(dist, group, name):
+    """Return `name` entry point of `group` for `dist` or raise ImportError"""
+    return get_distribution(dist).load_entry_point(group, name)
+
+def get_entry_map(dist, group=None):
+    """Return the entry point map for `group`, or the full entry map"""
+    return get_distribution(dist).get_entry_map(group)
+
+def get_entry_info(dist, group, name):
+    """Return the EntryPoint object for `group`+`name`, or ``None``"""
+    return get_distribution(dist).get_entry_info(group, name)
+
+
+try:
+    from pkgutil import get_importer
+except ImportError:
+    import _pkgutil as pkgutil
+    get_importer = pkgutil.get_importer
+else:
+    import pkgutil
+
+
+class IMetadataProvider:
+
+    def has_metadata(name):
+        """Does the package's distribution contain the named metadata?"""
+
+    def get_metadata(name):
+        """The named metadata resource as a string"""
+
+    def get_metadata_lines(name):
+        """Yield named metadata resource as list of non-blank non-comment lines
+
+       Leading and trailing whitespace is stripped from each line, and lines
+       with ``#`` as the first non-blank character are omitted."""
+
+    def metadata_isdir(name):
+        """Is the named metadata a directory?  (like ``os.path.isdir()``)"""
+
+    def metadata_listdir(name):
+        """List of metadata names in the directory (like ``os.listdir()``)"""
+
+    def run_script(script_name, namespace):
+        """Execute the named script in the supplied namespace dictionary"""
+
+
+
+
+
+
+
+
+
+
+class IResourceProvider(IMetadataProvider):
+    """An object that provides access to package resources"""
+
+    def get_resource_filename(manager, resource_name):
+        """Return a true filesystem path for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_stream(manager, resource_name):
+        """Return a readable file-like object for `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def get_resource_string(manager, resource_name):
+        """Return a string containing the contents of `resource_name`
+
+        `manager` must be an ``IResourceManager``"""
+
+    def has_resource(resource_name):
+        """Does the package contain the named resource?"""
+
+    def resource_isdir(resource_name):
+        """Is the named resource a directory?  (like ``os.path.isdir()``)"""
+
+    def resource_listdir(resource_name):
+        """List of resource names in the directory (like ``os.listdir()``)"""
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class WorkingSet(object):
+    """A collection of active distributions on sys.path (or a similar list)"""
+
+    def __init__(self, entries=None):
+        """Create working set from list of path entries (default=sys.path)"""
+        self.entries = []
+        self.entry_keys = {}
+        self.by_key = {}
+        self.callbacks = []
+
+        if entries is None:
+            entries = sys.path
+
+        for entry in entries:
+            self.add_entry(entry)
+
+
+    def add_entry(self, entry):
+        """Add a path item to ``.entries``, finding any distributions on it
+
+        ``find_distributions(entry,False)`` is used to find distributions
+        corresponding to the path entry, and they are added.  `entry` is
+        always appended to ``.entries``, even if it is already present.
+        (This is because ``sys.path`` can contain the same value more than
+        once, and the ``.entries`` of the ``sys.path`` WorkingSet should always
+        equal ``sys.path``.)
+        """
+        self.entry_keys.setdefault(entry, [])
+        self.entries.append(entry)
+        for dist in find_distributions(entry, True):
+            self.add(dist, entry, False)
+
+
+    def __contains__(self,dist):
+        """True if `dist` is the active distribution for its project"""
+        return self.by_key.get(dist.key) == dist
+
+
+
+
+
+    def find(self, req):
+        """Find a distribution matching requirement `req`
+
+        If there is an active distribution for the requested project, this
+        returns it as long as it meets the version requirement specified by
+        `req`.  But, if there is an active distribution for the project and it
+        does *not* meet the `req` requirement, ``VersionConflict`` is raised.
+        If there is no active distribution for the requested project, ``None``
+        is returned.
+        """
+        dist = self.by_key.get(req.key)
+        if dist is not None and dist not in req:
+            raise VersionConflict(dist,req)     # XXX add more info
+        else:
+            return dist
+
+    def iter_entry_points(self, group, name=None):
+        """Yield entry point objects from `group` matching `name`
+
+        If `name` is None, yields all entry points in `group` from all
+        distributions in the working set, otherwise only ones matching
+        both `group` and `name` are yielded (in distribution order).
+        """
+        for dist in self:
+            entries = dist.get_entry_map(group)
+            if name is None:
+                for ep in entries.values():
+                    yield ep
+            elif name in entries:
+                yield entries[name]
+
+    def run_script(self, requires, script_name):
+        """Locate distribution for `requires` and run `script_name` script"""
+        ns = sys._getframe(1).f_globals
+        name = ns['__name__']
+        ns.clear()
+        ns['__name__'] = name
+        self.require(requires)[0].run_script(script_name, ns)
+
+
+
+    def __iter__(self):
+        """Yield distributions for non-duplicate projects in the working set
+
+        The yield order is the order in which the items' path entries were
+        added to the working set.
+        """
+        seen = {}
+        for item in self.entries:
+            for key in self.entry_keys[item]:
+                if key not in seen:
+                    seen[key]=1
+                    yield self.by_key[key]
+
+    def add(self, dist, entry=None, insert=True):
+        """Add `dist` to working set, associated with `entry`
+
+        If `entry` is unspecified, it defaults to the ``.location`` of `dist`.
+        On exit from this routine, `entry` is added to the end of the working
+        set's ``.entries`` (if it wasn't already present).
+
+        `dist` is only added to the working set if it's for a project that
+        doesn't already have a distribution in the set.  If it's added, any
+        callbacks registered with the ``subscribe()`` method will be called.
+        """
+        if insert:
+            dist.insert_on(self.entries, entry)
+
+        if entry is None:
+            entry = dist.location
+        keys = self.entry_keys.setdefault(entry,[])
+
+        if dist.key in self.by_key:
+            return      # ignore hidden distros
+
+        self.by_key[dist.key] = dist
+        if dist.key not in keys:
+            keys.append(dist.key)
+
+        self._added_new(dist)
+
+
+    def resolve(self, requirements, env=None, installer=None):
+        """List all distributions needed to (recursively) meet `requirements`
+
+        `requirements` must be a sequence of ``Requirement`` objects.  `env`,
+        if supplied, should be an ``Environment`` instance.  If
+        not supplied, it defaults to all distributions available within any
+        entry or distribution in the working set.  `installer`, if supplied,
+        will be invoked with each requirement that cannot be met by an
+        already-installed distribution; it should return a ``Distribution`` or
+        ``None``.
+        """
+
+        requirements = list(requirements)[::-1]  # set up the stack
+        processed = {}  # set of processed requirements
+        best = {}  # key -> dist
+        to_activate = []
+
+        while requirements:
+            req = requirements.pop(0)   # process dependencies breadth-first
+            if req in processed:
+                # Ignore cyclic or redundant dependencies
+                continue
+            dist = best.get(req.key)
+            if dist is None:
+                # Find the best distribution and add it to the map
+                dist = self.by_key.get(req.key)
+                if dist is None:
+                    if env is None:
+                        env = Environment(self.entries)
+                    dist = best[req.key] = env.best_match(req, self, installer)
+                    if dist is None:
+                        raise DistributionNotFound(req)  # XXX put more info here
+                to_activate.append(dist)
+            if dist not in req:
+                # Oops, the "best" so far conflicts with a dependency
+                raise VersionConflict(dist,req) # XXX put more info here
+            requirements.extend(dist.requires(req.extras)[::-1])
+            processed[req] = True
+
+        return to_activate    # return list of distros to activate
+
+    def find_plugins(self,
+        plugin_env, full_env=None, installer=None, fallback=True
+    ):
+        """Find all activatable distributions in `plugin_env`
+
+        Example usage::
+
+            distributions, errors = working_set.find_plugins(
+                Environment(plugin_dirlist)
+            )
+            map(working_set.add, distributions)  # add plugins+libs to sys.path
+            print "Couldn't load", errors        # display errors
+
+        The `plugin_env` should be an ``Environment`` instance that contains
+        only distributions that are in the project's "plugin directory" or
+        directories. The `full_env`, if supplied, should be an ``Environment``
+        contains all currently-available distributions.  If `full_env` is not
+        supplied, one is created automatically from the ``WorkingSet`` this
+        method is called on, which will typically mean that every directory on
+        ``sys.path`` will be scanned for distributions.
+
+        `installer` is a standard installer callback as used by the
+        ``resolve()`` method. The `fallback` flag indicates whether we should
+        attempt to resolve older versions of a plugin if the newest version
+        cannot be resolved.
+
+        This method returns a 2-tuple: (`distributions`, `error_info`), where
+        `distributions` is a list of the distributions found in `plugin_env`
+        that were loadable, along with any other distributions that are needed
+        to resolve their dependencies.  `error_info` is a dictionary mapping
+        unloadable plugin distributions to an exception instance describing the
+        error that occurred. Usually this will be a ``DistributionNotFound`` or
+        ``VersionConflict`` instance.
+        """
+
+        plugin_projects = list(plugin_env)
+        plugin_projects.sort()  # scan project names in alphabetic order
+
+        error_info = {}
+        distributions = {}
+
+        if full_env is None:
+            env = Environment(self.entries)
+            env += plugin_env
+        else:
+            env = full_env + plugin_env
+
+        shadow_set = self.__class__([])
+        map(shadow_set.add, self)   # put all our entries in shadow_set
+
+        for project_name in plugin_projects:
+
+            for dist in plugin_env[project_name]:
+
+                req = [dist.as_requirement()]
+
+                try:
+                    resolvees = shadow_set.resolve(req, env, installer)
+
+                except ResolutionError,v:
+                    error_info[dist] = v    # save error info
+                    if fallback:
+                        continue    # try the next older version of project
+                    else:
+                        break       # give up on this project, keep going
+
+                else:
+                    map(shadow_set.add, resolvees)
+                    distributions.update(dict.fromkeys(resolvees))
+
+                    # success, no need to try any more versions of this project
+                    break
+
+        distributions = list(distributions)
+        distributions.sort()
+
+        return distributions, error_info
+
+
+
+
+
+    def require(self, *requirements):
+        """Ensure that distributions matching `requirements` are activated
+
+        `requirements` must be a string or a (possibly-nested) sequence
+        thereof, specifying the distributions and versions required.  The
+        return value is a sequence of the distributions that needed to be
+        activated to fulfill the requirements; all relevant distributions are
+        included, even if they were already activated in this working set.
+        """
+
+        needed = self.resolve(parse_requirements(requirements))
+
+        for dist in needed:
+            self.add(dist)
+
+        return needed
+
+
+    def subscribe(self, callback):
+        """Invoke `callback` for all distributions (including existing ones)"""
+        if callback in self.callbacks:
+            return
+        self.callbacks.append(callback)
+        for dist in self:
+            callback(dist)
+
+
+    def _added_new(self, dist):
+        for callback in self.callbacks:
+            callback(dist)
+
+
+
+
+
+
+
+
+
+
+
+class Environment(object):
+    """Searchable snapshot of distributions on a search path"""
+
+    def __init__(self, search_path=None, platform=get_supported_platform(), python=PY_MAJOR):
+        """Snapshot distributions available on a search path
+
+        Any distributions found on `search_path` are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.
+
+        `platform` is an optional string specifying the name of the platform
+        that platform-specific distributions must be compatible with.  If
+        unspecified, it defaults to the current platform.  `python` is an
+        optional string naming the desired version of Python (e.g. ``'2.4'``);
+        it defaults to the current version.
+
+        You may explicitly set `platform` (and/or `python`) to ``None`` if you
+        wish to map *all* distributions, not just those compatible with the
+        running platform or Python version.
+        """
+        self._distmap = {}
+        self._cache = {}
+        self.platform = platform
+        self.python = python
+        self.scan(search_path)
+
+    def can_add(self, dist):
+        """Is distribution `dist` acceptable for this environment?
+
+        The distribution must match the platform and python version
+        requirements specified when this environment was created, or False
+        is returned.
+        """
+        return (self.python is None or dist.py_version is None
+            or dist.py_version==self.python) \
+           and compatible_platforms(dist.platform,self.platform)
+
+    def remove(self, dist):
+        """Remove `dist` from the environment"""
+        self._distmap[dist.key].remove(dist)
+
+    def scan(self, search_path=None):
+        """Scan `search_path` for distributions usable in this environment
+
+        Any distributions found are added to the environment.
+        `search_path` should be a sequence of ``sys.path`` items.  If not
+        supplied, ``sys.path`` is used.  Only distributions conforming to
+        the platform/python version defined at initialization are added.
+        """
+        if search_path is None:
+            search_path = sys.path
+
+        for item in search_path:
+            for dist in find_distributions(item):
+                self.add(dist)
+
+    def __getitem__(self,project_name):
+        """Return a newest-to-oldest list of distributions for `project_name`
+        """
+        try:
+            return self._cache[project_name]
+        except KeyError:
+            project_name = project_name.lower()
+            if project_name not in self._distmap:
+                return []
+
+        if project_name not in self._cache:
+            dists = self._cache[project_name] = self._distmap[project_name]
+            _sort_dists(dists)
+
+        return self._cache[project_name]
+
+    def add(self,dist):
+        """Add `dist` if we ``can_add()`` it and it isn't already added"""
+        if self.can_add(dist) and dist.has_version():
+            dists = self._distmap.setdefault(dist.key,[])
+            if dist not in dists:
+                dists.append(dist)
+                if dist.key in self._cache:
+                    _sort_dists(self._cache[dist.key])
+
+
+    def best_match(self, req, working_set, installer=None):
+        """Find distribution best matching `req` and usable on `working_set`
+
+        This calls the ``find(req)`` method of the `working_set` to see if a
+        suitable distribution is already active.  (This may raise
+        ``VersionConflict`` if an unsuitable version of the project is already
+        active in the specified `working_set`.)  If a suitable distribution
+        isn't active, this method returns the newest distribution in the
+        environment that meets the ``Requirement`` in `req`.  If no suitable
+        distribution is found, and `installer` is supplied, then the result of
+        calling the environment's ``obtain(req, installer)`` method will be
+        returned.
+        """
+        dist = working_set.find(req)
+        if dist is not None:
+            return dist
+        for dist in self[req.key]:
+            if dist in req:
+                return dist
+        return self.obtain(req, installer) # try and download/install
+
+    def obtain(self, requirement, installer=None):
+        """Obtain a distribution matching `requirement` (e.g. via download)
+
+        Obtain a distro that matches requirement (e.g. via download).  In the
+        base ``Environment`` class, this routine just returns
+        ``installer(requirement)``, unless `installer` is None, in which case
+        None is returned instead.  This method is a hook that allows subclasses
+        to attempt other ways of obtaining a distribution before falling back
+        to the `installer` argument."""
+        if installer is not None:
+            return installer(requirement)
+
+    def __iter__(self):
+        """Yield the unique project names of the available distributions"""
+        for key in self._distmap.keys():
+            if self[key]: yield key
+
+
+
+
+    def __iadd__(self, other):
+        """In-place addition of a distribution or environment"""
+        if isinstance(other,Distribution):
+            self.add(other)
+        elif isinstance(other,Environment):
+            for project in other:
+                for dist in other[project]:
+                    self.add(dist)
+        else:
+            raise TypeError("Can't add %r to environment" % (other,))
+        return self
+
+    def __add__(self, other):
+        """Add an environment or distribution to an environment"""
+        new = self.__class__([], platform=None, python=None)
+        for env in self, other:
+            new += env
+        return new
+
+
+AvailableDistributions = Environment    # XXX backward compatibility
+
+
+class ExtractionError(RuntimeError):
+    """An error occurred extracting a resource
+
+    The following attributes are available from instances of this exception:
+
+    manager
+        The resource manager that raised this exception
+
+    cache_path
+        The base directory for resource extraction
+
+    original_error
+        The exception instance that caused extraction to fail
+    """
+
+
+
+
+class ResourceManager:
+    """Manage resource extraction and packages"""
+    extraction_path = None
+
+    def __init__(self):
+        self.cached_files = {}
+
+    def resource_exists(self, package_or_requirement, resource_name):
+        """Does the named resource exist?"""
+        return get_provider(package_or_requirement).has_resource(resource_name)
+
+    def resource_isdir(self, package_or_requirement, resource_name):
+        """Is the named resource an existing directory?"""
+        return get_provider(package_or_requirement).resource_isdir(
+            resource_name
+        )
+
+    def resource_filename(self, package_or_requirement, resource_name):
+        """Return a true filesystem path for specified resource"""
+        return get_provider(package_or_requirement).get_resource_filename(
+            self, resource_name
+        )
+
+    def resource_stream(self, package_or_requirement, resource_name):
+        """Return a readable file-like object for specified resource"""
+        return get_provider(package_or_requirement).get_resource_stream(
+            self, resource_name
+        )
+
+    def resource_string(self, package_or_requirement, resource_name):
+        """Return specified resource as a string"""
+        return get_provider(package_or_requirement).get_resource_string(
+            self, resource_name
+        )
+
+    def resource_listdir(self, package_or_requirement, resource_name):
+        """List the contents of the named resource directory"""
+        return get_provider(package_or_requirement).resource_listdir(
+            resource_name
+        )
+
+    def extraction_error(self):
+        """Give an error message for problems extracting file(s)"""
+
+        old_exc = sys.exc_info()[1]
+        cache_path = self.extraction_path or get_default_cache()
+
+        err = ExtractionError("""Can't extract file(s) to egg cache
+
+The following error occurred while trying to extract file(s) to the Python egg
+cache:
+
+  %s
+
+The Python egg cache directory is currently set to:
+
+  %s
+
+Perhaps your account does not have write access to this directory?  You can
+change the cache directory by setting the PYTHON_EGG_CACHE environment
+variable to point to an accessible directory.
+"""         % (old_exc, cache_path)
+        )
+        err.manager        = self
+        err.cache_path     = cache_path
+        err.original_error = old_exc
+        raise err
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def get_cache_path(self, archive_name, names=()):
+        """Return absolute location in cache for `archive_name` and `names`
+
+        The parent directory of the resulting path will be created if it does
+        not already exist.  `archive_name` should be the base filename of the
+        enclosing egg (which may not be the name of the enclosing zipfile!),
+        including its ".egg" extension.  `names`, if provided, should be a
+        sequence of path name parts "under" the egg's extraction location.
+
+        This method should only be called by resource providers that need to
+        obtain an extraction location, and only for names they intend to
+        extract, as it tracks the generated names for possible cleanup later.
+        """
+        extract_path = self.extraction_path or get_default_cache()
+        target_path = os.path.join(extract_path, archive_name+'-tmp', *names)
+        try:
+            ensure_directory(target_path)
+        except:
+            self.extraction_error()
+
+        self.cached_files[target_path] = 1
+        return target_path
+
+
+    def postprocess(self, tempname, filename):
+        """Perform any platform-specific postprocessing of `tempname`
+
+        This is where Mac header rewrites should be done; other platforms don't
+        have anything special they should do.
+
+        Resource providers should call this method ONLY after successfully
+        extracting a compressed resource.  They must NOT call it on resources
+        that are already in the filesystem.
+
+        `tempname` is the current (temporary) name of the file, and `filename`
+        is the name it will be renamed to by the caller after this routine
+        returns.
+        """
+        # XXX
+
+
+    def set_extraction_path(self, path):
+        """Set the base path where resources will be extracted to, if needed.
+
+        If you do not call this routine before any extractions take place, the
+        path defaults to the return value of ``get_default_cache()``.  (Which
+        is based on the ``PYTHON_EGG_CACHE`` environment variable, with various
+        platform-specific fallbacks.  See that routine's documentation for more
+        details.)
+
+        Resources are extracted to subdirectories of this path based upon
+        information given by the ``IResourceProvider``.  You may set this to a
+        temporary directory, but then you must call ``cleanup_resources()`` to
+        delete the extracted files when done.  There is no guarantee that
+        ``cleanup_resources()`` will be able to remove all extracted files.
+
+        (Note: you may not change the extraction path for a given resource
+        manager once resources have been extracted, unless you first call
+        ``cleanup_resources()``.)
+        """
+        if self.cached_files:
+            raise ValueError(
+                "Can't change extraction path, files already extracted"
+            )
+
+        self.extraction_path = path
+
+    def cleanup_resources(self, force=False):
+        """
+        Delete all extracted resource files and directories, returning a list
+        of the file and directory names that could not be successfully removed.
+        This function does not have any concurrency protection, so it should
+        generally only be called when the extraction path is a temporary
+        directory exclusive to a single process.  This method is not
+        automatically called; you must call it explicitly or register it as an
+        ``atexit`` function if you wish to ensure cleanup of a temporary
+        directory used for extractions.
+        """
+        # XXX
+
+
+
+def get_default_cache():
+    """Determine the default cache location
+
+    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
+    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
+    "Application Data" directory.  On all other systems, it's "~/.python-eggs".
+    """
+    try:
+        return os.environ['PYTHON_EGG_CACHE']
+    except KeyError:
+        pass
+
+    if os.name!='nt':
+        return os.path.expanduser('~/.python-eggs')
+
+    app_data = 'Application Data'   # XXX this may be locale-specific!
+    app_homes = [
+        (('APPDATA',), None),       # best option, should be locale-safe
+        (('USERPROFILE',), app_data),
+        (('HOMEDRIVE','HOMEPATH'), app_data),
+        (('HOMEPATH',), app_data),
+        (('HOME',), None),
+        (('WINDIR',), app_data),    # 95/98/ME
+    ]
+
+    for keys, subdir in app_homes:
+        dirname = ''
+        for key in keys:
+            if key in os.environ:
+                dirname = os.path.join(os.environ[key])
+            else:
+                break
+        else:
+            if subdir:
+                dirname = os.path.join(dirname,subdir)
+            return os.path.join(dirname, 'Python-Eggs')
+    else:
+        raise RuntimeError(
+            "Please set the PYTHON_EGG_CACHE enviroment variable"
+        )
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """Convert an arbitrary string to a standard version string
+
+    Spaces become dots, and all other non-alphanumeric characters become
+    dashes, with runs of multiple dashes condensed to a single dash.
+    """
+    version = version.replace(' ','.')
+    return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def safe_extra(extra):
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of non-alphanumeric characters are replaced with a single '_',
+    and the result is always lowercased.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
+    """
+    return name.replace('-','_')
+
+
+
+
+
+
+
+
+class NullProvider:
+    """Try to implement resources and metadata for arbitrary PEP 302 loaders"""
+
+    egg_name = None
+    egg_info = None
+    loader = None
+
+    def __init__(self, module):
+        self.loader = getattr(module, '__loader__', None)
+        self.module_path = os.path.dirname(getattr(module, '__file__', ''))
+
+    def get_resource_filename(self, manager, resource_name):
+        return self._fn(self.module_path, resource_name)
+
+    def get_resource_stream(self, manager, resource_name):
+        return StringIO(self.get_resource_string(manager, resource_name))
+
+    def get_resource_string(self, manager, resource_name):
+        return self._get(self._fn(self.module_path, resource_name))
+
+    def has_resource(self, resource_name):
+        return self._has(self._fn(self.module_path, resource_name))
+
+    def has_metadata(self, name):
+        return self.egg_info and self._has(self._fn(self.egg_info,name))
+
+    def get_metadata(self, name):
+        if not self.egg_info:
+            return ""
+        return self._get(self._fn(self.egg_info,name))
+
+    def get_metadata_lines(self, name):
+        return yield_lines(self.get_metadata(name))
+
+    def resource_isdir(self,resource_name):
+        return self._isdir(self._fn(self.module_path, resource_name))
+
+    def metadata_isdir(self,name):
+        return self.egg_info and self._isdir(self._fn(self.egg_info,name))
+
+
+    def resource_listdir(self,resource_name):
+        return self._listdir(self._fn(self.module_path,resource_name))
+
+    def metadata_listdir(self,name):
+        if self.egg_info:
+            return self._listdir(self._fn(self.egg_info,name))
+        return []
+
+    def run_script(self,script_name,namespace):
+        script = 'scripts/'+script_name
+        if not self.has_metadata(script):
+            raise ResolutionError("No script named %r" % script_name)
+        script_text = self.get_metadata(script).replace('\r\n','\n')
+        script_text = script_text.replace('\r','\n')
+        script_filename = self._fn(self.egg_info,script)
+        namespace['__file__'] = script_filename
+        if os.path.exists(script_filename):
+            execfile(script_filename, namespace, namespace)
+        else:
+            from linecache import cache
+            cache[script_filename] = (
+                len(script_text), 0, script_text.split('\n'), script_filename
+            )
+            script_code = compile(script_text,script_filename,'exec')
+            exec script_code in namespace, namespace
+
+    def _has(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _isdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _listdir(self, path):
+        raise NotImplementedError(
+            "Can't perform this operation for unregistered loader type"
+        )
+
+    def _fn(self, base, resource_name):
+        return os.path.join(base, *resource_name.split('/'))
+
+    def _get(self, path):
+        if hasattr(self.loader, 'get_data'):
+            return self.loader.get_data(path)
+        raise NotImplementedError(
+            "Can't perform this operation for loaders without 'get_data()'"
+        )
+
+register_loader_type(object, NullProvider)
+
+
+class EggProvider(NullProvider):
+    """Provider based on a virtual filesystem"""
+
+    def __init__(self,module):
+        NullProvider.__init__(self,module)
+        self._setup_prefix()
+
+    def _setup_prefix(self):
+        # we assume here that our metadata may be nested inside a "basket"
+        # of multiple eggs; that's why we use module_path instead of .archive
+        path = self.module_path
+        old = None
+        while path!=old:
+            if path.lower().endswith('.egg'):
+                self.egg_name = os.path.basename(path)
+                self.egg_info = os.path.join(path, 'EGG-INFO')
+                self.egg_root = path
+                break
+            old = path
+            path, base = os.path.split(path)
+
+
+
+
+
+
+
+
+class DefaultProvider(EggProvider):
+    """Provides access to package resources in the filesystem"""
+
+    def _has(self, path):
+        return os.path.exists(path)
+
+    def _isdir(self,path):
+        return os.path.isdir(path)
+
+    def _listdir(self,path):
+        return os.listdir(path)
+
+    def get_resource_stream(self, manager, resource_name):
+        return open(self._fn(self.module_path, resource_name), 'rb')
+
+    def _get(self, path):
+        stream = open(path, 'rb')
+        try:
+            return stream.read()
+        finally:
+            stream.close()
+
+register_loader_type(type(None), DefaultProvider)
+
+
+class EmptyProvider(NullProvider):
+    """Provider that returns nothing for all requests"""
+
+    _isdir = _has = lambda self,path: False
+    _get          = lambda self,path: ''
+    _listdir      = lambda self,path: []
+    module_path   = None
+
+    def __init__(self):
+        pass
+
+empty_provider = EmptyProvider()
+
+
+
+
+class ZipProvider(EggProvider):
+    """Resource support for zips and eggs"""
+
+    eagers = None
+
+    def __init__(self, module):
+        EggProvider.__init__(self,module)
+        self.zipinfo = zipimport._zip_directory_cache[self.loader.archive]
+        self.zip_pre = self.loader.archive+os.sep
+
+    def _zipinfo_name(self, fspath):
+        # Convert a virtual filename (full path to file) into a zipfile subpath
+        # usable with the zipimport directory cache for our target archive
+        if fspath.startswith(self.zip_pre):
+            return fspath[len(self.zip_pre):]
+        raise AssertionError(
+            "%s is not a subpath of %s" % (fspath,self.zip_pre)
+        )
+
+    def _parts(self,zip_path):
+        # Convert a zipfile subpath into an egg-relative path part list
+        fspath = self.zip_pre+zip_path  # pseudo-fs path
+        if fspath.startswith(self.egg_root+os.sep):
+            return fspath[len(self.egg_root)+1:].split(os.sep)
+        raise AssertionError(
+            "%s is not a subpath of %s" % (fspath,self.egg_root)
+        )
+
+    def get_resource_filename(self, manager, resource_name):
+        if not self.egg_name:
+            raise NotImplementedError(
+                "resource_filename() only supported for .egg, not .zip"
+            )
+        # no need to lock for extraction, since we use temp names
+        zip_path = self._resource_to_zip(resource_name)
+        eagers = self._get_eager_resources()
+        if '/'.join(self._parts(zip_path)) in eagers:
+            for name in eagers:
+                self._extract_resource(manager, self._eager_to_zip(name))
+        return self._extract_resource(manager, zip_path)
+
+    def _extract_resource(self, manager, zip_path):
+
+        if zip_path in self._index():
+            for name in self._index()[zip_path]:
+                last = self._extract_resource(
+                    manager, os.path.join(zip_path, name)
+                )
+            return os.path.dirname(last)  # return the extracted directory name
+
+        zip_stat = self.zipinfo[zip_path]
+        t,d,size = zip_stat[5], zip_stat[6], zip_stat[3]
+        date_time = (
+            (d>>9)+1980, (d>>5)&0xF, d&0x1F,                      # ymd
+            (t&0xFFFF)>>11, (t>>5)&0x3F, (t&0x1F) * 2, 0, 0, -1   # hms, etc.
+        )
+        timestamp = time.mktime(date_time)
+
+        try:
+            real_path = manager.get_cache_path(
+                self.egg_name, self._parts(zip_path)
+            )
+
+            if os.path.isfile(real_path):
+                stat = os.stat(real_path)
+                if stat.st_size==size and stat.st_mtime==timestamp:
+                    # size and stamp match, don't bother extracting
+                    return real_path
+
+            outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path))
+            os.write(outf, self.loader.get_data(zip_path))
+            os.close(outf)
+            utime(tmpnam, (timestamp,timestamp))
+            manager.postprocess(tmpnam, real_path)
+
+            try:
+                rename(tmpnam, real_path)
+
+            except os.error:
+                if os.path.isfile(real_path):
+                    stat = os.stat(real_path)
+
+                    if stat.st_size==size and stat.st_mtime==timestamp:
+                        # size and stamp match, somebody did it just ahead of
+                        # us, so we're done
+                        return real_path
+                    elif os.name=='nt':     # Windows, del old file and retry
+                        unlink(real_path)
+                        rename(tmpnam, real_path)
+                        return real_path
+                raise
+
+        except os.error:
+            manager.extraction_error()  # report a user-friendly error
+
+        return real_path
+
+    def _get_eager_resources(self):
+        if self.eagers is None:
+            eagers = []
+            for name in ('native_libs.txt', 'eager_resources.txt'):
+                if self.has_metadata(name):
+                    eagers.extend(self.get_metadata_lines(name))
+            self.eagers = eagers
+        return self.eagers
+
+    def _index(self):
+        try:
+            return self._dirindex
+        except AttributeError:
+            ind = {}
+            for path in self.zipinfo:
+                parts = path.split(os.sep)
+                while parts:
+                    parent = os.sep.join(parts[:-1])
+                    if parent in ind:
+                        ind[parent].append(parts[-1])
+                        break
+                    else:
+                        ind[parent] = [parts.pop()]
+            self._dirindex = ind
+            return ind
+
+    def _has(self, fspath):
+        zip_path = self._zipinfo_name(fspath)
+        return zip_path in self.zipinfo or zip_path in self._index()
+
+    def _isdir(self,fspath):
+        return self._zipinfo_name(fspath) in self._index()
+
+    def _listdir(self,fspath):
+        return list(self._index().get(self._zipinfo_name(fspath), ()))
+
+    def _eager_to_zip(self,resource_name):
+        return self._zipinfo_name(self._fn(self.egg_root,resource_name))
+
+    def _resource_to_zip(self,resource_name):
+        return self._zipinfo_name(self._fn(self.module_path,resource_name))
+
+register_loader_type(zipimport.zipimporter, ZipProvider)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class FileMetadata(EmptyProvider):
+    """Metadata handler for standalone PKG-INFO files
+
+    Usage::
+
+        metadata = FileMetadata("/path/to/PKG-INFO")
+
+    This provider rejects all data and metadata requests except for PKG-INFO,
+    which is treated as existing, and will be the contents of the file at
+    the provided location.
+    """
+
+    def __init__(self,path):
+        self.path = path
+
+    def has_metadata(self,name):
+        return name=='PKG-INFO'
+
+    def get_metadata(self,name):
+        if name=='PKG-INFO':
+            return open(self.path,'rU').read()
+        raise KeyError("No metadata except PKG-INFO is available")
+
+    def get_metadata_lines(self,name):
+        return yield_lines(self.get_metadata(name))
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class PathMetadata(DefaultProvider):
+    """Metadata provider for egg directories
+
+    Usage::
+
+        # Development eggs:
+
+        egg_info = "/path/to/PackageName.egg-info"
+        base_dir = os.path.dirname(egg_info)
+        metadata = PathMetadata(base_dir, egg_info)
+        dist_name = os.path.splitext(os.path.basename(egg_info))[0]
+        dist = Distribution(basedir,project_name=dist_name,metadata=metadata)
+
+        # Unpacked egg directories:
+
+        egg_path = "/path/to/PackageName-ver-pyver-etc.egg"
+        metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO'))
+        dist = Distribution.from_filename(egg_path, metadata=metadata)
+    """
+    def __init__(self, path, egg_info):
+        self.module_path = path
+        self.egg_info = egg_info
+
+
+class EggMetadata(ZipProvider):
+    """Metadata provider for .egg files"""
+
+    def __init__(self, importer):
+        """Create a metadata provider from a zipimporter"""
+
+        self.zipinfo = zipimport._zip_directory_cache[importer.archive]
+        self.zip_pre = importer.archive+os.sep
+        self.loader = importer
+        if importer.prefix:
+            self.module_path = os.path.join(importer.archive, importer.prefix)
+        else:
+            self.module_path = importer.archive
+        self._setup_prefix()
+
+
+
+_distribution_finders = {}
+
+def register_finder(importer_type, distribution_finder):
+    """Register `distribution_finder` to find distributions in sys.path items
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `distribution_finder` is a callable that, passed a path
+    item and the importer instance, yields ``Distribution`` instances found on
+    that path item.  See ``pkg_resources.find_on_path`` for an example."""
+    _distribution_finders[importer_type] = distribution_finder
+
+
+def find_distributions(path_item, only=False):
+    """Yield distributions accessible via `path_item`"""
+    importer = get_importer(path_item)
+    finder = _find_adapter(_distribution_finders, importer)
+    return finder(importer, path_item, only)
+
+def find_in_zip(importer, path_item, only=False):
+    metadata = EggMetadata(importer)
+    if metadata.has_metadata('PKG-INFO'):
+        yield Distribution.from_filename(path_item, metadata=metadata)
+    if only:
+        return  # don't yield nested distros
+    for subitem in metadata.resource_listdir('/'):
+        if subitem.endswith('.egg'):
+            subpath = os.path.join(path_item, subitem)
+            for dist in find_in_zip(zipimport.zipimporter(subpath), subpath):
+                yield dist
+
+register_finder(zipimport.zipimporter, find_in_zip)
+
+def StringIO(*args, **kw):
+    """Thunk to load the real StringIO on demand"""
+    global StringIO
+    try:
+        from cStringIO import StringIO
+    except ImportError:
+        from StringIO import StringIO
+    return StringIO(*args,**kw)
+
+def find_nothing(importer, path_item, only=False):
+    return ()
+register_finder(object,find_nothing)
+
+def find_on_path(importer, path_item, only=False):
+    """Yield distributions accessible on a sys.path directory"""
+    path_item = _normalize_cached(path_item)
+
+    if os.path.isdir(path_item):
+        if path_item.lower().endswith('.egg'):
+            # unpacked egg
+            yield Distribution.from_filename(
+                path_item, metadata=PathMetadata(
+                    path_item, os.path.join(path_item,'EGG-INFO')
+                )
+            )
+        else:
+            # scan for .egg and .egg-info in directory
+            for entry in os.listdir(path_item):
+                lower = entry.lower()
+                if lower.endswith('.egg-info'):
+                    fullpath = os.path.join(path_item, entry)
+                    if os.path.isdir(fullpath):
+                        # egg-info directory, allow getting metadata
+                        metadata = PathMetadata(path_item, fullpath)
+                    else:
+                        metadata = FileMetadata(fullpath)
+                    yield Distribution.from_location(
+                        path_item,entry,metadata,precedence=DEVELOP_DIST
+                    )
+                elif not only and lower.endswith('.egg'):
+                    for dist in find_distributions(os.path.join(path_item, entry)):
+                        yield dist
+                elif not only and lower.endswith('.egg-link'):
+                    for line in file(os.path.join(path_item, entry)):
+                        if not line.strip(): continue
+                        for item in find_distributions(line.rstrip()):
+                            yield item
+
+register_finder(pkgutil.ImpImporter, find_on_path)
+
+_namespace_handlers = {}
+_namespace_packages = {}
+
+def register_namespace_handler(importer_type, namespace_handler):
+    """Register `namespace_handler` to declare namespace packages
+
+    `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item
+    handler), and `namespace_handler` is a callable like this::
+
+        def namespace_handler(importer,path_entry,moduleName,module):
+            # return a path_entry to use for child packages
+
+    Namespace handlers are only called if the importer object has already
+    agreed that it can handle the relevant path item, and they should only
+    return a subpath if the module __path__ does not already contain an
+    equivalent subpath.  For an example namespace handler, see
+    ``pkg_resources.file_ns_handler``.
+    """
+    _namespace_handlers[importer_type] = namespace_handler
+
+def _handle_ns(packageName, path_item):
+    """Ensure that named package includes a subpath of path_item (if needed)"""
+    importer = get_importer(path_item)
+    if importer is None:
+        return None
+    loader = importer.find_module(packageName)
+    if loader is None:
+        return None
+    module = sys.modules.get(packageName)
+    if module is None:
+        module = sys.modules[packageName] = new.module(packageName)
+        module.__path__ = []; _set_parent_ns(packageName)
+    elif not hasattr(module,'__path__'):
+        raise TypeError("Not a package:", packageName)
+    handler = _find_adapter(_namespace_handlers, importer)
+    subpath = handler(importer,path_item,packageName,module)
+    if subpath is not None:
+        path = module.__path__; path.append(subpath)
+        loader.load_module(packageName); module.__path__ = path
+    return subpath
+
+def declare_namespace(packageName):
+    """Declare that package 'packageName' is a namespace package"""
+
+    imp.acquire_lock()
+    try:
+        if packageName in _namespace_packages:
+            return
+
+        path, parent = sys.path, None
+        if '.' in packageName:
+            parent = '.'.join(packageName.split('.')[:-1])
+            declare_namespace(parent)
+            __import__(parent)
+            try:
+                path = sys.modules[parent].__path__
+            except AttributeError:
+                raise TypeError("Not a package:", parent)
+
+        # Track what packages are namespaces, so when new path items are added,
+        # they can be updated
+        _namespace_packages.setdefault(parent,[]).append(packageName)
+        _namespace_packages.setdefault(packageName,[])
+
+        for path_item in path:
+            # Ensure all the parent's path items are reflected in the child,
+            # if they apply
+            _handle_ns(packageName, path_item)
+
+    finally:
+        imp.release_lock()
+
+def fixup_namespace_packages(path_item, parent=None):
+    """Ensure that previously-declared namespace packages include path_item"""
+    imp.acquire_lock()
+    try:
+        for package in _namespace_packages.get(parent,()):
+            subpath = _handle_ns(package, path_item)
+            if subpath: fixup_namespace_packages(subpath,package)
+    finally:
+        imp.release_lock()
+
+def file_ns_handler(importer, path_item, packageName, module):
+    """Compute an ns-package subpath for a filesystem or zipfile importer"""
+
+    subpath = os.path.join(path_item, packageName.split('.')[-1])
+    normalized = _normalize_cached(subpath)
+    for item in module.__path__:
+        if _normalize_cached(item)==normalized:
+            break
+    else:
+        # Only return the path if it's not already there
+        return subpath
+
+register_namespace_handler(pkgutil.ImpImporter, file_ns_handler)
+register_namespace_handler(zipimport.zipimporter, file_ns_handler)
+
+
+def null_ns_handler(importer, path_item, packageName, module):
+    return None
+
+register_namespace_handler(object,null_ns_handler)
+
+
+def normalize_path(filename):
+    """Normalize a file/dir name for comparison purposes"""
+    return os.path.normcase(os.path.realpath(filename))
+
+def _normalize_cached(filename,_cache={}):
+    try:
+        return _cache[filename]
+    except KeyError:
+        _cache[filename] = result = normalize_path(filename)
+        return result
+
+def _set_parent_ns(packageName):
+    parts = packageName.split('.')
+    name = parts.pop()
+    if parts:
+        parent = '.'.join(parts)
+        setattr(sys.modules[parent], name, sys.modules[packageName])
+
+
+def yield_lines(strs):
+    """Yield non-empty/non-comment lines of a ``basestring`` or sequence"""
+    if isinstance(strs,basestring):
+        for s in strs.splitlines():
+            s = s.strip()
+            if s and not s.startswith('#'):     # skip blank lines/comments
+                yield s
+    else:
+        for ss in strs:
+            for s in yield_lines(ss):
+                yield s
+
+LINE_END = re.compile(r"\s*(#.*)?$").match         # whitespace and comment
+CONTINUE = re.compile(r"\s*\\\s*(#.*)?$").match    # line continuation
+DISTRO   = re.compile(r"\s*((\w|[-.])+)").match    # Distribution or extra
+VERSION  = re.compile(r"\s*(<=?|>=?|==|!=)\s*((\w|[-.])+)").match  # ver. info
+COMMA    = re.compile(r"\s*,").match               # comma between items
+OBRACKET = re.compile(r"\s*\[").match
+CBRACKET = re.compile(r"\s*\]").match
+MODULE   = re.compile(r"\w+(\.\w+)*$").match
+EGG_NAME = re.compile(
+    r"(?P<name>[^-]+)"
+    r"( -(?P<ver>[^-]+) (-py(?P<pyver>[^-]+) (-(?P<plat>.+))? )? )?",
+    re.VERBOSE | re.IGNORECASE
+).match
+
+component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE)
+replace = {'pre':'c', 'preview':'c','-':'final-','rc':'c'}.get
+
+def _parse_version_parts(s):
+    for part in component_re.split(s):
+        part = replace(part,part)
+        if not part or part=='.':
+            continue
+        if part[:1] in '0123456789':
+            yield part.zfill(8)    # pad for numeric comparison
+        else:
+            yield '*'+part
+
+    yield '*final'  # ensure that alpha/beta/candidate are before final
+
+def parse_version(s):
+    """Convert a version string to a chronologically-sortable key
+
+    This is a rough cross between distutils' StrictVersion and LooseVersion;
+    if you give it versions that would work with StrictVersion, then it behaves
+    the same; otherwise it acts like a slightly-smarter LooseVersion. It is
+    *possible* to create pathological version coding schemes that will fool
+    this parser, but they should be very rare in practice.
+
+    The returned value will be a tuple of strings.  Numeric portions of the
+    version are padded to 8 digits so they will compare numerically, but
+    without relying on how numbers compare relative to strings.  Dots are
+    dropped, but dashes are retained.  Trailing zeros between alpha segments
+    or dashes are suppressed, so that e.g. "2.4.0" is considered the same as
+    "2.4". Alphanumeric parts are lower-cased.
+
+    The algorithm assumes that strings like "-" and any alpha string that
+    alphabetically follows "final"  represents a "patch level".  So, "2.4-1"
+    is assumed to be a branch or patch of "2.4", and therefore "2.4.1" is
+    considered newer than "2.4-1", whic in turn is newer than "2.4".
+
+    Strings like "a", "b", "c", "alpha", "beta", "candidate" and so on (that
+    come before "final" alphabetically) are assumed to be pre-release versions,
+    so that the version "2.4" is considered newer than "2.4a1".
+
+    Finally, to handle miscellaneous cases, the strings "pre", "preview", and
+    "rc" are treated as if they were "c", i.e. as though they were release
+    candidates, and therefore are not as new as a version string that does not
+    contain them.
+    """
+    parts = []
+    for part in _parse_version_parts(s.lower()):
+        if part.startswith('*'):
+            if part<'*final':   # remove '-' before a prerelease tag
+                while parts and parts[-1]=='*final-': parts.pop()
+            # remove trailing zeros from each series of numeric parts
+            while parts and parts[-1]=='00000000':
+                parts.pop()
+        parts.append(part)
+    return tuple(parts)
+
+class EntryPoint(object):
+    """Object representing an advertised importable object"""
+
+    def __init__(self, name, module_name, attrs=(), extras=(), dist=None):
+        if not MODULE(module_name):
+            raise ValueError("Invalid module name", module_name)
+        self.name = name
+        self.module_name = module_name
+        self.attrs = tuple(attrs)
+        self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras
+        self.dist = dist
+
+    def __str__(self):
+        s = "%s = %s" % (self.name, self.module_name)
+        if self.attrs:
+            s += ':' + '.'.join(self.attrs)
+        if self.extras:
+            s += ' [%s]' % ','.join(self.extras)
+        return s
+
+    def __repr__(self):
+        return "EntryPoint.parse(%r)" % str(self)
+
+    def load(self, require=True, env=None, installer=None):
+        if require: self.require(env, installer)
+        entry = __import__(self.module_name, globals(),globals(), ['__name__'])
+        for attr in self.attrs:
+            try:
+                entry = getattr(entry,attr)
+            except AttributeError:
+                raise ImportError("%r has no %r attribute" % (entry,attr))
+        return entry
+
+    def require(self, env=None, installer=None):
+        if self.extras and not self.dist:
+            raise UnknownExtra("Can't require() without a distribution", self)
+        map(working_set.add,
+            working_set.resolve(self.dist.requires(self.extras),env,installer))
+
+
+
+    #@classmethod
+    def parse(cls, src, dist=None):
+        """Parse a single entry point from string `src`
+
+        Entry point syntax follows the form::
+
+            name = some.module:some.attr [extra1,extra2]
+
+        The entry name and module name are required, but the ``:attrs`` and
+        ``[extras]`` parts are optional
+        """
+        try:
+            attrs = extras = ()
+            name,value = src.split('=',1)
+            if '[' in value:
+                value,extras = value.split('[',1)
+                req = Requirement.parse("x["+extras)
+                if req.specs: raise ValueError
+                extras = req.extras
+            if ':' in value:
+                value,attrs = value.split(':',1)
+                if not MODULE(attrs.rstrip()):
+                    raise ValueError
+                attrs = attrs.rstrip().split('.')
+        except ValueError:
+            raise ValueError(
+                "EntryPoint must be in 'name=module:attrs [extras]' format",
+                src
+            )
+        else:
+            return cls(name.strip(), value.lstrip(), attrs, extras, dist)
+
+    parse = classmethod(parse)
+
+
+
+
+
+
+
+
+    #@classmethod
+    def parse_group(cls, group, lines, dist=None):
+        """Parse an entry point group"""
+        if not MODULE(group):
+            raise ValueError("Invalid group name", group)
+        this = {}
+        for line in yield_lines(lines):
+            ep = cls.parse(line, dist)
+            if ep.name in this:
+                raise ValueError("Duplicate entry point", group, ep.name)
+            this[ep.name]=ep
+        return this
+
+    parse_group = classmethod(parse_group)
+
+    #@classmethod
+    def parse_map(cls, data, dist=None):
+        """Parse a map of entry point groups"""
+        if isinstance(data,dict):
+            data = data.items()
+        else:
+            data = split_sections(data)
+        maps = {}
+        for group, lines in data:
+            if group is None:
+                if not lines:
+                    continue
+                raise ValueError("Entry points must be listed in groups")
+            group = group.strip()
+            if group in maps:
+                raise ValueError("Duplicate group name", group)
+            maps[group] = cls.parse_group(group, lines, dist)
+        return maps
+
+    parse_map = classmethod(parse_map)
+
+
+
+
+
+
+class Distribution(object):
+    """Wrap an actual or potential sys.path entry w/metadata"""
+    def __init__(self,
+        location=None, metadata=None, project_name=None, version=None,
+        py_version=PY_MAJOR, platform=None, precedence = EGG_DIST
+    ):
+        self.project_name = safe_name(project_name or 'Unknown')
+        if version is not None:
+            self._version = safe_version(version)
+        self.py_version = py_version
+        self.platform = platform
+        self.location = location
+        self.precedence = precedence
+        self._provider = metadata or empty_provider
+
+    #@classmethod
+    def from_location(cls,location,basename,metadata=None,**kw):
+        project_name, version, py_version, platform = [None]*4
+        basename, ext = os.path.splitext(basename)
+        if ext.lower() in (".egg",".egg-info"):
+            match = EGG_NAME(basename)
+            if match:
+                project_name, version, py_version, platform = match.group(
+                    'name','ver','pyver','plat'
+                )
+        return cls(
+            location, metadata, project_name=project_name, version=version,
+            py_version=py_version, platform=platform, **kw
+        )
+    from_location = classmethod(from_location)
+
+    hashcmp = property(
+        lambda self: (
+            getattr(self,'parsed_version',()), self.precedence, self.key,
+            -len(self.location or ''), self.location, self.py_version,
+            self.platform
+        )
+    )
+    def __cmp__(self, other): return cmp(self.hashcmp, other)
+    def __hash__(self): return hash(self.hashcmp)
+
+    # These properties have to be lazy so that we don't have to load any
+    # metadata until/unless it's actually needed.  (i.e., some distributions
+    # may not know their name or version without loading PKG-INFO)
+
+    #@property
+    def key(self):
+        try:
+            return self._key
+        except AttributeError:
+            self._key = key = self.project_name.lower()
+            return key
+    key = property(key)
+
+    #@property
+    def parsed_version(self):
+        try:
+            return self._parsed_version
+        except AttributeError:
+            self._parsed_version = pv = parse_version(self.version)
+            return pv
+
+    parsed_version = property(parsed_version)
+
+    #@property
+    def version(self):
+        try:
+            return self._version
+        except AttributeError:
+            for line in self._get_metadata('PKG-INFO'):
+                if line.lower().startswith('version:'):
+                    self._version = safe_version(line.split(':',1)[1].strip())
+                    return self._version
+            else:
+                raise ValueError(
+                    "Missing 'Version:' header and/or PKG-INFO file", self
+                )
+    version = property(version)
+
+
+
+
+    #@property
+    def _dep_map(self):
+        try:
+            return self.__dep_map
+        except AttributeError:
+            dm = self.__dep_map = {None: []}
+            for name in 'requires.txt', 'depends.txt':
+                for extra,reqs in split_sections(self._get_metadata(name)):
+                    if extra: extra = safe_extra(extra)
+                    dm.setdefault(extra,[]).extend(parse_requirements(reqs))
+            return dm
+    _dep_map = property(_dep_map)
+
+    def requires(self,extras=()):
+        """List of Requirements needed for this distro if `extras` are used"""
+        dm = self._dep_map
+        deps = []
+        deps.extend(dm.get(None,()))
+        for ext in extras:
+            try:
+                deps.extend(dm[safe_extra(ext)])
+            except KeyError:
+                raise UnknownExtra(
+                    "%s has no such extra feature %r" % (self, ext)
+                )
+        return deps
+
+    def _get_metadata(self,name):
+        if self.has_metadata(name):
+            for line in self.get_metadata_lines(name):
+                yield line
+
+    def activate(self,path=None):
+        """Ensure distribution is importable on `path` (default=sys.path)"""
+        if path is None: path = sys.path
+        self.insert_on(path)
+        if path is sys.path:
+            fixup_namespace_packages(self.location)
+            for pkg in self._get_metadata('namespace_packages.txt'):
+                if pkg in sys.modules: declare_namespace(pkg)
+
+    def egg_name(self):
+        """Return what this distribution's standard .egg filename should be"""
+        filename = "%s-%s-py%s" % (
+            to_filename(self.project_name), to_filename(self.version),
+            self.py_version or PY_MAJOR
+        )
+
+        if self.platform:
+            filename += '-'+self.platform
+        return filename
+
+    def __repr__(self):
+        if self.location:
+            return "%s (%s)" % (self,self.location)
+        else:
+            return str(self)
+
+    def __str__(self):
+        try: version = getattr(self,'version',None)
+        except ValueError: version = None
+        version = version or "[unknown version]"
+        return "%s %s" % (self.project_name,version)
+
+    def __getattr__(self,attr):
+        """Delegate all unrecognized public attributes to .metadata provider"""
+        if attr.startswith('_'):
+            raise AttributeError,attr
+        return getattr(self._provider, attr)
+
+    #@classmethod
+    def from_filename(cls,filename,metadata=None, **kw):
+        return cls.from_location(
+            _normalize_cached(filename), os.path.basename(filename), metadata,
+            **kw
+        )
+    from_filename = classmethod(from_filename)
+
+    def as_requirement(self):
+        """Return a ``Requirement`` that matches this distribution exactly"""
+        return Requirement.parse('%s==%s' % (self.project_name, self.version))
+
+    def load_entry_point(self, group, name):
+        """Return the `name` entry point of `group` or raise ImportError"""
+        ep = self.get_entry_info(group,name)
+        if ep is None:
+            raise ImportError("Entry point %r not found" % ((group,name),))
+        return ep.load()
+
+    def get_entry_map(self, group=None):
+        """Return the entry point map for `group`, or the full entry map"""
+        try:
+            ep_map = self._ep_map
+        except AttributeError:
+            ep_map = self._ep_map = EntryPoint.parse_map(
+                self._get_metadata('entry_points.txt'), self
+            )
+        if group is not None:
+            return ep_map.get(group,{})
+        return ep_map
+
+    def get_entry_info(self, group, name):
+        """Return the EntryPoint object for `group`+`name`, or ``None``"""
+        return self.get_entry_map(group).get(name)
+
+    def insert_on(self, path, loc = None):
+        """Insert self.location in path before its nearest parent directory"""
+        loc = loc or self.location
+        if not loc: return
+        if path is sys.path:
+            self.check_version_conflict()
+        best, pos = 0, -1
+        for p,item in enumerate(path):
+            item = _normalize_cached(item)
+            if loc.startswith(item) and len(item)>best and loc<>item:
+                best, pos = len(item), p
+        if pos==-1:
+            if loc not in path: path.append(loc)
+        elif loc not in path[:pos+1]:
+            while loc in path: path.remove(loc)
+            path.insert(pos,loc)
+
+
+    def check_version_conflict(self):
+        if self.key=='setuptools':
+            return      # ignore the inevitable setuptools self-conflicts  :(
+
+        nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt'))
+        loc = normalize_path(self.location)
+        for modname in self._get_metadata('top_level.txt'):
+            if (modname not in sys.modules or modname in nsp
+                or modname in _namespace_packages
+            ):
+                continue
+
+            fn = getattr(sys.modules[modname], '__file__', None)
+            if fn and normalize_path(fn).startswith(loc):
+                continue
+            issue_warning(
+                "Module %s was already imported from %s, but %s is being added"
+                " to sys.path" % (modname, fn, self.location),
+            )
+
+    def has_version(self):
+        try:
+            self.version
+        except ValueError:
+            issue_warning("Unbuilt egg for "+repr(self))
+            return False
+        return True
+
+    def clone(self,**kw):
+        """Copy this distribution, substituting in any changed keyword args"""
+        for attr in (
+            'project_name', 'version', 'py_version', 'platform', 'location',
+            'precedence'
+        ):
+            kw.setdefault(attr, getattr(self,attr,None))
+        kw.setdefault('metadata', self._provider)
+        return self.__class__(**kw)
+
+
+
+
+    #@property
+    def extras(self):
+        return [dep for dep in self._dep_map if dep]
+    extras = property(extras)
+
+
+def issue_warning(*args,**kw):
+    level = 1
+    g = globals()
+    try:
+        # find the first stack frame that is *not* code in
+        # the pkg_resources module, to use for the warning
+        while sys._getframe(level).f_globals is g:
+            level += 1
+    except ValueError:
+        pass
+    from warnings import warn
+    warn(stacklevel = level+1, *args, **kw)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def parse_requirements(strs):
+    """Yield ``Requirement`` objects for each specification in `strs`
+
+    `strs` must be an instance of ``basestring``, or a (possibly-nested)
+    iterable thereof.
+    """
+    # create a steppable iterator, so we can handle \-continuations
+    lines = iter(yield_lines(strs))
+
+    def scan_list(ITEM,TERMINATOR,line,p,groups,item_name):
+
+        items = []
+
+        while not TERMINATOR(line,p):
+            if CONTINUE(line,p):
+                try:
+                    line = lines.next(); p = 0
+                except StopIteration:
+                    raise ValueError(
+                        "\\ must not appear on the last nonblank line"
+                    )
+
+            match = ITEM(line,p)
+            if not match:
+                raise ValueError("Expected "+item_name+" in",line,"at",line[p:])
+
+            items.append(match.group(*groups))
+            p = match.end()
+
+            match = COMMA(line,p)
+            if match:
+                p = match.end() # skip the comma
+            elif not TERMINATOR(line,p):
+                raise ValueError(
+                    "Expected ',' or end-of-list in",line,"at",line[p:]
+                )
+
+        match = TERMINATOR(line,p)
+        if match: p = match.end()   # skip the terminator, if any
+        return line, p, items
+
+    for line in lines:
+        match = DISTRO(line)
+        if not match:
+            raise ValueError("Missing distribution spec", line)
+        project_name = match.group(1)
+        p = match.end()
+        extras = []
+
+        match = OBRACKET(line,p)
+        if match:
+            p = match.end()
+            line, p, extras = scan_list(
+                DISTRO, CBRACKET, line, p, (1,), "'extra' name"
+            )
+
+        line, p, specs = scan_list(VERSION,LINE_END,line,p,(1,2),"version spec")
+        specs = [(op,safe_version(val)) for op,val in specs]
+        yield Requirement(project_name, specs, extras)
+
+
+def _sort_dists(dists):
+    tmp = [(dist.hashcmp,dist) for dist in dists]
+    tmp.sort()
+    dists[::-1] = [d for hc,d in tmp]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class Requirement:
+    def __init__(self, project_name, specs, extras):
+        """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!"""
+        self.unsafe_name, project_name = project_name, safe_name(project_name)
+        self.project_name, self.key = project_name, project_name.lower()
+        index = [(parse_version(v),state_machine[op],op,v) for op,v in specs]
+        index.sort()
+        self.specs = [(op,ver) for parsed,trans,op,ver in index]
+        self.index, self.extras = index, tuple(map(safe_extra,extras))
+        self.hashCmp = (
+            self.key, tuple([(op,parsed) for parsed,trans,op,ver in index]),
+            ImmutableSet(self.extras)
+        )
+        self.__hash = hash(self.hashCmp)
+
+    def __str__(self):
+        specs = ','.join([''.join(s) for s in self.specs])
+        extras = ','.join(self.extras)
+        if extras: extras = '[%s]' % extras
+        return '%s%s%s' % (self.project_name, extras, specs)
+
+    def __eq__(self,other):
+        return isinstance(other,Requirement) and self.hashCmp==other.hashCmp
+
+    def __contains__(self,item):
+        if isinstance(item,Distribution):
+            if item.key <> self.key: return False
+            if self.index: item = item.parsed_version  # only get if we need it
+        elif isinstance(item,basestring):
+            item = parse_version(item)
+        last = None
+        for parsed,trans,op,ver in self.index:
+            action = trans[cmp(item,parsed)]
+            if action=='F':     return False
+            elif action=='T':   return True
+            elif action=='+':   last = True
+            elif action=='-' or last is None:   last = False
+        if last is None: last = True    # no rules encountered
+        return last
+
+
+    def __hash__(self):
+        return self.__hash
+
+    def __repr__(self): return "Requirement.parse(%r)" % str(self)
+
+    #@staticmethod
+    def parse(s):
+        reqs = list(parse_requirements(s))
+        if reqs:
+            if len(reqs)==1:
+                return reqs[0]
+            raise ValueError("Expected only one requirement", s)
+        raise ValueError("No requirements found", s)
+
+    parse = staticmethod(parse)
+
+state_machine = {
+    #       =><
+    '<' :  '--T',
+    '<=':  'T-T',
+    '>' :  'F+F',
+    '>=':  'T+F',
+    '==':  'T..',
+    '!=':  'F++',
+}
+
+
+def _get_mro(cls):
+    """Get an mro for a type or classic class"""
+    if not isinstance(cls,type):
+        class cls(cls,object): pass
+        return cls.__mro__[1:]
+    return cls.__mro__
+
+def _find_adapter(registry, ob):
+    """Return an adapter factory for `ob` from `registry`"""
+    for t in _get_mro(getattr(ob, '__class__', type(ob))):
+        if t in registry:
+            return registry[t]
+
+
+def ensure_directory(path):
+    """Ensure that the parent directory of `path` exists"""
+    dirname = os.path.dirname(path)
+    if not os.path.isdir(dirname):
+        os.makedirs(dirname)
+
+def split_sections(s):
+    """Split a string or iterable thereof into (section,content) pairs
+
+    Each ``section`` is a stripped version of the section header ("[section]")
+    and each ``content`` is a list of stripped lines excluding blank lines and
+    comment-only lines.  If there are any such lines before the first section
+    header, they're returned in a first ``section`` of ``None``.
+    """
+    section = None
+    content = []
+    for line in yield_lines(s):
+        if line.startswith("["):
+            if line.endswith("]"):
+                if section or content:
+                    yield section, content
+                section = line[1:-1].strip()
+                content = []
+            else:
+                raise ValueError("Invalid section heading", line)
+        else:
+            content.append(line)
+
+    # wrap up last segment
+    yield section, content
+
+def _mkstemp(*args,**kw):
+    from tempfile import mkstemp
+    old_open = os.open
+    try:
+        os.open = os_open   # temporarily bypass sandboxing
+        return mkstemp(*args,**kw)
+    finally:
+        os.open = old_open  # and then put it back
+
+
+# Set up global resource manager
+_manager = ResourceManager()
+def _initialize(g):
+    for name in dir(_manager):
+        if not name.startswith('_'):
+            g[name] = getattr(_manager, name)
+_initialize(globals())
+
+# Prepare the master working set and make the ``require()`` API available
+working_set = WorkingSet()
+try:
+    # Does the main program list any requirements?
+    from __main__ import __requires__
+except ImportError:
+    pass # No: just use the default working set based on sys.path
+else:
+    # Yes: ensure the requirements are met, by prefixing sys.path if necessary
+    try:
+        working_set.require(__requires__)
+    except VersionConflict:     # try it without defaults already on sys.path
+        working_set = WorkingSet([])    # by starting with an empty path
+        for dist in working_set.resolve(
+            parse_requirements(__requires__), Environment()
+        ):
+            working_set.add(dist)
+        for entry in sys.path:  # add any missing entries from sys.path
+            if entry not in working_set.entries:
+                working_set.add_entry(entry)
+        sys.path[:] = working_set.entries   # then copy back to sys.path
+
+require = working_set.require
+iter_entry_points = working_set.iter_entry_points
+add_activation_listener = working_set.subscribe
+run_script = working_set.run_script
+run_main = run_script   # backward compatibility
+# Activate all distributions already on sys.path, and ensure that
+# all distributions added to the working set in the future (e.g. by
+# calling ``require()``) will get activated as well.
+add_activation_listener(lambda dist: dist.activate())
+working_set.entries=[]; map(working_set.add_entry,sys.path) # match order
diff --git a/Lib/pkgutil.py b/Lib/pkgutil.py
index fbd708a..7316892 100644
--- a/Lib/pkgutil.py
+++ b/Lib/pkgutil.py
@@ -1,7 +1,432 @@
 """Utilities to support packages."""
 
+# NOTE: This module must remain compatible with Python 2.3, as it is shared
+# by setuptools for distribution with Python 2.3 and up.
+
 import os
 import sys
+import imp
+import os.path
+from types import ModuleType
+
+__all__ = [
+    'get_importer', 'iter_importers', 'get_loader', 'find_loader',
+    'walk_packages', 'iter_modules',
+    'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
+]
+
+def read_code(stream):
+    # This helper is needed in order for the PEP 302 emulation to
+    # correctly handle compiled files
+    import marshal
+
+    magic = stream.read(4)
+    if magic != imp.get_magic():
+        return None
+
+    stream.read(4) # Skip timestamp
+    return marshal.load(stream)
+
+
+def simplegeneric(func):
+    """Make a trivial single-dispatch generic function"""
+    registry = {}
+    def wrapper(*args,**kw):
+        ob = args[0]
+        try:
+            cls = ob.__class__
+        except AttributeError:
+            cls = type(ob)
+        try:
+            mro = cls.__mro__
+        except AttributeError:
+            try:
+                class cls(cls,object): pass
+                mro = cls.__mro__[1:]
+            except TypeError:
+                mro = object,   # must be an ExtensionClass or some such  :(
+        for t in mro:
+            if t in registry:
+                return registry[t](*args,**kw)
+        else:
+            return func(*args,**kw)
+    try:
+        wrapper.__name__ = func.__name__
+    except (TypeError,AttributeError):
+        pass    # Python 2.3 doesn't allow functions to be renamed
+
+    def register(typ, func=None):
+        if func is None:
+            return lambda f: register(typ, f)
+        registry[typ] = func
+        return func
+
+    wrapper.__dict__ = func.__dict__
+    wrapper.__doc__ = func.__doc__
+    wrapper.register = register
+    return wrapper
+
+
+def walk_packages(path=None, prefix='', onerror=None):
+    """Yield submodule names+loaders recursively, for path or sys.path"""
+
+    def seen(p,m={}):
+        if p in m: return True
+        m[p] = True
+
+    for importer, name, ispkg in iter_modules(path, prefix):
+        yield importer, name, ispkg
+
+        if ispkg:
+            try:
+                __import__(name)
+            except ImportError:
+                if onerror is not None:
+                    onerror()
+            else:
+                path = getattr(sys.modules[name], '__path__', None) or []
+
+                # don't traverse path items we've seen before
+                path = [p for p in path if not seen(p)]
+
+                for item in walk_packages(path, name+'.'):
+                    yield item
+
+
+def iter_modules(path=None, prefix=''):
+    """Yield submodule names+loaders for path or sys.path"""
+    if path is None:
+        importers = iter_importers()
+    else:
+        importers = map(get_importer, path)
+
+    yielded = {}
+    for i in importers:
+        for name, ispkg in iter_importer_modules(i, prefix):
+            if name not in yielded:
+                yielded[name] = 1
+                yield i, name, ispkg
+
+
+#@simplegeneric
+def iter_importer_modules(importer, prefix=''):
+    if not hasattr(importer,'iter_modules'):
+        return []
+    return importer.iter_modules(prefix)
+
+iter_importer_modules = simplegeneric(iter_importer_modules)
+
+
+class ImpImporter:
+    """PEP 302 Importer that wraps Python's "classic" import algorithm
+
+    ImpImporter(dirname) produces a PEP 302 importer that searches that
+    directory.  ImpImporter(None) produces a PEP 302 importer that searches
+    the current sys.path, plus any modules that are frozen or built-in.
+
+    Note that ImpImporter does not currently support being used by placement
+    on sys.meta_path.
+    """
+
+    def __init__(self, path=None):
+        self.path = path
+
+    def find_module(self, fullname, path=None):
+        # Note: we ignore 'path' argument since it is only used via meta_path
+        subname = fullname.split(".")[-1]
+        if subname != fullname and self.path is None:
+            return None
+        if self.path is None:
+            path = None
+        else:
+            path = [os.path.realpath(self.path)]
+        try:
+            file, filename, etc = imp.find_module(subname, path)
+        except ImportError:
+            return None
+        return ImpLoader(fullname, file, filename, etc)
+
+    def iter_modules(self, prefix=''):
+        if self.path is None or not os.path.isdir(self.path):
+            return
+
+        yielded = {}
+        import inspect
+
+        filenames = os.listdir(self.path)
+        filenames.sort()  # handle packages before same-named modules
+
+        for fn in filenames:
+            modname = inspect.getmodulename(fn)
+            if modname=='__init__' or modname in yielded:
+                continue
+
+            path = os.path.join(self.path, fn)
+            ispkg = False
+
+            if not modname and os.path.isdir(path) and '.' not in fn:
+                modname = fn
+                for fn in os.listdir(path):
+                    subname = inspect.getmodulename(fn)
+                    if subname=='__init__':
+                        ispkg = True
+                        break
+                else:
+                    continue    # not a package
+
+            if modname and '.' not in modname:
+                yielded[modname] = 1
+                yield prefix + modname, ispkg
+
+
+class ImpLoader:
+    """PEP 302 Loader that wraps Python's "classic" import algorithm
+    """
+    code = source = None
+
+    def __init__(self, fullname, file, filename, etc):
+        self.file = file
+        self.filename = filename
+        self.fullname = fullname
+        self.etc = etc
+
+    def load_module(self, fullname):
+        self._reopen()
+        try:
+            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
+        finally:
+            if self.file:
+                self.file.close()
+        # Note: we don't set __loader__ because we want the module to look
+        # normal; i.e. this is just a wrapper for standard import machinery
+        return mod
+
+    def get_data(self, pathname):
+        return open(pathname, "rb").read()
+
+    def _reopen(self):
+        if self.file and self.file.closed:
+            if mod_type==imp.PY_SOURCE:
+                self.file = open(self.filename, 'rU')
+            elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
+                self.file = open(self.filename, 'rb')
+
+    def _fix_name(self, fullname):
+        if fullname is None:
+            fullname = self.fullname
+        elif fullname != self.fullname:
+            raise ImportError("Loader for module %s cannot handle "
+                              "module %s" % (self.fullname, fullname))
+        return fullname
+
+    def is_package(self, fullname):
+        fullname = self._fix_name(fullname)
+        return self.etc[2]==imp.PKG_DIRECTORY
+
+    def get_code(self, fullname=None):
+        fullname = self._fix_name(fullname)
+        if self.code is None:
+            mod_type = self.etc[2]
+            if mod_type==imp.PY_SOURCE:
+                source = self.get_source(fullname)
+                self.code = compile(source, self.filename, 'exec')
+            elif mod_type==imp.PY_COMPILED:
+                self._reopen()
+                try:
+                    self.code = read_code(self.file)
+                finally:
+                    self.file.close()
+            elif mod_type==imp.PKG_DIRECTORY:
+                self.code = self._get_delegate().get_code()
+        return self.code
+
+    def get_source(self, fullname=None):
+        fullname = self._fix_name(fullname)
+        if self.source is None:
+            mod_type = self.etc[2]
+            if mod_type==imp.PY_SOURCE:
+                self._reopen()
+                try:
+                    self.source = self.file.read()
+                finally:
+                    self.file.close()
+            elif mod_type==imp.PY_COMPILED:
+                if os.path.exists(self.filename[:-1]):
+                    f = open(self.filename[:-1], 'rU')
+                    self.source = f.read()
+                    f.close()
+            elif mod_type==imp.PKG_DIRECTORY:
+                self.source = self._get_delegate().get_source()
+        return self.source
+
+
+    def _get_delegate(self):
+        return ImpImporter(self.filename).find_module('__init__')
+
+    def get_filename(self, fullname=None):
+        fullname = self._fix_name(fullname)
+        mod_type = self.etc[2]
+        if self.etc[2]==imp.PKG_DIRECTORY:
+            return self._get_delegate().get_filename()
+        elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
+            return self.filename
+        return None
+
+
+try:
+    import zipimport
+    from zipimport import zipimporter
+
+    def iter_zipimport_modules(importer, prefix=''):
+        dirlist = zipimport._zip_directory_cache[importer.archive].keys()
+        dirlist.sort()
+        _prefix = importer.prefix
+        plen = len(_prefix)
+        yielded = {}
+        import inspect
+        for fn in dirlist:
+            if not fn.startswith(_prefix):
+                continue
+
+            fn = fn[plen:].split(os.sep)
+
+            if len(fn)==2 and fn[1].startswith('__init__.py'):
+                if fn[0] not in yielded:
+                    yielded[fn[0]] = 1
+                    yield fn[0], True
+
+            if len(fn)!=1:
+                continue
+
+            modname = inspect.getmodulename(fn[0])
+            if modname=='__init__':
+                continue
+
+            if modname and '.' not in modname and modname not in yielded:
+                yielded[modname] = 1
+                yield prefix + modname, False
+
+    iter_importer_modules.register(zipimporter, iter_zipimport_modules)
+
+except ImportError:
+    pass
+
+
+def get_importer(path_item):
+    """Retrieve a PEP 302 importer for the given path item
+
+    The returned importer is cached in sys.path_importer_cache
+    if it was newly created by a path hook.
+
+    If there is no importer, a wrapper around the basic import
+    machinery is returned. This wrapper is never inserted into
+    the importer cache (None is inserted instead).
+
+    The cache (or part of it) can be cleared manually if a
+    rescan of sys.path_hooks is necessary.
+    """
+    try:
+        importer = sys.path_importer_cache[path_item]
+    except KeyError:
+        for path_hook in sys.path_hooks:
+            try:
+                importer = path_hook(path_item)
+                break
+            except ImportError:
+                pass
+        else:
+            importer = None
+        sys.path_importer_cache.setdefault(path_item,importer)
+
+    if importer is None:
+        try:
+            importer = ImpImporter(path_item)
+        except ImportError:
+            pass
+    return importer
+
+
+def iter_importers(fullname=""):
+    """Yield PEP 302 importers for the given module name
+
+    If fullname contains a '.', the importers will be for the package
+    containing fullname, otherwise they will be importers for sys.meta_path,
+    sys.path, and Python's "classic" import machinery, in that order.  If
+    the named module is in a package, that package is imported as a side
+    effect of invoking this function.
+
+    Non PEP 302 mechanisms (e.g. the Windows registry) used by the
+    standard import machinery to find files in alternative locations
+    are partially supported, but are searched AFTER sys.path. Normally,
+    these locations are searched BEFORE sys.path, preventing sys.path
+    entries from shadowing them.
+
+    For this to cause a visible difference in behaviour, there must
+    be a module or package name that is accessible via both sys.path
+    and one of the non PEP 302 file system mechanisms. In this case,
+    the emulation will find the former version, while the builtin
+    import mechanism will find the latter.
+
+    Items of the following types can be affected by this discrepancy:
+        imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY
+    """
+    if fullname.startswith('.'):
+        raise ImportError("Relative module names not supported")
+    if '.' in fullname:
+        # Get the containing package's __path__
+        pkg = '.'.join(fullname.split('.')[:-1])
+        if pkg not in sys.modules:
+            __import__(pkg)
+        path = getattr(sys.modules[pkg],'__path__',None) or []
+    else:
+        for importer in sys.meta_path:
+            yield importer
+        path = sys.path
+    for item in path:
+        yield get_importer(item)
+    if '.' not in fullname:
+        yield ImpImporter()
+
+def get_loader(module_or_name):
+    """Get a PEP 302 "loader" object for module_or_name
+
+    If the module or package is accessible via the normal import
+    mechanism, a wrapper around the relevant part of that machinery
+    is returned.  Returns None if the module cannot be found or imported.
+    If the named module is not already imported, its containing package
+    (if any) is imported, in order to establish the package __path__.
+
+    This function uses iter_importers(), and is thus subject to the same
+    limitations regarding platform-specific special import locations such
+    as the Windows registry.
+    """
+    if module_or_name in sys.modules:
+        module_or_name = sys.modules[module_or_name]
+    if isinstance(module_or_name, ModuleType):
+        module = module_or_name
+        loader = getattr(module,'__loader__',None)
+        if loader is not None:
+            return loader
+        fullname = module.__name__
+    else:
+        fullname = module_or_name
+    return find_loader(fullname)
+
+def find_loader(fullname):
+    """Find a PEP 302 "loader" object for fullname
+
+    If fullname contains dots, path must be the containing package's __path__.
+    Returns None if the module cannot be found or imported. This function uses
+    iter_importers(), and is thus subject to the same limitations regarding
+    platform-specific special import locations such as the Windows registry.
+    """
+    for importer in iter_importers(fullname):
+        loader = importer.find_module(fullname)
+        if loader is not None:
+            return loader
+
+    return None
+
 
 def extend_path(path, name):
     """Extend a package's path.
diff --git a/Lib/plat-mac/applesingle.py b/Lib/plat-mac/applesingle.py
index b035d9e..76bdb06 100644
--- a/Lib/plat-mac/applesingle.py
+++ b/Lib/plat-mac/applesingle.py
@@ -25,7 +25,7 @@
     pass
 
 # File header format: magic, version, unused, number of entries
-AS_HEADER_FORMAT=">ll16sh"
+AS_HEADER_FORMAT=">LL16sh"
 AS_HEADER_LENGTH=26
 # The flag words for AppleSingle
 AS_MAGIC=0x00051600
diff --git a/Lib/platform.py b/Lib/platform.py
index 62fdaf4..288bc95 100755
--- a/Lib/platform.py
+++ b/Lib/platform.py
@@ -607,7 +607,8 @@
         versioninfo = (version,stage,nonrel)
     if sysa:
         machine = {0x1: '68k',
-                   0x2: 'PowerPC'}.get(sysa,'')
+                   0x2: 'PowerPC',
+                   0xa: 'i386'}.get(sysa,'')
     return release,versioninfo,machine
 
 def _java_getprop(name,default):
diff --git a/Lib/popen2.py b/Lib/popen2.py
index 54543be..67ebd26 100644
--- a/Lib/popen2.py
+++ b/Lib/popen2.py
@@ -20,7 +20,13 @@
 
 def _cleanup():
     for inst in _active[:]:
-        inst.poll()
+        if inst.poll(_deadstate=sys.maxint) >= 0:
+            try:
+                _active.remove(inst)
+            except ValueError:
+                # This can happen if two threads create a new Popen instance.
+                # It's harmless that it was already removed, so ignore.
+                pass
 
 class Popen3:
     """Class representing a child process.  Normally instances are created
@@ -39,6 +45,7 @@
         specified, it specifies the size of the I/O buffers to/from the child
         process."""
         _cleanup()
+        self.cmd = cmd
         p2cread, p2cwrite = os.pipe()
         c2pread, c2pwrite = os.pipe()
         if capturestderr:
@@ -60,7 +67,13 @@
             self.childerr = os.fdopen(errout, 'r', bufsize)
         else:
             self.childerr = None
-        _active.append(self)
+
+    def __del__(self):
+        # In case the child hasn't been waited on, check if it's done.
+        self.poll(_deadstate=sys.maxint)
+        if self.sts < 0:
+            # Child is still running, keep us alive until we can wait on it.
+            _active.append(self)
 
     def _run_child(self, cmd):
         if isinstance(cmd, basestring):
@@ -75,26 +88,28 @@
         finally:
             os._exit(1)
 
-    def poll(self):
+    def poll(self, _deadstate=None):
         """Return the exit status of the child process if it has finished,
         or -1 if it hasn't finished yet."""
         if self.sts < 0:
             try:
                 pid, sts = os.waitpid(self.pid, os.WNOHANG)
+                # pid will be 0 if self.pid hasn't terminated
                 if pid == self.pid:
                     self.sts = sts
-                    _active.remove(self)
             except os.error:
-                pass
+                if _deadstate is not None:
+                    self.sts = _deadstate
         return self.sts
 
     def wait(self):
         """Wait for and return the exit status of the child process."""
         if self.sts < 0:
             pid, sts = os.waitpid(self.pid, 0)
-            if pid == self.pid:
-                self.sts = sts
-                _active.remove(self)
+            # This used to be a test, but it is believed to be
+            # always true, so I changed it to an assertion - mvl
+            assert pid == self.pid
+            self.sts = sts
         return self.sts
 
 
@@ -103,6 +118,7 @@
 
     def __init__(self, cmd, bufsize=-1):
         _cleanup()
+        self.cmd = cmd
         p2cread, p2cwrite = os.pipe()
         c2pread, c2pwrite = os.pipe()
         self.pid = os.fork()
@@ -116,7 +132,6 @@
         self.tochild = os.fdopen(p2cwrite, 'w', bufsize)
         os.close(c2pwrite)
         self.fromchild = os.fdopen(c2pread, 'r', bufsize)
-        _active.append(self)
 
 
 if sys.platform[:3] == "win" or sys.platform == "os2emx":
@@ -186,6 +201,9 @@
     __all__.extend(["Popen3", "Popen4"])
 
 def _test():
+    # When the test runs, there shouldn't be any open pipes
+    _cleanup()
+    assert not _active, "Active pipes when test starts " + repr([c.cmd for c in _active])
     cmd  = "cat"
     teststr = "ab cd\n"
     if os.name == "nt":
@@ -216,6 +234,7 @@
         raise ValueError("unexpected %r on stderr" % (got,))
     for inst in _active[:]:
         inst.wait()
+    _cleanup()
     if _active:
         raise ValueError("_active not empty")
     print "All OK"
diff --git a/Lib/pstats.py b/Lib/pstats.py
index 930cc6d..c3a8828 100644
--- a/Lib/pstats.py
+++ b/Lib/pstats.py
@@ -32,6 +32,7 @@
 # CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 
 
+import sys
 import os
 import time
 import marshal
@@ -58,18 +59,31 @@
     printed.
 
     The sort_stats() method now processes some additional options (i.e., in
-    addition to the old -1, 0, 1, or 2).  It takes an arbitrary number of quoted
-    strings to select the sort order.  For example sort_stats('time', 'name')
-    sorts on the major key of "internal function time", and on the minor
-    key of 'the name of the function'.  Look at the two tables in sort_stats()
-    and get_sort_arg_defs(self) for more examples.
+    addition to the old -1, 0, 1, or 2).  It takes an arbitrary number of
+    quoted strings to select the sort order.  For example sort_stats('time',
+    'name') sorts on the major key of 'internal function time', and on the
+    minor key of 'the name of the function'.  Look at the two tables in
+    sort_stats() and get_sort_arg_defs(self) for more examples.
 
-    All methods now return "self",  so you can string together commands like:
+    All methods return self,  so you can string together commands like:
         Stats('foo', 'goo').strip_dirs().sort_stats('calls').\
                             print_stats(5).print_callers(5)
     """
 
-    def __init__(self, *args):
+    def __init__(self, *args, **kwds):
+        # I can't figure out how to explictly specify a stream keyword arg
+        # with *args:
+        #   def __init__(self, *args, stream=sys.stdout): ...
+        # so I use **kwds and sqauwk if something unexpected is passed in.
+        self.stream = sys.stdout
+        if "stream" in kwds:
+            self.stream = kwds["stream"]
+            del kwds["stream"]
+        if kwds:
+            keys = kwds.keys()
+            keys.sort()
+            extras = ", ".join(["%s=%s" % (k, kwds[k]) for k in keys])
+            raise ValueError, "unrecognized keyword args: %s" % extras
         if not len(args):
             arg = None
         else:
@@ -96,9 +110,9 @@
             trouble = 0
         finally:
             if trouble:
-                print "Invalid timing data",
-                if self.files: print self.files[-1],
-                print
+                print >> self.stream, "Invalid timing data",
+                if self.files: print >> self.stream, self.files[-1],
+                print >> self.stream
 
     def load_stats(self, arg):
         if not arg:  self.stats = {}
@@ -320,7 +334,7 @@
 
         if not list:
             return 0, list
-        print msg
+        print >> self.stream, msg
         if count < len(self.stats):
             width = 0
             for func in list:
@@ -330,24 +344,24 @@
 
     def print_stats(self, *amount):
         for filename in self.files:
-            print filename
-        if self.files: print
+            print >> self.stream, filename
+        if self.files: print >> self.stream
         indent = ' ' * 8
         for func in self.top_level:
-            print indent, func_get_function_name(func)
+            print >> self.stream, indent, func_get_function_name(func)
 
-        print indent, self.total_calls, "function calls",
+        print >> self.stream, indent, self.total_calls, "function calls",
         if self.total_calls != self.prim_calls:
-            print "(%d primitive calls)" % self.prim_calls,
-        print "in %.3f CPU seconds" % self.total_tt
-        print
+            print >> self.stream, "(%d primitive calls)" % self.prim_calls,
+        print >> self.stream, "in %.3f CPU seconds" % self.total_tt
+        print >> self.stream
         width, list = self.get_print_list(amount)
         if list:
             self.print_title()
             for func in list:
                 self.print_line(func)
-            print
-            print
+            print >> self.stream
+            print >> self.stream
         return self
 
     def print_callees(self, *amount):
@@ -361,8 +375,8 @@
                     self.print_call_line(width, func, self.all_callees[func])
                 else:
                     self.print_call_line(width, func, {})
-            print
-            print
+            print >> self.stream
+            print >> self.stream
         return self
 
     def print_callers(self, *amount):
@@ -372,12 +386,12 @@
             for func in list:
                 cc, nc, tt, ct, callers = self.stats[func]
                 self.print_call_line(width, func, callers, "<-")
-            print
-            print
+            print >> self.stream
+            print >> self.stream
         return self
 
     def print_call_heading(self, name_size, column_title):
-        print "Function ".ljust(name_size) + column_title
+        print >> self.stream, "Function ".ljust(name_size) + column_title
         # print sub-header only if we have new-style callers
         subheader = False
         for cc, nc, tt, ct, callers in self.stats.itervalues():
@@ -386,12 +400,12 @@
                 subheader = isinstance(value, tuple)
                 break
         if subheader:
-            print " "*name_size + "    ncalls  tottime  cumtime"
+            print >> self.stream, " "*name_size + "    ncalls  tottime  cumtime"
 
     def print_call_line(self, name_size, source, call_dict, arrow="->"):
-        print func_std_string(source).ljust(name_size) + arrow,
+        print >> self.stream, func_std_string(source).ljust(name_size) + arrow,
         if not call_dict:
-            print
+            print >> self.stream
             return
         clist = call_dict.keys()
         clist.sort()
@@ -411,30 +425,30 @@
             else:
                 substats = '%s(%r) %s' % (name, value, f8(self.stats[func][3]))
                 left_width = name_size + 3
-            print indent*left_width + substats
+            print >> self.stream, indent*left_width + substats
             indent = " "
 
     def print_title(self):
-        print '   ncalls  tottime  percall  cumtime  percall', \
-              'filename:lineno(function)'
+        print >> self.stream, '   ncalls  tottime  percall  cumtime  percall',
+        print >> self.stream, 'filename:lineno(function)'
 
     def print_line(self, func):  # hack : should print percentages
         cc, nc, tt, ct, callers = self.stats[func]
         c = str(nc)
         if nc != cc:
             c = c + '/' + str(cc)
-        print c.rjust(9),
-        print f8(tt),
+        print >> self.stream, c.rjust(9),
+        print >> self.stream, f8(tt),
         if nc == 0:
-            print ' '*8,
+            print >> self.stream, ' '*8,
         else:
-            print f8(tt/nc),
-        print f8(ct),
+            print >> self.stream, f8(tt/nc),
+        print >> self.stream, f8(ct),
         if cc == 0:
-            print ' '*8,
+            print >> self.stream, ' '*8,
         else:
-            print f8(ct/cc),
-        print func_std_string(func)
+            print >> self.stream, f8(ct/cc),
+        print >> self.stream, func_std_string(func)
 
 class TupleComp:
     """This class provides a generic function for comparing any two tuples.
@@ -549,7 +563,7 @@
                 try:
                     frac = float(term)
                     if frac > 1 or frac < 0:
-                        print "Fraction argument mus be in [0, 1]"
+                        print >> self.stream, "Fraction argument must be in [0, 1]"
                         continue
                     processed.append(frac)
                     continue
@@ -559,93 +573,93 @@
             if self.stats:
                 getattr(self.stats, fn)(*processed)
             else:
-                print "No statistics object is loaded."
+                print >> self.stream, "No statistics object is loaded."
             return 0
         def generic_help(self):
-            print "Arguments may be:"
-            print "* An integer maximum number of entries to print."
-            print "* A decimal fractional number between 0 and 1, controlling"
-            print "  what fraction of selected entries to print."
-            print "* A regular expression; only entries with function names"
-            print "  that match it are printed."
+            print >> self.stream, "Arguments may be:"
+            print >> self.stream, "* An integer maximum number of entries to print."
+            print >> self.stream, "* A decimal fractional number between 0 and 1, controlling"
+            print >> self.stream, "  what fraction of selected entries to print."
+            print >> self.stream, "* A regular expression; only entries with function names"
+            print >> self.stream, "  that match it are printed."
 
         def do_add(self, line):
             self.stats.add(line)
             return 0
         def help_add(self):
-            print "Add profile info from given file to current statistics object."
+            print >> self.stream, "Add profile info from given file to current statistics object."
 
         def do_callees(self, line):
             return self.generic('print_callees', line)
         def help_callees(self):
-            print "Print callees statistics from the current stat object."
+            print >> self.stream, "Print callees statistics from the current stat object."
             self.generic_help()
 
         def do_callers(self, line):
             return self.generic('print_callers', line)
         def help_callers(self):
-            print "Print callers statistics from the current stat object."
+            print >> self.stream, "Print callers statistics from the current stat object."
             self.generic_help()
 
         def do_EOF(self, line):
-            print ""
+            print >> self.stream, ""
             return 1
         def help_EOF(self):
-            print "Leave the profile brower."
+            print >> self.stream, "Leave the profile brower."
 
         def do_quit(self, line):
             return 1
         def help_quit(self):
-            print "Leave the profile brower."
+            print >> self.stream, "Leave the profile brower."
 
         def do_read(self, line):
             if line:
                 try:
                     self.stats = Stats(line)
                 except IOError, args:
-                    print args[1]
+                    print >> self.stream, args[1]
                     return
                 self.prompt = line + "% "
             elif len(self.prompt) > 2:
                 line = self.prompt[-2:]
             else:
-                print "No statistics object is current -- cannot reload."
+                print >> self.stream, "No statistics object is current -- cannot reload."
             return 0
         def help_read(self):
-            print "Read in profile data from a specified file."
+            print >> self.stream, "Read in profile data from a specified file."
 
         def do_reverse(self, line):
             self.stats.reverse_order()
             return 0
         def help_reverse(self):
-            print "Reverse the sort order of the profiling report."
+            print >> self.stream, "Reverse the sort order of the profiling report."
 
         def do_sort(self, line):
             abbrevs = self.stats.get_sort_arg_defs()
             if line and not filter(lambda x,a=abbrevs: x not in a,line.split()):
                 self.stats.sort_stats(*line.split())
             else:
-                print "Valid sort keys (unique prefixes are accepted):"
+                print >> self.stream, "Valid sort keys (unique prefixes are accepted):"
                 for (key, value) in Stats.sort_arg_dict_default.iteritems():
-                    print "%s -- %s" % (key, value[1])
+                    print >> self.stream, "%s -- %s" % (key, value[1])
             return 0
         def help_sort(self):
-            print "Sort profile data according to specified keys."
-            print "(Typing `sort' without arguments lists valid keys.)"
+            print >> self.stream, "Sort profile data according to specified keys."
+            print >> self.stream, "(Typing `sort' without arguments lists valid keys.)"
         def complete_sort(self, text, *args):
             return [a for a in Stats.sort_arg_dict_default if a.startswith(text)]
 
         def do_stats(self, line):
             return self.generic('print_stats', line)
         def help_stats(self):
-            print "Print statistics from the current stat object."
+            print >> self.stream, "Print statistics from the current stat object."
             self.generic_help()
 
         def do_strip(self, line):
             self.stats.strip_dirs()
             return 0
         def help_strip(self):
-            print "Strip leading path information from filenames in the report."
+            print >> self.stream, "Strip leading path information from filenames in the report."
 
         def postcmd(self, stop, line):
             if stop:
@@ -653,14 +667,14 @@
             return None
 
     import sys
-    print "Welcome to the profile statistics browser."
+    print >> self.stream, "Welcome to the profile statistics browser."
     if len(sys.argv) > 1:
         initprofile = sys.argv[1]
     else:
         initprofile = None
     try:
         ProfileBrowser(initprofile).cmdloop()
-        print "Goodbye."
+        print >> self.stream, "Goodbye."
     except KeyboardInterrupt:
         pass
 
diff --git a/Lib/pydoc.py b/Lib/pydoc.py
index b6afc7f..cf38630 100755
--- a/Lib/pydoc.py
+++ b/Lib/pydoc.py
@@ -52,10 +52,16 @@
 #     the current directory is changed with os.chdir(), an incorrect
 #     path will be displayed.
 
-import sys, imp, os, re, types, inspect, __builtin__
+import sys, imp, os, re, types, inspect, __builtin__, pkgutil
 from repr import Repr
 from string import expandtabs, find, join, lower, split, strip, rfind, rstrip
-from collections import deque
+try:
+    from collections import deque
+except ImportError:
+    # Python 2.3 compatibility
+    class deque(list):
+        def popleft(self):
+            return self.pop(0)
 
 # --------------------------------------------------------- common routines
 
@@ -182,6 +188,23 @@
                 return True
     return False
 
+def source_synopsis(file):
+    line = file.readline()
+    while line[:1] == '#' or not strip(line):
+        line = file.readline()
+        if not line: break
+    line = strip(line)
+    if line[:4] == 'r"""': line = line[1:]
+    if line[:3] == '"""':
+        line = line[3:]
+        if line[-1:] == '\\': line = line[:-1]
+        while not strip(line):
+            line = file.readline()
+            if not line: break
+        result = strip(split(line, '"""')[0])
+    else: result = None
+    return result
+
 def synopsis(filename, cache={}):
     """Get the one-line summary out of a module file."""
     mtime = os.stat(filename).st_mtime
@@ -196,24 +219,11 @@
         if info and 'b' in info[2]: # binary modules have to be imported
             try: module = imp.load_module('__temp__', file, filename, info[1:])
             except: return None
-            result = split(module.__doc__ or '', '\n')[0]
+            result = (module.__doc__ or '').splitlines()[0]
             del sys.modules['__temp__']
         else: # text modules can be directly examined
-            line = file.readline()
-            while line[:1] == '#' or not strip(line):
-                line = file.readline()
-                if not line: break
-            line = strip(line)
-            if line[:4] == 'r"""': line = line[1:]
-            if line[:3] == '"""':
-                line = line[3:]
-                if line[-1:] == '\\': line = line[:-1]
-                while not strip(line):
-                    line = file.readline()
-                    if not line: break
-                result = strip(split(line, '"""')[0])
-            else: result = None
-        file.close()
+            result = source_synopsis(file)
+            file.close()
         cache[filename] = (mtime, result)
     return result
 
@@ -643,16 +653,8 @@
 
         if hasattr(object, '__path__'):
             modpkgs = []
-            modnames = []
-            for file in os.listdir(object.__path__[0]):
-                path = os.path.join(object.__path__[0], file)
-                modname = inspect.getmodulename(file)
-                if modname != '__init__':
-                    if modname and modname not in modnames:
-                        modpkgs.append((modname, name, 0, 0))
-                        modnames.append(modname)
-                    elif ispackage(path):
-                        modpkgs.append((file, name, 1, 0))
+            for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
+                modpkgs.append((modname, name, ispkg, 0))
             modpkgs.sort()
             contents = self.multicolumn(modpkgs, self.modpkglink)
             result = result + self.bigsection(
@@ -796,7 +798,10 @@
             tag += ':<br>\n'
 
             # Sort attrs by name.
-            attrs.sort(key=lambda t: t[0])
+            try:
+                attrs.sort(key=lambda t: t[0])
+            except TypeError:
+                attrs.sort(lambda t1, t2: cmp(t1[0], t2[0]))    # 2.3 compat
 
             # Pump out the attrs, segregated by kind.
             attrs = spill('Methods %s' % tag, attrs,
@@ -914,25 +919,9 @@
         """Generate an HTML index for a directory of modules."""
         modpkgs = []
         if shadowed is None: shadowed = {}
-        seen = {}
-        files = os.listdir(dir)
-
-        def found(name, ispackage,
-                  modpkgs=modpkgs, shadowed=shadowed, seen=seen):
-            if name not in seen:
-                modpkgs.append((name, '', ispackage, name in shadowed))
-                seen[name] = 1
-                shadowed[name] = 1
-
-        # Package spam/__init__.py takes precedence over module spam.py.
-        for file in files:
-            path = os.path.join(dir, file)
-            if ispackage(path): found(file, 1)
-        for file in files:
-            path = os.path.join(dir, file)
-            if os.path.isfile(path):
-                modname = inspect.getmodulename(file)
-                if modname: found(modname, 0)
+        for importer, name, ispkg in pkgutil.iter_modules([dir]):
+            modpkgs.append((name, '', ispkg, name in shadowed))
+            shadowed[name] = 1
 
         modpkgs.sort()
         contents = self.multicolumn(modpkgs, self.modpkglink)
@@ -1059,14 +1048,12 @@
 
         if hasattr(object, '__path__'):
             modpkgs = []
-            for file in os.listdir(object.__path__[0]):
-                path = os.path.join(object.__path__[0], file)
-                modname = inspect.getmodulename(file)
-                if modname != '__init__':
-                    if modname and modname not in modpkgs:
-                        modpkgs.append(modname)
-                    elif ispackage(path):
-                        modpkgs.append(file + ' (package)')
+            for importer, modname, ispkg in pkgutil.iter_modules(object.__path__):
+                if ispkg:
+                    modpkgs.append(modname + ' (package)')
+                else:
+                    modpkgs.append(modname)
+
             modpkgs.sort()
             result = result + self.section(
                 'PACKAGE CONTENTS', join(modpkgs, '\n'))
@@ -1490,20 +1477,9 @@
 def writedocs(dir, pkgpath='', done=None):
     """Write out HTML documentation for all modules in a directory tree."""
     if done is None: done = {}
-    for file in os.listdir(dir):
-        path = os.path.join(dir, file)
-        if ispackage(path):
-            writedocs(path, pkgpath + file + '.', done)
-        elif os.path.isfile(path):
-            modname = inspect.getmodulename(path)
-            if modname:
-                if modname == '__init__':
-                    modname = pkgpath[:-1] # remove trailing period
-                else:
-                    modname = pkgpath + modname
-                if modname not in done:
-                    done[modname] = 1
-                    writedoc(modname)
+    for importer, modname, ispkg in pkgutil.walk_packages([dir], pkgpath):
+        writedoc(modname)
+    return
 
 def raw_input(prompt):
     sys.stdout.write(prompt)
@@ -1835,30 +1811,9 @@
             self.state.append((child, self.children(child)))
         return child
 
-class ModuleScanner(Scanner):
+
+class ModuleScanner:
     """An interruptible scanner that searches module synopses."""
-    def __init__(self):
-        roots = map(lambda dir: (dir, ''), pathdirs())
-        Scanner.__init__(self, roots, self.submodules, self.isnewpackage)
-        self.inodes = map(lambda (dir, pkg): os.stat(dir).st_ino, roots)
-
-    def submodules(self, (dir, package)):
-        children = []
-        for file in os.listdir(dir):
-            path = os.path.join(dir, file)
-            if ispackage(path):
-                children.append((path, package + (package and '.') + file))
-            else:
-                children.append((path, package))
-        children.sort() # so that spam.py comes before spam.pyc or spam.pyo
-        return children
-
-    def isnewpackage(self, (dir, package)):
-        inode = os.path.exists(dir) and os.stat(dir).st_ino
-        if not (os.path.islink(dir) and inode in self.inodes):
-            self.inodes.append(inode) # detect circular symbolic links
-            return ispackage(dir)
-        return False
 
     def run(self, callback, key=None, completer=None):
         if key: key = lower(key)
@@ -1875,22 +1830,31 @@
                     if find(lower(modname + ' - ' + desc), key) >= 0:
                         callback(None, modname, desc)
 
-        while not self.quit:
-            node = self.next()
-            if not node: break
-            path, package = node
-            modname = inspect.getmodulename(path)
-            if os.path.isfile(path) and modname:
-                modname = package + (package and '.') + modname
-                if not modname in seen:
-                    seen[modname] = 1 # if we see spam.py, skip spam.pyc
-                    if key is None:
-                        callback(path, modname, '')
+        for importer, modname, ispkg in pkgutil.walk_packages():
+            if self.quit:
+                break
+            if key is None:
+                callback(None, modname, '')
+            else:
+                loader = importer.find_module(modname)
+                if hasattr(loader,'get_source'):
+                    import StringIO
+                    desc = source_synopsis(
+                        StringIO.StringIO(loader.get_source(modname))
+                    ) or ''
+                    if hasattr(loader,'get_filename'):
+                        path = loader.get_filename(modname)
                     else:
-                        desc = synopsis(path) or ''
-                        if find(lower(modname + ' - ' + desc), key) >= 0:
-                            callback(path, modname, desc)
-        if completer: completer()
+                        path = None
+                else:
+                    module = loader.load_module(modname)
+                    desc = (module.__doc__ or '').splitlines()[0]
+                    path = getattr(module,'__file__',None)
+                if find(lower(modname + ' - ' + desc), key) >= 0:
+                    callback(path, modname, desc)
+
+        if completer:
+            completer()
 
 def apropos(key):
     """Print all the one-line module summaries that contain a substring."""
@@ -1955,7 +1919,7 @@
                     'Built-in Modules', '#ffffff', '#ee77aa', contents)]
 
                 seen = {}
-                for dir in pathdirs():
+                for dir in sys.path:
                     indices.append(html.index(dir, seen))
                 contents = heading + join(indices) + '''<p align=right>
 <font color="#909090" face="helvetica, arial"><strong>
diff --git a/Lib/random.py b/Lib/random.py
index b4ad2b3..465f477 100644
--- a/Lib/random.py
+++ b/Lib/random.py
@@ -285,6 +285,15 @@
         large population:   sample(xrange(10000000), 60)
         """
 
+        # XXX Although the documentation says `population` is "a sequence",
+        # XXX attempts are made to cater to any iterable with a __len__
+        # XXX method.  This has had mixed success.  Examples from both
+        # XXX sides:  sets work fine, and should become officially supported;
+        # XXX dicts are much harder, and have failed in various subtle
+        # XXX ways across attempts.  Support for mapping types should probably
+        # XXX be dropped (and users should pass mapping.keys() or .values()
+        # XXX explicitly).
+
         # Sampling without replacement entails tracking either potential
         # selections (the pool) in a list or previous selections in a set.
 
@@ -304,7 +313,9 @@
         setsize = 21        # size of a small set minus size of an empty list
         if k > 5:
             setsize += 4 ** _ceil(_log(k * 3, 4)) # table size for big sets
-        if n <= setsize:    # is an n-length list smaller than a k-length set
+        if n <= setsize or hasattr(population, "keys"):
+            # An n-length list is smaller than a k-length set, or this is a
+            # mapping type so the other algorithm wouldn't work.
             pool = list(population)
             for i in xrange(k):         # invariant:  non-selected at [0,n-i)
                 j = _int(random() * (n-i))
@@ -312,17 +323,18 @@
                 pool[j] = pool[n-i-1]   # move non-selected item into vacancy
         else:
             try:
-                n > 0 and (population[0], population[n//2], population[n-1])
-            except (TypeError, KeyError):   # handle non-sequence iterables
-                population = tuple(population)
-            selected = set()
-            selected_add = selected.add
-            for i in xrange(k):
-                j = _int(random() * n)
-                while j in selected:
+                selected = set()
+                selected_add = selected.add
+                for i in xrange(k):
                     j = _int(random() * n)
-                selected_add(j)
-                result[i] = population[j]
+                    while j in selected:
+                        j = _int(random() * n)
+                    selected_add(j)
+                    result[i] = population[j]
+            except (TypeError, KeyError):   # handle (at least) sets
+                if isinstance(population, list):
+                    raise
+                return self.sample(tuple(population), k)
         return result
 
 ## -------------------- real-valued distributions  -------------------
diff --git a/Lib/reconvert.py b/Lib/reconvert.py
deleted file mode 100755
index 64bab5b..0000000
--- a/Lib/reconvert.py
+++ /dev/null
@@ -1,192 +0,0 @@
-#! /usr/bin/env python
-
-r"""Convert old ("regex") regular expressions to new syntax ("re").
-
-When imported as a module, there are two functions, with their own
-strings:
-
-  convert(s, syntax=None) -- convert a regex regular expression to re syntax
-
-  quote(s) -- return a quoted string literal
-
-When used as a script, read a Python string literal (or any other
-expression evaluating to a string) from stdin, and write the
-translated expression to stdout as a string literal.  Unless stdout is
-a tty, no trailing \n is written to stdout.  This is done so that it
-can be used with Emacs C-U M-| (shell-command-on-region with argument
-which filters the region through the shell command).
-
-No attempt has been made at coding for performance.
-
-Translation table...
-
-    \(    (     (unless RE_NO_BK_PARENS set)
-    \)    )     (unless RE_NO_BK_PARENS set)
-    \|    |     (unless RE_NO_BK_VBAR set)
-    \<    \b    (not quite the same, but alla...)
-    \>    \b    (not quite the same, but alla...)
-    \`    \A
-    \'    \Z
-
-Not translated...
-
-    .
-    ^
-    $
-    *
-    +           (unless RE_BK_PLUS_QM set, then to \+)
-    ?           (unless RE_BK_PLUS_QM set, then to \?)
-    \
-    \b
-    \B
-    \w
-    \W
-    \1 ... \9
-
-Special cases...
-
-    Non-printable characters are always replaced by their 3-digit
-    escape code (except \t, \n, \r, which use mnemonic escapes)
-
-    Newline is turned into | when RE_NEWLINE_OR is set
-
-XXX To be done...
-
-    [...]     (different treatment of backslashed items?)
-    [^...]    (different treatment of backslashed items?)
-    ^ $ * + ? (in some error contexts these are probably treated differently)
-    \vDD  \DD (in the regex docs but only works when RE_ANSI_HEX set)
-
-"""
-
-
-import warnings
-warnings.filterwarnings("ignore", ".* regex .*", DeprecationWarning, __name__,
-                        append=1)
-
-import regex
-from regex_syntax import * # RE_*
-
-__all__ = ["convert","quote"]
-
-# Default translation table
-mastertable = {
-    r'\<': r'\b',
-    r'\>': r'\b',
-    r'\`': r'\A',
-    r'\'': r'\Z',
-    r'\(': '(',
-    r'\)': ')',
-    r'\|': '|',
-    '(': r'\(',
-    ')': r'\)',
-    '|': r'\|',
-    '\t': r'\t',
-    '\n': r'\n',
-    '\r': r'\r',
-}
-
-
-def convert(s, syntax=None):
-    """Convert a regex regular expression to re syntax.
-
-    The first argument is the regular expression, as a string object,
-    just like it would be passed to regex.compile().  (I.e., pass the
-    actual string object -- string quotes must already have been
-    removed and the standard escape processing has already been done,
-    e.g. by eval().)
-
-    The optional second argument is the regex syntax variant to be
-    used.  This is an integer mask as passed to regex.set_syntax();
-    the flag bits are defined in regex_syntax.  When not specified, or
-    when None is given, the current regex syntax mask (as retrieved by
-    regex.get_syntax()) is used -- which is 0 by default.
-
-    The return value is a regular expression, as a string object that
-    could be passed to re.compile().  (I.e., no string quotes have
-    been added -- use quote() below, or repr().)
-
-    The conversion is not always guaranteed to be correct.  More
-    syntactical analysis should be performed to detect borderline
-    cases and decide what to do with them.  For example, 'x*?' is not
-    translated correctly.
-
-    """
-    table = mastertable.copy()
-    if syntax is None:
-        syntax = regex.get_syntax()
-    if syntax & RE_NO_BK_PARENS:
-        del table[r'\('], table[r'\)']
-        del table['('], table[')']
-    if syntax & RE_NO_BK_VBAR:
-        del table[r'\|']
-        del table['|']
-    if syntax & RE_BK_PLUS_QM:
-        table['+'] = r'\+'
-        table['?'] = r'\?'
-        table[r'\+'] = '+'
-        table[r'\?'] = '?'
-    if syntax & RE_NEWLINE_OR:
-        table['\n'] = '|'
-    res = ""
-
-    i = 0
-    end = len(s)
-    while i < end:
-        c = s[i]
-        i = i+1
-        if c == '\\':
-            c = s[i]
-            i = i+1
-            key = '\\' + c
-            key = table.get(key, key)
-            res = res + key
-        else:
-            c = table.get(c, c)
-            res = res + c
-    return res
-
-
-def quote(s, quote=None):
-    """Convert a string object to a quoted string literal.
-
-    This is similar to repr() but will return a "raw" string (r'...'
-    or r"...") when the string contains backslashes, instead of
-    doubling all backslashes.  The resulting string does *not* always
-    evaluate to the same string as the original; however it will do
-    just the right thing when passed into re.compile().
-
-    The optional second argument forces the string quote; it must be
-    a single character which is a valid Python string quote.
-
-    """
-    if quote is None:
-        q = "'"
-        altq = "'"
-        if q in s and altq not in s:
-            q = altq
-    else:
-        assert quote in ('"', "'", '"""', "'''")
-        q = quote
-    res = q
-    for c in s:
-        if c == q: c = '\\' + c
-        elif c < ' ' or c > '~': c = "\\%03o" % ord(c)
-        res = res + c
-    res = res + q
-    if '\\' in res:
-        res = 'r' + res
-    return res
-
-
-def main():
-    """Main program -- called when run as a script."""
-    import sys
-    s = eval(sys.stdin.read())
-    sys.stdout.write(quote(convert(s)))
-    if sys.stdout.isatty():
-        sys.stdout.write("\n")
-
-
-if __name__ == '__main__':
-    main()
diff --git a/Lib/regex_syntax.py b/Lib/regex_syntax.py
deleted file mode 100644
index b0a0dbf..0000000
--- a/Lib/regex_syntax.py
+++ /dev/null
@@ -1,53 +0,0 @@
-"""Constants for selecting regexp syntaxes for the obsolete regex module.
-
-This module is only for backward compatibility.  "regex" has now
-been replaced by the new regular expression module, "re".
-
-These bits are passed to regex.set_syntax() to choose among
-alternative regexp syntaxes.
-"""
-
-# 1 means plain parentheses serve as grouping, and backslash
-#   parentheses are needed for literal searching.
-# 0 means backslash-parentheses are grouping, and plain parentheses
-#   are for literal searching.
-RE_NO_BK_PARENS = 1
-
-# 1 means plain | serves as the "or"-operator, and \| is a literal.
-# 0 means \| serves as the "or"-operator, and | is a literal.
-RE_NO_BK_VBAR = 2
-
-# 0 means plain + or ? serves as an operator, and \+, \? are literals.
-# 1 means \+, \? are operators and plain +, ? are literals.
-RE_BK_PLUS_QM = 4
-
-# 1 means | binds tighter than ^ or $.
-# 0 means the contrary.
-RE_TIGHT_VBAR = 8
-
-# 1 means treat \n as an _OR operator
-# 0 means treat it as a normal character
-RE_NEWLINE_OR = 16
-
-# 0 means that a special characters (such as *, ^, and $) always have
-#   their special meaning regardless of the surrounding context.
-# 1 means that special characters may act as normal characters in some
-#   contexts.  Specifically, this applies to:
-#       ^ - only special at the beginning, or after ( or |
-#       $ - only special at the end, or before ) or |
-#       *, +, ? - only special when not after the beginning, (, or |
-RE_CONTEXT_INDEP_OPS = 32
-
-# ANSI sequences (\n etc) and \xhh
-RE_ANSI_HEX = 64
-
-# No GNU extensions
-RE_NO_GNU_EXTENSIONS = 128
-
-# Now define combinations of bits for the standard possibilities.
-RE_SYNTAX_AWK = (RE_NO_BK_PARENS | RE_NO_BK_VBAR | RE_CONTEXT_INDEP_OPS)
-RE_SYNTAX_EGREP = (RE_SYNTAX_AWK | RE_NEWLINE_OR)
-RE_SYNTAX_GREP = (RE_BK_PLUS_QM | RE_NEWLINE_OR)
-RE_SYNTAX_EMACS = 0
-
-# (Python's obsolete "regexp" module used a syntax similar to awk.)
diff --git a/Lib/regsub.py b/Lib/regsub.py
deleted file mode 100644
index 0fc10a5..0000000
--- a/Lib/regsub.py
+++ /dev/null
@@ -1,198 +0,0 @@
-"""Regexp-based split and replace using the obsolete regex module.
-
-This module is only for backward compatibility.  These operations
-are now provided by the new regular expression module, "re".
-
-sub(pat, repl, str):        replace first occurrence of pattern in string
-gsub(pat, repl, str):       replace all occurrences of pattern in string
-split(str, pat, maxsplit):  split string using pattern as delimiter
-splitx(str, pat, maxsplit): split string using pattern as delimiter plus
-                            return delimiters
-"""
-
-import warnings
-warnings.warn("the regsub module is deprecated; please use re.sub()",
-              DeprecationWarning)
-
-# Ignore further deprecation warnings about this module
-warnings.filterwarnings("ignore", "", DeprecationWarning, __name__)
-
-import regex
-
-__all__ = ["sub","gsub","split","splitx","capwords"]
-
-# Replace first occurrence of pattern pat in string str by replacement
-# repl.  If the pattern isn't found, the string is returned unchanged.
-# The replacement may contain references \digit to subpatterns and
-# escaped backslashes.  The pattern may be a string or an already
-# compiled pattern.
-
-def sub(pat, repl, str):
-    prog = compile(pat)
-    if prog.search(str) >= 0:
-        regs = prog.regs
-        a, b = regs[0]
-        str = str[:a] + expand(repl, regs, str) + str[b:]
-    return str
-
-
-# Replace all (non-overlapping) occurrences of pattern pat in string
-# str by replacement repl.  The same rules as for sub() apply.
-# Empty matches for the pattern are replaced only when not adjacent to
-# a previous match, so e.g. gsub('', '-', 'abc') returns '-a-b-c-'.
-
-def gsub(pat, repl, str):
-    prog = compile(pat)
-    new = ''
-    start = 0
-    first = 1
-    while prog.search(str, start) >= 0:
-        regs = prog.regs
-        a, b = regs[0]
-        if a == b == start and not first:
-            if start >= len(str) or prog.search(str, start+1) < 0:
-                break
-            regs = prog.regs
-            a, b = regs[0]
-        new = new + str[start:a] + expand(repl, regs, str)
-        start = b
-        first = 0
-    new = new + str[start:]
-    return new
-
-
-# Split string str in fields separated by delimiters matching pattern
-# pat.  Only non-empty matches for the pattern are considered, so e.g.
-# split('abc', '') returns ['abc'].
-# The optional 3rd argument sets the number of splits that are performed.
-
-def split(str, pat, maxsplit = 0):
-    return intsplit(str, pat, maxsplit, 0)
-
-# Split string str in fields separated by delimiters matching pattern
-# pat.  Only non-empty matches for the pattern are considered, so e.g.
-# split('abc', '') returns ['abc']. The delimiters are also included
-# in the list.
-# The optional 3rd argument sets the number of splits that are performed.
-
-
-def splitx(str, pat, maxsplit = 0):
-    return intsplit(str, pat, maxsplit, 1)
-
-# Internal function used to implement split() and splitx().
-
-def intsplit(str, pat, maxsplit, retain):
-    prog = compile(pat)
-    res = []
-    start = next = 0
-    splitcount = 0
-    while prog.search(str, next) >= 0:
-        regs = prog.regs
-        a, b = regs[0]
-        if a == b:
-            next = next + 1
-            if next >= len(str):
-                break
-        else:
-            res.append(str[start:a])
-            if retain:
-                res.append(str[a:b])
-            start = next = b
-            splitcount = splitcount + 1
-            if (maxsplit and (splitcount >= maxsplit)):
-                break
-    res.append(str[start:])
-    return res
-
-
-# Capitalize words split using a pattern
-
-def capwords(str, pat='[^a-zA-Z0-9_]+'):
-    words = splitx(str, pat)
-    for i in range(0, len(words), 2):
-        words[i] = words[i].capitalize()
-    return "".join(words)
-
-
-# Internal subroutines:
-# compile(pat): compile a pattern, caching already compiled patterns
-# expand(repl, regs, str): expand \digit escapes in replacement string
-
-
-# Manage a cache of compiled regular expressions.
-#
-# If the pattern is a string a compiled version of it is returned.  If
-# the pattern has been used before we return an already compiled
-# version from the cache; otherwise we compile it now and save the
-# compiled version in the cache, along with the syntax it was compiled
-# with.  Instead of a string, a compiled regular expression can also
-# be passed.
-
-cache = {}
-
-def compile(pat):
-    if type(pat) != type(''):
-        return pat              # Assume it is a compiled regex
-    key = (pat, regex.get_syntax())
-    if key in cache:
-        prog = cache[key]       # Get it from the cache
-    else:
-        prog = cache[key] = regex.compile(pat)
-    return prog
-
-
-def clear_cache():
-    global cache
-    cache = {}
-
-
-# Expand \digit in the replacement.
-# Each occurrence of \digit is replaced by the substring of str
-# indicated by regs[digit].  To include a literal \ in the
-# replacement, double it; other \ escapes are left unchanged (i.e.
-# the \ and the following character are both copied).
-
-def expand(repl, regs, str):
-    if '\\' not in repl:
-        return repl
-    new = ''
-    i = 0
-    ord0 = ord('0')
-    while i < len(repl):
-        c = repl[i]; i = i+1
-        if c != '\\' or i >= len(repl):
-            new = new + c
-        else:
-            c = repl[i]; i = i+1
-            if '0' <= c <= '9':
-                a, b = regs[ord(c)-ord0]
-                new = new + str[a:b]
-            elif c == '\\':
-                new = new + c
-            else:
-                new = new + '\\' + c
-    return new
-
-
-# Test program, reads sequences "pat repl str" from stdin.
-# Optional argument specifies pattern used to split lines.
-
-def test():
-    import sys
-    if sys.argv[1:]:
-        delpat = sys.argv[1]
-    else:
-        delpat = '[ \t\n]+'
-    while 1:
-        if sys.stdin.isatty(): sys.stderr.write('--> ')
-        line = sys.stdin.readline()
-        if not line: break
-        if line[-1] == '\n': line = line[:-1]
-        fields = split(line, delpat)
-        if len(fields) != 3:
-            print 'Sorry, not three fields'
-            print 'split:', repr(fields)
-            continue
-        [pat, repl, str] = split(line, delpat)
-        print 'sub :', repr(sub(pat, repl, str))
-        print 'gsub:', repr(gsub(pat, repl, str))
diff --git a/Lib/rexec.py b/Lib/rexec.py
index ed01d24..10e4bc0 100644
--- a/Lib/rexec.py
+++ b/Lib/rexec.py
@@ -136,7 +136,7 @@
     ok_builtin_modules = ('audioop', 'array', 'binascii',
                           'cmath', 'errno', 'imageop',
                           'marshal', 'math', 'md5', 'operator',
-                          'parser', 'regex', 'select',
+                          'parser', 'select',
                           'sha', '_sre', 'strop', 'struct', 'time',
                           '_weakref')
 
diff --git a/Lib/runpy.py b/Lib/runpy.py
index afb0098..8290dfe 100755
--- a/Lib/runpy.py
+++ b/Lib/runpy.py
@@ -11,349 +11,15 @@
 
 import sys
 import imp
+try:
+    from imp import get_loader
+except ImportError:
+    from pkgutil import get_loader
 
 __all__ = [
     "run_module",
 ]
 
-try:
-    _get_loader = imp.get_loader
-except AttributeError:
-    # get_loader() is not provided by the imp module, so emulate it
-    # as best we can using the PEP 302 import machinery exposed since
-    # Python 2.3. The emulation isn't perfect, but the differences
-    # in the way names are shadowed shouldn't matter in practice.
-    import os.path
-    import marshal                           # Handle compiled Python files
-
-    # This helper is needed in order for the PEP 302 emulation to
-    # correctly handle compiled files
-    def _read_compiled_file(compiled_file):
-        magic = compiled_file.read(4)
-        if magic != imp.get_magic():
-            return None
-        try:
-            compiled_file.read(4) # Skip timestamp
-            return marshal.load(compiled_file)
-        except Exception:
-            return None
-
-    class _AbsoluteImporter(object):
-        """PEP 302 importer wrapper for top level import machinery"""
-        def find_module(self, mod_name, path=None):
-            if path is not None:
-                return None
-            try:
-                file, filename, mod_info = imp.find_module(mod_name)
-            except ImportError:
-                return None
-            suffix, mode, mod_type = mod_info
-            if mod_type == imp.PY_SOURCE:
-                loader = _SourceFileLoader(mod_name, file,
-                                           filename, mod_info)
-            elif mod_type == imp.PY_COMPILED:
-                loader = _CompiledFileLoader(mod_name, file,
-                                             filename, mod_info)
-            elif mod_type == imp.PKG_DIRECTORY:
-                loader = _PackageDirLoader(mod_name, file,
-                                           filename, mod_info)
-            elif mod_type == imp.C_EXTENSION:
-                loader = _FileSystemLoader(mod_name, file,
-                                           filename, mod_info)
-            else:
-                loader = _BasicLoader(mod_name, file,
-                                      filename, mod_info)
-            return loader
-
-
-    class _FileSystemImporter(object):
-        """PEP 302 importer wrapper for filesystem based imports"""
-        def __init__(self, path_item=None):
-            if path_item is not None:
-                if path_item != '' and not os.path.isdir(path_item):
-                    raise ImportError("%s is not a directory" % path_item)
-                self.path_dir = path_item
-            else:
-                raise ImportError("Filesystem importer requires "
-                                  "a directory name")
-
-        def find_module(self, mod_name, path=None):
-            if path is not None:
-                return None
-            path_dir = self.path_dir
-            if path_dir == '':
-                path_dir = os.getcwd()
-            sub_name = mod_name.rsplit(".", 1)[-1]
-            try:
-                file, filename, mod_info = imp.find_module(sub_name,
-                                                           [path_dir])
-            except ImportError:
-                return None
-            if not filename.startswith(path_dir):
-                return None
-            suffix, mode, mod_type = mod_info
-            if mod_type == imp.PY_SOURCE:
-                loader = _SourceFileLoader(mod_name, file,
-                                           filename, mod_info)
-            elif mod_type == imp.PY_COMPILED:
-                loader = _CompiledFileLoader(mod_name, file,
-                                             filename, mod_info)
-            elif mod_type == imp.PKG_DIRECTORY:
-                loader = _PackageDirLoader(mod_name, file,
-                                           filename, mod_info)
-            elif mod_type == imp.C_EXTENSION:
-                loader = _FileSystemLoader(mod_name, file,
-                                           filename, mod_info)
-            else:
-                loader = _BasicLoader(mod_name, file,
-                                      filename, mod_info)
-            return loader
-
-
-    class _BasicLoader(object):
-        """PEP 302 loader wrapper for top level import machinery"""
-        def __init__(self, mod_name, file, filename, mod_info):
-            self.mod_name = mod_name
-            self.file = file
-            self.filename = filename
-            self.mod_info = mod_info
-
-        def _fix_name(self, mod_name):
-            if mod_name is None:
-                mod_name = self.mod_name
-            elif mod_name != self.mod_name:
-                raise ImportError("Loader for module %s cannot handle "
-                                  "module %s" % (self.mod_name, mod_name))
-            return mod_name
-
-        def load_module(self, mod_name=None):
-            mod_name = self._fix_name(mod_name)
-            mod = imp.load_module(mod_name, self.file,
-                                  self.filename, self.mod_info)
-            mod.__loader__ = self  # for introspection
-            return mod
-
-        def get_code(self, mod_name=None):
-            return None
-
-        def get_source(self, mod_name=None):
-            return None
-
-        def is_package(self, mod_name=None):
-            return False
-
-        def close(self):
-            if self.file:
-                self.file.close()
-
-        def __del__(self):
-            self.close()
-
-
-    class _FileSystemLoader(_BasicLoader):
-        """PEP 302 loader wrapper for filesystem based imports"""
-        def get_code(self, mod_name=None):
-            mod_name = self._fix_name(mod_name)
-            return self._get_code(mod_name)
-
-        def get_data(self, pathname):
-            return open(pathname, "rb").read()
-
-        def get_filename(self, mod_name=None):
-            mod_name = self._fix_name(mod_name)
-            return self._get_filename(mod_name)
-
-        def get_source(self, mod_name=None):
-            mod_name = self._fix_name(mod_name)
-            return self._get_source(mod_name)
-
-        def is_package(self, mod_name=None):
-            mod_name = self._fix_name(mod_name)
-            return self._is_package(mod_name)
-
-        def _get_code(self, mod_name):
-            return None
-
-        def _get_filename(self, mod_name):
-            return self.filename
-
-        def _get_source(self, mod_name):
-            return None
-
-        def _is_package(self, mod_name):
-            return False
-
-    class _PackageDirLoader(_FileSystemLoader):
-        """PEP 302 loader wrapper for PKG_DIRECTORY directories"""
-        def _is_package(self, mod_name):
-            return True
-
-
-    class _SourceFileLoader(_FileSystemLoader):
-        """PEP 302 loader wrapper for PY_SOURCE modules"""
-        def _get_code(self, mod_name):
-            return compile(self._get_source(mod_name),
-                           self.filename, 'exec')
-
-        def _get_source(self, mod_name):
-            f = self.file
-            f.seek(0)
-            return f.read()
-
-
-    class _CompiledFileLoader(_FileSystemLoader):
-        """PEP 302 loader wrapper for PY_COMPILED modules"""
-        def _get_code(self, mod_name):
-            f = self.file
-            f.seek(0)
-            return _read_compiled_file(f)
-
-
-    def _get_importer(path_item):
-        """Retrieve a PEP 302 importer for the given path item
-
-        The returned importer is cached in sys.path_importer_cache
-        if it was newly created by a path hook.
-
-        If there is no importer, a wrapper around the basic import
-        machinery is returned. This wrapper is never inserted into
-        the importer cache (None is inserted instead).
-
-        The cache (or part of it) can be cleared manually if a
-        rescan of sys.path_hooks is necessary.
-        """
-        try:
-            importer = sys.path_importer_cache[path_item]
-        except KeyError:
-            for path_hook in sys.path_hooks:
-                try:
-                    importer = path_hook(path_item)
-                    break
-                except ImportError:
-                    pass
-            else:
-                importer = None
-            sys.path_importer_cache[path_item] = importer
-        if importer is None:
-            try:
-                importer = _FileSystemImporter(path_item)
-            except ImportError:
-                pass
-        return importer
-
-
-    def _get_path_loader(mod_name, path=None):
-        """Retrieve a PEP 302 loader using a path importer"""
-        if path is None:
-            path = sys.path
-            absolute_loader = _AbsoluteImporter().find_module(mod_name)
-            if isinstance(absolute_loader, _FileSystemLoader):
-                # Found in filesystem, so scan path hooks
-                # before accepting this one as the right one
-                loader = None
-            else:
-                # Not found in filesystem, so use top-level loader
-                loader = absolute_loader
-        else:
-            loader = absolute_loader = None
-        if loader is None:
-            for path_item in path:
-                importer = _get_importer(path_item)
-                if importer is not None:
-                    loader = importer.find_module(mod_name)
-                    if loader is not None:
-                        # Found a loader for our module
-                        break
-            else:
-                # No path hook found, so accept the top level loader
-                loader = absolute_loader
-        return loader
-
-    def _get_package(pkg_name):
-        """Retrieve a named package"""
-        pkg = __import__(pkg_name)
-        sub_pkg_names = pkg_name.split(".")
-        for sub_pkg in sub_pkg_names[1:]:
-            pkg = getattr(pkg, sub_pkg)
-        return pkg
-
-    def _get_loader(mod_name, path=None):
-        """Retrieve a PEP 302 loader for the given module or package
-
-        If the module or package is accessible via the normal import
-        mechanism, a wrapper around the relevant part of that machinery
-        is returned.
-
-        Non PEP 302 mechanisms (e.g. the Windows registry) used by the
-        standard import machinery to find files in alternative locations
-        are partially supported, but are searched AFTER sys.path. Normally,
-        these locations are searched BEFORE sys.path, preventing sys.path
-        entries from shadowing them.
-        For this to cause a visible difference in behaviour, there must
-        be a module or package name that is accessible via both sys.path
-        and one of the non PEP 302 file system mechanisms. In this case,
-        the emulation will find the former version, while the builtin
-        import mechanism will find the latter.
-        Items of the following types can be affected by this discrepancy:
-            imp.C_EXTENSION
-            imp.PY_SOURCE
-            imp.PY_COMPILED
-            imp.PKG_DIRECTORY
-        """
-        try:
-            loader = sys.modules[mod_name].__loader__
-        except (KeyError, AttributeError):
-            loader = None
-        if loader is None:
-            imp.acquire_lock()
-            try:
-                # Module not in sys.modules, or uses an unhooked loader
-                parts = mod_name.rsplit(".", 1)
-                if len(parts) == 2:
-                    # Sub package, so use parent package's path
-                    pkg_name, sub_name = parts
-                    if pkg_name and pkg_name[0] != '.':
-                        if path is not None:
-                            raise ImportError("Path argument must be None "
-                                            "for a dotted module name")
-                        pkg = _get_package(pkg_name)
-                        try:
-                            path = pkg.__path__
-                        except AttributeError:
-                            raise ImportError(pkg_name +
-                                            " is not a package")
-                    else:
-                        raise ImportError("Relative import syntax is not "
-                                          "supported by _get_loader()")
-                else:
-                    # Top level module, so stick with default path
-                    sub_name = mod_name
-
-                for importer in sys.meta_path:
-                    loader = importer.find_module(mod_name, path)
-                    if loader is not None:
-                        # Found a metahook to handle the module
-                        break
-                else:
-                    # Handling via the standard path mechanism
-                    loader = _get_path_loader(mod_name, path)
-            finally:
-                imp.release_lock()
-        return loader
-
-
-# This helper is needed due to a missing component in the PEP 302
-# loader protocol (specifically, "get_filename" is non-standard)
-def _get_filename(loader, mod_name):
-    try:
-        get_filename = loader.get_filename
-    except AttributeError:
-        return None
-    else:
-        return get_filename(mod_name)
-
-# ------------------------------------------------------------
-# Done with the import machinery emulation, on with the code!
 
 def _run_code(code, run_globals, init_globals,
               mod_name, mod_fname, mod_loader):
@@ -379,21 +45,17 @@
         restore_module = mod_name in sys.modules
         if restore_module:
             saved_module = sys.modules[mod_name]
-        imp.acquire_lock()
+        sys.argv[0] = mod_fname
+        sys.modules[mod_name] = temp_module
         try:
-            sys.argv[0] = mod_fname
-            sys.modules[mod_name] = temp_module
-            try:
-                _run_code(code, mod_globals, init_globals,
-                          mod_name, mod_fname, mod_loader)
-            finally:
-                sys.argv[0] = saved_argv0
-                if restore_module:
-                    sys.modules[mod_name] = saved_module
-                else:
-                    del sys.modules[mod_name]
+            _run_code(code, mod_globals, init_globals,
+                      mod_name, mod_fname, mod_loader)
         finally:
-            imp.release_lock()
+            sys.argv[0] = saved_argv0
+        if restore_module:
+            sys.modules[mod_name] = saved_module
+        else:
+            del sys.modules[mod_name]
         # Copy the globals of the temporary module, as they
         # may be cleared when the temporary module goes away
         return mod_globals.copy()
@@ -403,13 +65,24 @@
                          mod_name, mod_fname, mod_loader)
 
 
+# This helper is needed due to a missing component in the PEP 302
+# loader protocol (specifically, "get_filename" is non-standard)
+def _get_filename(loader, mod_name):
+    try:
+        get_filename = loader.get_filename
+    except AttributeError:
+        return None
+    else:
+        return get_filename(mod_name)
+
+
 def run_module(mod_name, init_globals=None,
                          run_name=None, alter_sys=False):
     """Execute a module's code without importing it
 
        Returns the resulting top level namespace dictionary
     """
-    loader = _get_loader(mod_name)
+    loader = get_loader(mod_name)
     if loader is None:
         raise ImportError("No module named " + mod_name)
     code = loader.get_code(mod_name)
diff --git a/Lib/setuptools.egg-info/PKG-INFO b/Lib/setuptools.egg-info/PKG-INFO
new file mode 100644
index 0000000..ff5c1a1
--- /dev/null
+++ b/Lib/setuptools.egg-info/PKG-INFO
@@ -0,0 +1,89 @@
+Metadata-Version: 1.0
+Name: setuptools
+Version: 0.7a1dev-r45536
+Summary: Download, build, install, upgrade, and uninstall Python packages -- easily!
+Home-page: http://peak.telecommunity.com/DevCenter/setuptools
+Author: Phillip J. Eby
+Author-email: peak@eby-sarna.com
+License: PSF or ZPL
+Description: ``setuptools`` is a collection of enhancements to the Python ``distutils``
+        (for Python 2.3.5 and up on most platforms; 64-bit platforms require a minimum
+        of Python 2.4) that allow you to more easily build and distribute Python
+        packages, especially ones that have dependencies on other packages.
+        
+        Packages built and distributed using ``setuptools`` look to the user like
+        ordinary Python packages based on the ``distutils``.  Your users don't need to
+        install or even know about setuptools in order to use them, and you don't
+        have to include the entire setuptools package in your distributions.  By
+        including just a single `bootstrap module`_ (an 8K .py file), your package will
+        automatically download and install ``setuptools`` if the user is building your
+        package from source and doesn't have a suitable version already installed.
+        
+        .. _bootstrap module: http://peak.telecommunity.com/dist/ez_setup.py
+        
+        Feature Highlights:
+        
+        * Automatically find/download/install/upgrade dependencies at build time using
+        the `EasyInstall tool <http://peak.telecommunity.com/DevCenter/EasyInstall>`_,
+        which supports downloading via HTTP, FTP, Subversion, and SourceForge, and
+        automatically scans web pages linked from PyPI to find download links.  (It's
+        the closest thing to CPAN currently available for Python.)
+        
+        * Create `Python Eggs <http://peak.telecommunity.com/DevCenter/PythonEggs>`_ -
+        a single-file importable distribution format
+        
+        * Include data files inside your package directories, where your code can
+        actually use them.  (Python 2.4 distutils also supports this feature, but
+        setuptools provides the feature for Python 2.3 packages also, and supports
+        accessing data files in zipped packages too.)
+        
+        * Automatically include all packages in your source tree, without listing them
+        individually in setup.py
+        
+        * Automatically include all relevant files in your source distributions,
+        without needing to create a ``MANIFEST.in`` file, and without having to force
+        regeneration of the ``MANIFEST`` file when your source tree changes.
+        
+        * Automatically generate wrapper scripts or Windows (console and GUI) .exe
+        files for any number of "main" functions in your project.  (Note: this is not
+        a py2exe replacement; the .exe files rely on the local Python installation.)
+        
+        * Transparent Pyrex support, so that your setup.py can list ``.pyx`` files and
+        still work even when the end-user doesn't have Pyrex installed (as long as
+        you include the Pyrex-generated C in your source distribution)
+        
+        * Command aliases - create project-specific, per-user, or site-wide shortcut
+        names for commonly used commands and options
+        
+        * PyPI upload support - upload your source distributions and eggs to PyPI
+        
+        * Deploy your project in "development mode", such that it's available on
+        ``sys.path``, yet can still be edited directly from its source checkout.
+        
+        * Easily extend the distutils with new commands or ``setup()`` arguments, and
+        distribute/reuse your extensions for multiple projects, without copying code.
+        
+        * Create extensible applications and frameworks that automatically discover
+        extensions, using simple "entry points" declared in a project's setup script.
+        
+        In addition to the PyPI downloads, the development version of ``setuptools``
+        is available from the `Python SVN sandbox`_, and in-development versions of the
+        `0.6 branch`_ are available as well.
+        
+        .. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06
+        
+        .. _Python SVN sandbox: http://svn.python.org/projects/sandbox/trunk/setuptools/#egg=setuptools-dev
+        
+        
+Keywords: CPAN PyPI distutils eggs package management
+Platform: UNKNOWN
+Classifier: Development Status :: 3 - Alpha
+Classifier: Intended Audience :: Developers
+Classifier: License :: OSI Approved :: Python Software Foundation License
+Classifier: License :: OSI Approved :: Zope Public License
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Topic :: Software Development :: Libraries :: Python Modules
+Classifier: Topic :: System :: Archiving :: Packaging
+Classifier: Topic :: System :: Systems Administration
+Classifier: Topic :: Utilities
diff --git a/Lib/setuptools.egg-info/entry_points.txt b/Lib/setuptools.egg-info/entry_points.txt
new file mode 100755
index 0000000..0afe2cb
--- /dev/null
+++ b/Lib/setuptools.egg-info/entry_points.txt
@@ -0,0 +1,51 @@
+[distutils.setup_keywords]
+dependency_links = setuptools.dist:assert_string_list
+entry_points = setuptools.dist:check_entry_points
+extras_require = setuptools.dist:check_extras
+package_data = setuptools.dist:check_package_data
+install_requires = setuptools.dist:check_requirements
+include_package_data = setuptools.dist:assert_bool
+exclude_package_data = setuptools.dist:check_package_data
+namespace_packages = setuptools.dist:check_nsp
+test_suite = setuptools.dist:check_test_suite
+eager_resources = setuptools.dist:assert_string_list
+zip_safe = setuptools.dist:assert_bool
+test_loader = setuptools.dist:check_importable
+tests_require = setuptools.dist:check_requirements
+
+[setuptools.file_finders]
+svn_cvs = setuptools.command.sdist:_default_revctrl
+
+[egg_info.writers]
+dependency_links.txt = setuptools.command.egg_info:overwrite_arg
+requires.txt = setuptools.command.egg_info:write_requirements
+PKG-INFO = setuptools.command.egg_info:write_pkg_info
+eager_resources.txt = setuptools.command.egg_info:overwrite_arg
+top_level.txt = setuptools.command.egg_info:write_toplevel_names
+namespace_packages.txt = setuptools.command.egg_info:overwrite_arg
+entry_points.txt = setuptools.command.egg_info:write_entries
+depends.txt = setuptools.command.egg_info:warn_depends_obsolete
+
+[console_scripts]
+easy_install = setuptools.command.easy_install:main
+easy_install-2.5 = setuptools.command.easy_install:main
+
+[distutils.commands]
+bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm
+rotate = setuptools.command.rotate:rotate
+develop = setuptools.command.develop:develop
+setopt = setuptools.command.setopt:setopt
+build_py = setuptools.command.build_py:build_py
+saveopts = setuptools.command.saveopts:saveopts
+egg_info = setuptools.command.egg_info:egg_info
+install_egg_info = setuptools.command.install_egg_info:install_egg_info
+alias = setuptools.command.alias:alias
+easy_install = setuptools.command.easy_install:easy_install
+install_scripts = setuptools.command.install_scripts:install_scripts
+bdist_egg = setuptools.command.bdist_egg:bdist_egg
+install = setuptools.command.install:install
+test = setuptools.command.test:test
+install_lib = setuptools.command.install_lib:install_lib
+build_ext = setuptools.command.build_ext:build_ext
+sdist = setuptools.command.sdist:sdist
+
diff --git a/Lib/setuptools.egg-info/top_level.txt b/Lib/setuptools.egg-info/top_level.txt
new file mode 100644
index 0000000..4577c6a
--- /dev/null
+++ b/Lib/setuptools.egg-info/top_level.txt
@@ -0,0 +1,3 @@
+easy_install
+pkg_resources
+setuptools
diff --git a/Lib/setuptools.egg-info/zip-safe b/Lib/setuptools.egg-info/zip-safe
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/setuptools.egg-info/zip-safe
diff --git a/Lib/setuptools/__init__.py b/Lib/setuptools/__init__.py
new file mode 100644
index 0000000..3921ce2
--- /dev/null
+++ b/Lib/setuptools/__init__.py
@@ -0,0 +1,64 @@
+"""Extensions to the 'distutils' for large or complex distributions"""
+from setuptools.extension import Extension, Library
+from setuptools.dist import Distribution, Feature, _get_unpatched
+import distutils.core, setuptools.command
+from setuptools.depends import Require
+from distutils.core import Command as _Command
+from distutils.util import convert_path
+import os.path
+
+__version__ = '0.7a1'
+__all__ = [
+    'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require',
+    'find_packages'
+]
+
+bootstrap_install_from = None
+
+def find_packages(where='.', exclude=()):
+    """Return a list all Python packages found within directory 'where'
+
+    'where' should be supplied as a "cross-platform" (i.e. URL-style) path; it
+    will be converted to the appropriate local path syntax.  'exclude' is a
+    sequence of package names to exclude; '*' can be used as a wildcard in the
+    names, such that 'foo.*' will exclude all subpackages of 'foo' (but not
+    'foo' itself).
+    """
+    out = []
+    stack=[(convert_path(where), '')]
+    while stack:
+        where,prefix = stack.pop(0)
+        for name in os.listdir(where):
+            fn = os.path.join(where,name)
+            if (os.path.isdir(fn) and
+                os.path.isfile(os.path.join(fn,'__init__.py'))
+            ):
+                out.append(prefix+name); stack.append((fn,prefix+name+'.'))
+    for pat in exclude:
+        from fnmatch import fnmatchcase
+        out = [item for item in out if not fnmatchcase(item,pat)]
+    return out
+
+setup = distutils.core.setup
+
+_Command = _get_unpatched(_Command)
+
+class Command(_Command):
+    __doc__ = _Command.__doc__
+
+    command_consumes_arguments = False
+
+    def __init__(self, dist, **kw):
+        # Add support for keyword arguments
+        _Command.__init__(self,dist)
+        for k,v in kw.items():
+            setattr(self,k,v)
+
+    def reinitialize_command(self, command, reinit_subcommands=0, **kw):
+        cmd = _Command.reinitialize_command(self, command, reinit_subcommands)
+        for k,v in kw.items():
+            setattr(cmd,k,v)    # update command with keywords
+        return cmd
+
+import distutils.core
+distutils.core.Command = Command    # we can't patch distutils.cmd, alas
diff --git a/Lib/setuptools/archive_util.py b/Lib/setuptools/archive_util.py
new file mode 100755
index 0000000..dd9c684
--- /dev/null
+++ b/Lib/setuptools/archive_util.py
@@ -0,0 +1,200 @@
+"""Utilities for extracting common archive formats"""
+
+
+__all__ = [
+    "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter",
+    "UnrecognizedFormat", "extraction_drivers", "unpack_directory",
+]
+
+import zipfile, tarfile, os, shutil
+from pkg_resources import ensure_directory
+from distutils.errors import DistutilsError
+
+class UnrecognizedFormat(DistutilsError):
+    """Couldn't recognize the archive type"""
+
+def default_filter(src,dst):
+    """The default progress/filter callback; returns True for all files"""
+    return dst
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def unpack_archive(filename, extract_dir, progress_filter=default_filter,
+    drivers=None
+):
+    """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat``
+
+    `progress_filter` is a function taking two arguments: a source path
+    internal to the archive ('/'-separated), and a filesystem path where it
+    will be extracted.  The callback must return the desired extract path
+    (which may be the same as the one passed in), or else ``None`` to skip
+    that file or directory.  The callback can thus be used to report on the
+    progress of the extraction, as well as to filter the items extracted or
+    alter their extraction paths.
+
+    `drivers`, if supplied, must be a non-empty sequence of functions with the
+    same signature as this function (minus the `drivers` argument), that raise
+    ``UnrecognizedFormat`` if they do not support extracting the designated
+    archive type.  The `drivers` are tried in sequence until one is found that
+    does not raise an error, or until all are exhausted (in which case
+    ``UnrecognizedFormat`` is raised).  If you do not supply a sequence of
+    drivers, the module's ``extraction_drivers`` constant will be used, which
+    means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that
+    order.
+    """
+    for driver in drivers or extraction_drivers:
+        try:
+            driver(filename, extract_dir, progress_filter)
+        except UnrecognizedFormat:
+            continue
+        else:
+            return
+    else:
+        raise UnrecognizedFormat(
+            "Not a recognized archive type: %s" % filename
+        )
+
+
+
+
+
+
+
+def unpack_directory(filename, extract_dir, progress_filter=default_filter):
+    """"Unpack" a directory, using the same interface as for archives
+
+    Raises ``UnrecognizedFormat`` if `filename` is not a directory
+    """
+    if not os.path.isdir(filename):
+        raise UnrecognizedFormat("%s is not a directory" % (filename,))
+
+    paths = {filename:('',extract_dir)}
+    for base, dirs, files in os.walk(filename):
+        src,dst = paths[base]
+        for d in dirs:
+            paths[os.path.join(base,d)] = src+d+'/', os.path.join(dst,d)
+        for f in files:
+            name = src+f
+            target = os.path.join(dst,f)
+            target = progress_filter(src+f, target)
+            if not target:
+                continue    # skip non-files
+            ensure_directory(target)
+            f = os.path.join(base,f)
+            shutil.copyfile(f, target)
+            shutil.copystat(f, target)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def unpack_zipfile(filename, extract_dir, progress_filter=default_filter):
+    """Unpack zip `filename` to `extract_dir`
+
+    Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined
+    by ``zipfile.is_zipfile()``).  See ``unpack_archive()`` for an explanation
+    of the `progress_filter` argument.
+    """
+
+    if not zipfile.is_zipfile(filename):
+        raise UnrecognizedFormat("%s is not a zip file" % (filename,))
+
+    z = zipfile.ZipFile(filename)
+    try:
+        for info in z.infolist():
+            name = info.filename
+
+            # don't extract absolute paths or ones with .. in them
+            if name.startswith('/') or '..' in name:
+                continue
+
+            target = os.path.join(extract_dir, *name.split('/'))
+            target = progress_filter(name, target)
+            if not target:
+                continue
+            if name.endswith('/'):
+                # directory
+                ensure_directory(target)
+            else:
+                # file
+                ensure_directory(target)
+                data = z.read(info.filename)
+                f = open(target,'wb')
+                try:
+                    f.write(data)
+                finally:
+                    f.close()
+                    del data
+    finally:
+        z.close()
+
+
+def unpack_tarfile(filename, extract_dir, progress_filter=default_filter):
+    """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir`
+
+    Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined
+    by ``tarfile.open()``).  See ``unpack_archive()`` for an explanation
+    of the `progress_filter` argument.
+    """
+
+    try:
+        tarobj = tarfile.open(filename)
+    except tarfile.TarError:
+        raise UnrecognizedFormat(
+            "%s is not a compressed or uncompressed tar file" % (filename,)
+        )
+
+    try:
+        tarobj.chown = lambda *args: None   # don't do any chowning!
+        for member in tarobj:
+            if member.isfile() or member.isdir():
+                name = member.name
+                # don't extract absolute paths or ones with .. in them
+                if not name.startswith('/') and '..' not in name:
+                    dst = os.path.join(extract_dir, *name.split('/'))
+                    dst = progress_filter(name, dst)
+                    if dst:
+                        if dst.endswith(os.sep):
+                            dst = dst[:-1]
+                        tarobj._extract_member(member,dst)  # XXX Ugh
+        return True
+    finally:
+        tarobj.close()
+
+
+
+
+extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile
diff --git a/Lib/setuptools/cli.exe b/Lib/setuptools/cli.exe
new file mode 100755
index 0000000..fc83339
--- /dev/null
+++ b/Lib/setuptools/cli.exe
Binary files differ
diff --git a/Lib/setuptools/command/__init__.py b/Lib/setuptools/command/__init__.py
new file mode 100644
index 0000000..bff53e7
--- /dev/null
+++ b/Lib/setuptools/command/__init__.py
@@ -0,0 +1,19 @@
+__all__ = [
+    'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop',
+    'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts',
+    'sdist', 'setopt', 'test', 'upload', 'install_egg_info', 'install_scripts',
+]
+
+import sys
+if sys.version>='2.5':
+    # In Python 2.5 and above, distutils includes its own upload command
+    __all__.remove('upload')
+
+
+from distutils.command.bdist import bdist
+
+if 'egg' not in bdist.format_commands:
+    bdist.format_command['egg'] = ('bdist_egg', "Python .egg file")
+    bdist.format_commands.append('egg')
+
+del bdist, sys
diff --git a/Lib/setuptools/command/alias.py b/Lib/setuptools/command/alias.py
new file mode 100755
index 0000000..1df474a
--- /dev/null
+++ b/Lib/setuptools/command/alias.py
@@ -0,0 +1,79 @@
+import distutils, os
+from setuptools import Command
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import *
+from setuptools.command.setopt import edit_config, option_base, config_file
+
+def shquote(arg):
+    """Quote an argument for later parsing by shlex.split()"""
+    for c in '"', "'", "\\", "#":
+        if c in arg: return repr(arg)
+    if arg.split()<>[arg]:
+        return repr(arg)
+    return arg
+
+
+class alias(option_base):
+    """Define a shortcut that invokes one or more commands"""
+
+    description = "define a shortcut to invoke one or more commands"
+    command_consumes_arguments = True
+
+    user_options = [
+        ('remove',   'r', 'remove (unset) the alias'),
+    ] + option_base.user_options
+
+    boolean_options = option_base.boolean_options + ['remove']
+
+    def initialize_options(self):
+        option_base.initialize_options(self)
+        self.args = None
+        self.remove = None
+
+    def finalize_options(self):
+        option_base.finalize_options(self)
+        if self.remove and len(self.args)<>1:
+            raise DistutilsOptionError(
+                "Must specify exactly one argument (the alias name) when "
+                "using --remove"
+            )
+
+    def run(self):
+        aliases = self.distribution.get_option_dict('aliases')
+
+        if not self.args:
+            print "Command Aliases"
+            print "---------------"
+            for alias in aliases:
+                print "setup.py alias", format_alias(alias, aliases)
+            return
+
+        elif len(self.args)==1:
+            alias, = self.args
+            if self.remove:
+                command = None
+            elif alias in aliases:
+                print "setup.py alias", format_alias(alias, aliases)
+                return
+            else:
+                print "No alias definition found for %r" % alias
+                return
+        else:
+            alias = self.args[0]
+            command = ' '.join(map(shquote,self.args[1:]))
+
+        edit_config(self.filename, {'aliases': {alias:command}}, self.dry_run)
+
+
+def format_alias(name, aliases):
+    source, command = aliases[name]
+    if source == config_file('global'):
+        source = '--global-config '
+    elif source == config_file('user'):
+        source = '--user-config '
+    elif source == config_file('local'):
+        source = ''
+    else:
+        source = '--filename=%r' % source
+    return source+name+' '+command
diff --git a/Lib/setuptools/command/bdist_egg.py b/Lib/setuptools/command/bdist_egg.py
new file mode 100644
index 0000000..617d88d
--- /dev/null
+++ b/Lib/setuptools/command/bdist_egg.py
@@ -0,0 +1,449 @@
+"""setuptools.command.bdist_egg
+
+Build .egg distributions"""
+
+# This module should be kept compatible with Python 2.3
+import sys, os, marshal
+from setuptools import Command
+from distutils.dir_util import remove_tree, mkpath
+from distutils.sysconfig import get_python_version, get_python_lib
+from distutils import log
+from pkg_resources import get_build_platform, Distribution
+from types import CodeType
+from setuptools.extension import Library
+
+def write_stub(resource, pyfile):
+    f = open(pyfile,'w')
+    f.write('\n'.join([
+        "def __bootstrap__():",
+        "   global __bootstrap__, __loader__, __file__",
+        "   import sys, pkg_resources, imp",
+        "   __file__ = pkg_resources.resource_filename(__name__,%r)"
+            % resource,
+        "   del __bootstrap__, __loader__",
+        "   imp.load_dynamic(__name__,__file__)",
+        "__bootstrap__()",
+        "" # terminal \n
+    ]))
+    f.close()
+
+# stub __init__.py for packages distributed without one
+NS_PKG_STUB = '__import__("pkg_resources").declare_namespace(__name__)'
+
+
+
+
+
+
+
+
+
+
+class bdist_egg(Command):
+
+    description = "create an \"egg\" distribution"
+
+    user_options = [
+        ('bdist-dir=', 'b',
+            "temporary directory for creating the distribution"),
+        ('plat-name=', 'p',
+                     "platform name to embed in generated filenames "
+                     "(default: %s)" % get_build_platform()),
+        ('exclude-source-files', None,
+                     "remove all .py files from the generated egg"),
+        ('keep-temp', 'k',
+                     "keep the pseudo-installation tree around after " +
+                     "creating the distribution archive"),
+        ('dist-dir=', 'd',
+                     "directory to put final built distributions in"),
+        ('skip-build', None,
+                     "skip rebuilding everything (for testing/debugging)"),
+    ]
+
+    boolean_options = [
+        'keep-temp', 'skip-build', 'exclude-source-files'
+    ]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def initialize_options (self):
+        self.bdist_dir = None
+        self.plat_name = None
+        self.keep_temp = 0
+        self.dist_dir = None
+        self.skip_build = 0
+        self.egg_output = None
+        self.exclude_source_files = None
+
+
+    def finalize_options(self):
+        ei_cmd = self.get_finalized_command("egg_info")
+        self.egg_info = ei_cmd.egg_info
+
+        if self.bdist_dir is None:
+            bdist_base = self.get_finalized_command('bdist').bdist_base
+            self.bdist_dir = os.path.join(bdist_base, 'egg')
+
+        if self.plat_name is None:
+            self.plat_name = get_build_platform()
+
+        self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
+
+        if self.egg_output is None:
+
+            # Compute filename of the output egg
+            basename = Distribution(
+                None, None, ei_cmd.egg_name, ei_cmd.egg_version,
+                get_python_version(),
+                self.distribution.has_ext_modules() and self.plat_name
+            ).egg_name()
+
+            self.egg_output = os.path.join(self.dist_dir, basename+'.egg')
+
+
+
+
+
+
+
+
+    def do_install_data(self):
+        # Hack for packages that install data to install's --install-lib
+        self.get_finalized_command('install').install_lib = self.bdist_dir
+
+        site_packages = os.path.normcase(os.path.realpath(get_python_lib()))
+        old, self.distribution.data_files = self.distribution.data_files,[]
+
+        for item in old:
+            if isinstance(item,tuple) and len(item)==2:
+                if os.path.isabs(item[0]):
+                    realpath = os.path.realpath(item[0])
+                    normalized = os.path.normcase(realpath)
+                    if normalized==site_packages or normalized.startswith(
+                        site_packages+os.sep
+                    ):
+                        item = realpath[len(site_packages)+1:], item[1]
+                    # XXX else: raise ???
+            self.distribution.data_files.append(item)
+
+        try:
+            log.info("installing package data to %s" % self.bdist_dir)
+            self.call_command('install_data', force=0, root=None)
+        finally:
+            self.distribution.data_files = old
+
+
+    def get_outputs(self):
+        return [self.egg_output]
+
+
+    def call_command(self,cmdname,**kw):
+        """Invoke reinitialized command `cmdname` with keyword args"""
+        for dirname in INSTALL_DIRECTORY_ATTRS:
+            kw.setdefault(dirname,self.bdist_dir)
+        kw.setdefault('skip_build',self.skip_build)
+        kw.setdefault('dry_run', self.dry_run)
+        cmd = self.reinitialize_command(cmdname, **kw)
+        self.run_command(cmdname)
+        return cmd
+
+
+    def run(self):
+        # Generate metadata first
+        self.run_command("egg_info")
+
+        # We run install_lib before install_data, because some data hacks
+        # pull their data path from the install_lib command.
+        log.info("installing library code to %s" % self.bdist_dir)
+        instcmd = self.get_finalized_command('install')
+        old_root = instcmd.root; instcmd.root = None
+        cmd = self.call_command('install_lib', warn_dir=0)
+        instcmd.root = old_root
+
+        all_outputs, ext_outputs = self.get_ext_outputs()
+        self.stubs = []
+        to_compile = []
+        for (p,ext_name) in enumerate(ext_outputs):
+            filename,ext = os.path.splitext(ext_name)
+            pyfile = os.path.join(self.bdist_dir, filename + '.py')
+            self.stubs.append(pyfile)
+            log.info("creating stub loader for %s" % ext_name)
+            if not self.dry_run:
+                write_stub(os.path.basename(ext_name), pyfile)
+            to_compile.append(pyfile)
+            ext_outputs[p] = ext_name.replace(os.sep,'/')
+
+        to_compile.extend(self.make_init_files())
+        if to_compile:
+            cmd.byte_compile(to_compile)
+
+        if self.distribution.data_files:
+            self.do_install_data()
+
+        # Make the EGG-INFO directory
+        archive_root = self.bdist_dir
+        egg_info = os.path.join(archive_root,'EGG-INFO')
+        self.mkpath(egg_info)
+        if self.distribution.scripts:
+            script_dir = os.path.join(egg_info, 'scripts')
+            log.info("installing scripts to %s" % script_dir)
+            self.call_command('install_scripts',install_dir=script_dir,no_ep=1)
+
+        native_libs = os.path.join(self.egg_info,"native_libs.txt")
+        if all_outputs:
+            log.info("writing %s" % native_libs)
+            if not self.dry_run:
+                libs_file = open(native_libs, 'wt')
+                libs_file.write('\n'.join(all_outputs))
+                libs_file.write('\n')
+                libs_file.close()
+        elif os.path.isfile(native_libs):
+            log.info("removing %s" % native_libs)
+            if not self.dry_run:
+                os.unlink(native_libs)
+
+        for filename in os.listdir(self.egg_info):
+            path = os.path.join(self.egg_info,filename)
+            if os.path.isfile(path):
+                self.copy_file(path,os.path.join(egg_info,filename))
+
+        write_safety_flag(
+            os.path.join(archive_root,'EGG-INFO'), self.zip_safe()
+        )
+
+        if os.path.exists(os.path.join(self.egg_info,'depends.txt')):
+            log.warn(
+                "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n"
+                "Use the install_requires/extras_require setup() args instead."
+            )
+
+        if self.exclude_source_files:
+            self.zap_pyfiles()
+
+        # Make the archive
+        make_zipfile(self.egg_output, archive_root, verbose=self.verbose,
+                          dry_run=self.dry_run)
+        if not self.keep_temp:
+            remove_tree(self.bdist_dir, dry_run=self.dry_run)
+
+        # Add to 'Distribution.dist_files' so that the "upload" command works
+        getattr(self.distribution,'dist_files',[]).append(
+            ('bdist_egg',get_python_version(),self.egg_output))
+
+    def zap_pyfiles(self):
+        log.info("Removing .py files from temporary directory")
+        for base,dirs,files in walk_egg(self.bdist_dir):
+            for name in files:
+                if name.endswith('.py'):
+                    path = os.path.join(base,name)
+                    log.debug("Deleting %s", path)
+                    os.unlink(path)
+
+    def zip_safe(self):
+        safe = getattr(self.distribution,'zip_safe',None)
+        if safe is not None:
+            return safe
+        log.warn("zip_safe flag not set; analyzing archive contents...")
+        return analyze_egg(self.bdist_dir, self.stubs)
+
+    def make_init_files(self):
+        """Create missing package __init__ files"""
+        init_files = []
+        for base,dirs,files in walk_egg(self.bdist_dir):
+            if base==self.bdist_dir:
+                # don't put an __init__ in the root
+                continue
+            for name in files:
+                if name.endswith('.py'):
+                    if '__init__.py' not in files:
+                        pkg = base[len(self.bdist_dir)+1:].replace(os.sep,'.')
+                        if self.distribution.has_contents_for(pkg):
+                            log.warn("Creating missing __init__.py for %s",pkg)
+                            filename = os.path.join(base,'__init__.py')
+                            if not self.dry_run:
+                                f = open(filename,'w'); f.write(NS_PKG_STUB)
+                                f.close()
+                            init_files.append(filename)
+                    break
+            else:
+                # not a package, don't traverse to subdirectories
+                dirs[:] = []
+
+        return init_files
+
+    def get_ext_outputs(self):
+        """Get a list of relative paths to C extensions in the output distro"""
+
+        all_outputs = []
+        ext_outputs = []
+
+        paths = {self.bdist_dir:''}
+        for base, dirs, files in os.walk(self.bdist_dir):
+            for filename in files:
+                if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS:
+                    all_outputs.append(paths[base]+filename)
+            for filename in dirs:
+                paths[os.path.join(base,filename)] = paths[base]+filename+'/'
+
+        if self.distribution.has_ext_modules():
+            build_cmd = self.get_finalized_command('build_ext')
+            for ext in build_cmd.extensions:
+                if isinstance(ext,Library):
+                    continue
+                fullname = build_cmd.get_ext_fullname(ext.name)
+                filename = build_cmd.get_ext_filename(fullname)
+                if not os.path.basename(filename).startswith('dl-'):
+                    if os.path.exists(os.path.join(self.bdist_dir,filename)):
+                        ext_outputs.append(filename)
+
+        return all_outputs, ext_outputs
+
+
+NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split())
+
+
+
+
+
+
+
+
+
+
+
+
+def walk_egg(egg_dir):
+    """Walk an unpacked egg's contents, skipping the metadata directory"""
+    walker = os.walk(egg_dir)
+    base,dirs,files = walker.next()
+    if 'EGG-INFO' in dirs:
+        dirs.remove('EGG-INFO')
+    yield base,dirs,files
+    for bdf in walker:
+        yield bdf
+
+def analyze_egg(egg_dir, stubs):
+    # check for existing flag in EGG-INFO
+    for flag,fn in safety_flags.items():
+        if os.path.exists(os.path.join(egg_dir,'EGG-INFO',fn)):
+            return flag
+
+    safe = True
+    for base, dirs, files in walk_egg(egg_dir):
+        for name in files:
+            if name.endswith('.py') or name.endswith('.pyw'):
+                continue
+            elif name.endswith('.pyc') or name.endswith('.pyo'):
+                # always scan, even if we already know we're not safe
+                safe = scan_module(egg_dir, base, name, stubs) and safe
+    return safe
+
+def write_safety_flag(egg_dir, safe):
+    # Write or remove zip safety flag file(s)
+    for flag,fn in safety_flags.items():
+        fn = os.path.join(egg_dir, fn)
+        if os.path.exists(fn):
+            if safe is None or bool(safe)<>flag:
+                os.unlink(fn)
+        elif safe is not None and bool(safe)==flag:
+            open(fn,'w').close()
+
+safety_flags = {
+    True: 'zip-safe',
+    False: 'not-zip-safe',
+}
+
+def scan_module(egg_dir, base, name, stubs):
+    """Check whether module possibly uses unsafe-for-zipfile stuff"""
+
+    filename = os.path.join(base,name)
+    if filename[:-1] in stubs:
+        return True     # Extension module
+    pkg = base[len(egg_dir)+1:].replace(os.sep,'.')
+    module = pkg+(pkg and '.' or '')+os.path.splitext(name)[0]
+    f = open(filename,'rb'); f.read(8)   # skip magic & date
+    code = marshal.load(f);  f.close()
+    safe = True
+    symbols = dict.fromkeys(iter_symbols(code))
+    for bad in ['__file__', '__path__']:
+        if bad in symbols:
+            log.warn("%s: module references %s", module, bad)
+            safe = False
+    if 'inspect' in symbols:
+        for bad in [
+            'getsource', 'getabsfile', 'getsourcefile', 'getfile'
+            'getsourcelines', 'findsource', 'getcomments', 'getframeinfo',
+            'getinnerframes', 'getouterframes', 'stack', 'trace'
+        ]:
+            if bad in symbols:
+                log.warn("%s: module MAY be using inspect.%s", module, bad)
+                safe = False
+    if '__name__' in symbols and '__main__' in symbols and '.' not in module:
+        if get_python_version()>="2.4":
+            log.warn("%s: top-level module may be 'python -m' script", module)
+            safe = False
+    return safe
+
+def iter_symbols(code):
+    """Yield names and strings used by `code` and its nested code objects"""
+    for name in code.co_names: yield name
+    for const in code.co_consts:
+        if isinstance(const,basestring):
+            yield const
+        elif isinstance(const,CodeType):
+            for name in iter_symbols(const):
+                yield name
+
+# Attribute names of options for commands that might need to be convinced to
+# install to the egg build directory
+
+INSTALL_DIRECTORY_ATTRS = [
+    'install_lib', 'install_dir', 'install_data', 'install_base'
+]
+
+def make_zipfile (zip_filename, base_dir, verbose=0, dry_run=0, compress=None):
+    """Create a zip file from all the files under 'base_dir'.  The output
+    zip file will be named 'base_dir' + ".zip".  Uses either the "zipfile"
+    Python module (if available) or the InfoZIP "zip" utility (if installed
+    and found on the default search path).  If neither tool is available,
+    raises DistutilsExecError.  Returns the name of the output zip file.
+    """
+    import zipfile
+    mkpath(os.path.dirname(zip_filename), dry_run=dry_run)
+    log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir)
+
+    def visit (z, dirname, names):
+        for name in names:
+            path = os.path.normpath(os.path.join(dirname, name))
+            if os.path.isfile(path):
+                p = path[len(base_dir)+1:]
+                if not dry_run:
+                    z.write(path, p)
+                log.debug("adding '%s'" % p)
+
+    if compress is None:
+        compress = (sys.version>="2.4") # avoid 2.3 zipimport bug when 64 bits
+
+    compression = [zipfile.ZIP_STORED, zipfile.ZIP_DEFLATED][bool(compress)]
+    if not dry_run:
+        z = zipfile.ZipFile(zip_filename, "w", compression=compression)
+        os.path.walk(base_dir, visit, z)
+        z.close()
+    else:
+        os.path.walk(base_dir, visit, None)
+
+    return zip_filename
diff --git a/Lib/setuptools/command/bdist_rpm.py b/Lib/setuptools/command/bdist_rpm.py
new file mode 100755
index 0000000..00e07ac
--- /dev/null
+++ b/Lib/setuptools/command/bdist_rpm.py
@@ -0,0 +1,37 @@
+# This is just a kludge so that bdist_rpm doesn't guess wrong about the
+# distribution name and version, if the egg_info command is going to alter
+# them, and another kludge to allow you to build old-style non-egg RPMs
+
+from distutils.command.bdist_rpm import bdist_rpm as _bdist_rpm
+
+class bdist_rpm(_bdist_rpm):
+
+    def initialize_options(self):
+        _bdist_rpm.initialize_options(self)
+        self.no_egg = None
+
+    def run(self):
+        self.run_command('egg_info')    # ensure distro name is up-to-date
+        _bdist_rpm.run(self)
+
+    def _make_spec_file(self):
+        version = self.distribution.get_version()
+        rpmversion = version.replace('-','_')
+        spec = _bdist_rpm._make_spec_file(self)
+        line23 = '%define version '+version
+        line24 = '%define version '+rpmversion
+        spec  = [
+            line.replace(
+                "Source0: %{name}-%{version}.tar",
+                "Source0: %{name}-%{unmangled_version}.tar"
+            ).replace(
+                "setup.py install ",
+                "setup.py install --single-version-externally-managed "
+            ).replace(
+                "%setup",
+                "%setup -n %{name}-%{unmangled_version}"
+            ).replace(line23,line24)
+            for line in spec
+        ]
+        spec.insert(spec.index(line24)+1, "%define unmangled_version "+version)
+        return spec
diff --git a/Lib/setuptools/command/build_ext.py b/Lib/setuptools/command/build_ext.py
new file mode 100644
index 0000000..f8551fb
--- /dev/null
+++ b/Lib/setuptools/command/build_ext.py
@@ -0,0 +1,285 @@
+from distutils.command.build_ext import build_ext as _du_build_ext
+try:
+    # Attempt to use Pyrex for building extensions, if available
+    from Pyrex.Distutils.build_ext import build_ext as _build_ext
+except ImportError:
+    _build_ext = _du_build_ext
+
+import os, sys
+from distutils.file_util import copy_file
+from setuptools.extension import Library
+from distutils.ccompiler import new_compiler
+from distutils.sysconfig import customize_compiler, get_config_var
+get_config_var("LDSHARED")  # make sure _config_vars is initialized
+from distutils.sysconfig import _config_vars
+from distutils import log
+from distutils.errors import *
+
+have_rtld = False
+use_stubs = False
+libtype = 'shared'
+
+if sys.platform == "darwin":
+    use_stubs = True
+elif os.name != 'nt':
+    try:
+        from dl import RTLD_NOW
+        have_rtld = True
+        use_stubs = True
+    except ImportError:
+        pass
+
+def if_dl(s):
+    if have_rtld:
+        return s
+    return ''
+
+
+
+
+
+
+class build_ext(_build_ext):
+    def run(self):
+        """Build extensions in build directory, then copy if --inplace"""
+        old_inplace, self.inplace = self.inplace, 0
+        _build_ext.run(self)
+        self.inplace = old_inplace
+        if old_inplace:
+            self.copy_extensions_to_source()
+
+    def copy_extensions_to_source(self):
+        build_py = self.get_finalized_command('build_py')
+        for ext in self.extensions:
+            fullname = self.get_ext_fullname(ext.name)
+            filename = self.get_ext_filename(fullname)
+            modpath = fullname.split('.')
+            package = '.'.join(modpath[:-1])
+            package_dir = build_py.get_package_dir(package)
+            dest_filename = os.path.join(package_dir,os.path.basename(filename))
+            src_filename = os.path.join(self.build_lib,filename)
+
+            # Always copy, even if source is older than destination, to ensure
+            # that the right extensions for the current Python/platform are
+            # used.
+            copy_file(
+                src_filename, dest_filename, verbose=self.verbose,
+                dry_run=self.dry_run
+            )
+            if ext._needs_stub:
+                self.write_stub(package_dir or os.curdir, ext, True)
+
+
+    if _build_ext is not _du_build_ext:
+        # Workaround for problems using some Pyrex versions w/SWIG and/or 2.4
+        def swig_sources(self, sources, *otherargs):
+            # first do any Pyrex processing
+            sources = _build_ext.swig_sources(self, sources) or sources
+            # Then do any actual SWIG stuff on the remainder
+            return _du_build_ext.swig_sources(self, sources, *otherargs)
+
+
+
+    def get_ext_filename(self, fullname):
+        filename = _build_ext.get_ext_filename(self,fullname)
+        ext = self.ext_map[fullname]
+        if isinstance(ext,Library):
+            fn, ext = os.path.splitext(filename)
+            return self.shlib_compiler.library_filename(fn,libtype)
+        elif use_stubs and ext._links_to_dynamic:
+            d,fn = os.path.split(filename)
+            return os.path.join(d,'dl-'+fn)
+        else:
+            return filename
+
+    def initialize_options(self):
+        _build_ext.initialize_options(self)
+        self.shlib_compiler = None
+        self.shlibs = []
+        self.ext_map = {}
+
+    def finalize_options(self):
+        _build_ext.finalize_options(self)
+        self.extensions = self.extensions or []
+        self.check_extensions_list(self.extensions)
+        self.shlibs = [ext for ext in self.extensions
+                        if isinstance(ext,Library)]
+        if self.shlibs:
+            self.setup_shlib_compiler()
+        for ext in self.extensions:
+            fullname = ext._full_name = self.get_ext_fullname(ext.name)
+            self.ext_map[fullname] = ext
+            ltd = ext._links_to_dynamic = \
+                self.shlibs and self.links_to_dynamic(ext) or False
+            ext._needs_stub = ltd and use_stubs and not isinstance(ext,Library)
+            filename = ext._file_name = self.get_ext_filename(fullname)
+            libdir = os.path.dirname(os.path.join(self.build_lib,filename))
+            if ltd and libdir not in ext.library_dirs:
+                ext.library_dirs.append(libdir)
+            if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs:
+                ext.runtime_library_dirs.append(os.curdir)
+
+
+
+    def setup_shlib_compiler(self):
+        compiler = self.shlib_compiler = new_compiler(
+            compiler=self.compiler, dry_run=self.dry_run, force=self.force
+        )
+        if sys.platform == "darwin":
+            tmp = _config_vars.copy()
+            try:
+                # XXX Help!  I don't have any idea whether these are right...
+                _config_vars['LDSHARED'] = "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup"
+                _config_vars['CCSHARED'] = " -dynamiclib"
+                _config_vars['SO'] = ".dylib"
+                customize_compiler(compiler)
+            finally:
+                _config_vars.clear()
+                _config_vars.update(tmp)
+        else:
+            customize_compiler(compiler)
+
+        if self.include_dirs is not None:
+            compiler.set_include_dirs(self.include_dirs)
+        if self.define is not None:
+            # 'define' option is a list of (name,value) tuples
+            for (name,value) in self.define:
+                compiler.define_macro(name, value)
+        if self.undef is not None:
+            for macro in self.undef:
+                compiler.undefine_macro(macro)
+        if self.libraries is not None:
+            compiler.set_libraries(self.libraries)
+        if self.library_dirs is not None:
+            compiler.set_library_dirs(self.library_dirs)
+        if self.rpath is not None:
+            compiler.set_runtime_library_dirs(self.rpath)
+        if self.link_objects is not None:
+            compiler.set_link_objects(self.link_objects)
+
+        # hack so distutils' build_extension() builds a library instead
+        compiler.link_shared_object = link_shared_object.__get__(compiler)
+
+
+
+    def get_export_symbols(self, ext):
+        if isinstance(ext,Library):
+            return ext.export_symbols
+        return _build_ext.get_export_symbols(self,ext)
+
+    def build_extension(self, ext):
+        _compiler = self.compiler
+        try:
+            if isinstance(ext,Library):
+                self.compiler = self.shlib_compiler
+            _build_ext.build_extension(self,ext)
+            if ext._needs_stub:
+                self.write_stub(
+                    self.get_finalized_command('build_py').build_lib, ext
+                )
+        finally:
+            self.compiler = _compiler
+
+    def links_to_dynamic(self, ext):
+        """Return true if 'ext' links to a dynamic lib in the same package"""
+        # XXX this should check to ensure the lib is actually being built
+        # XXX as dynamic, and not just using a locally-found version or a
+        # XXX static-compiled version
+        libnames = dict.fromkeys([lib._full_name for lib in self.shlibs])
+        pkg = '.'.join(ext._full_name.split('.')[:-1]+[''])
+        for libname in ext.libraries:
+            if pkg+libname in libnames: return True
+        return False
+
+    def get_outputs(self):
+        outputs = _build_ext.get_outputs(self)
+        optimize = self.get_finalized_command('build_py').optimize
+        for ext in self.extensions:
+            if ext._needs_stub:
+                base = os.path.join(self.build_lib, *ext._full_name.split('.'))
+                outputs.append(base+'.py')
+                outputs.append(base+'.pyc')
+                if optimize:
+                    outputs.append(base+'.pyo')
+        return outputs
+
+    def write_stub(self, output_dir, ext, compile=False):
+        log.info("writing stub loader for %s to %s",ext._full_name, output_dir)
+        stub_file = os.path.join(output_dir, *ext._full_name.split('.'))+'.py'
+        if compile and os.path.exists(stub_file):
+            raise DistutilsError(stub_file+" already exists! Please delete.")
+        if not self.dry_run:
+            f = open(stub_file,'w')
+            f.write('\n'.join([
+                "def __bootstrap__():",
+                "   global __bootstrap__, __file__, __loader__",
+                "   import sys, os, pkg_resources, imp"+if_dl(", dl"),
+                "   __file__ = pkg_resources.resource_filename(__name__,%r)"
+                   % os.path.basename(ext._file_name),
+                "   del __bootstrap__",
+                "   if '__loader__' in globals():",
+                "       del __loader__",
+                if_dl("   old_flags = sys.getdlopenflags()"),
+                "   old_dir = os.getcwd()",
+                "   try:",
+                "     os.chdir(os.path.dirname(__file__))",
+                if_dl("     sys.setdlopenflags(dl.RTLD_NOW)"),
+                "     imp.load_dynamic(__name__,__file__)",
+                "   finally:",
+                if_dl("     sys.setdlopenflags(old_flags)"),
+                "     os.chdir(old_dir)",
+                "__bootstrap__()",
+                "" # terminal \n
+            ]))
+            f.close()
+        if compile:
+            from distutils.util import byte_compile
+            byte_compile([stub_file], optimize=0,
+                         force=True, dry_run=self.dry_run)
+            optimize = self.get_finalized_command('install_lib').optimize
+            if optimize > 0:
+                byte_compile([stub_file], optimize=optimize,
+                             force=True, dry_run=self.dry_run)
+            if os.path.exists(stub_file) and not self.dry_run:
+                os.unlink(stub_file)
+
+
+if use_stubs or os.name=='nt':
+    # Build shared libraries
+    #
+    def link_shared_object(self, objects, output_libname, output_dir=None,
+        libraries=None, library_dirs=None, runtime_library_dirs=None,
+        export_symbols=None, debug=0, extra_preargs=None,
+        extra_postargs=None, build_temp=None, target_lang=None
+    ):  self.link(
+            self.SHARED_LIBRARY, objects, output_libname,
+            output_dir, libraries, library_dirs, runtime_library_dirs,
+            export_symbols, debug, extra_preargs, extra_postargs,
+            build_temp, target_lang
+        )
+else:
+    # Build static libraries everywhere else
+    libtype = 'static'
+
+    def link_shared_object(self, objects, output_libname, output_dir=None,
+        libraries=None, library_dirs=None, runtime_library_dirs=None,
+        export_symbols=None, debug=0, extra_preargs=None,
+        extra_postargs=None, build_temp=None, target_lang=None
+    ):
+        # XXX we need to either disallow these attrs on Library instances,
+        #     or warn/abort here if set, or something...
+        #libraries=None, library_dirs=None, runtime_library_dirs=None,
+        #export_symbols=None, extra_preargs=None, extra_postargs=None,
+        #build_temp=None
+
+        assert output_dir is None   # distutils build_ext doesn't pass this
+        output_dir,filename = os.path.split(output_libname)
+        basename, ext = os.path.splitext(filename)
+        if self.library_filename("x").startswith('lib'):
+            # strip 'lib' prefix; this is kludgy if some platform uses
+            # a different prefix
+            basename = basename[3:]
+
+        self.create_static_lib(
+            objects, basename, output_dir, debug, target_lang
+        )
diff --git a/Lib/setuptools/command/build_py.py b/Lib/setuptools/command/build_py.py
new file mode 100644
index 0000000..77a9b23
--- /dev/null
+++ b/Lib/setuptools/command/build_py.py
@@ -0,0 +1,192 @@
+import os.path, sys, fnmatch
+from distutils.command.build_py import build_py as _build_py
+from distutils.util import convert_path
+from glob import glob
+
+class build_py(_build_py):
+    """Enhanced 'build_py' command that includes data files with packages
+
+    The data files are specified via a 'package_data' argument to 'setup()'.
+    See 'setuptools.dist.Distribution' for more details.
+
+    Also, this version of the 'build_py' command allows you to specify both
+    'py_modules' and 'packages' in the same setup operation.
+    """
+    def finalize_options(self):
+        _build_py.finalize_options(self)
+        self.package_data = self.distribution.package_data
+        self.exclude_package_data = self.distribution.exclude_package_data or {}
+        if 'data_files' in self.__dict__: del self.__dict__['data_files']
+
+    def run(self):
+        """Build modules, packages, and copy data files to build directory"""
+        if not self.py_modules and not self.packages:
+            return
+
+        if self.py_modules:
+            self.build_modules()
+
+        if self.packages:
+            self.build_packages()
+            self.build_package_data()
+
+        # Only compile actual .py files, using our base class' idea of what our
+        # output files are.
+        self.byte_compile(_build_py.get_outputs(self, include_bytecode=0))
+
+    def __getattr__(self,attr):
+        if attr=='data_files':  # lazily compute data files
+            self.data_files = files = self._get_data_files(); return files
+        return _build_py.__getattr__(self,attr)
+
+    def _get_data_files(self):
+        """Generate list of '(package,src_dir,build_dir,filenames)' tuples"""
+        self.analyze_manifest()
+        data = []
+        for package in self.packages or ():
+            # Locate package source directory
+            src_dir = self.get_package_dir(package)
+
+            # Compute package build directory
+            build_dir = os.path.join(*([self.build_lib] + package.split('.')))
+
+            # Length of path to strip from found files
+            plen = len(src_dir)+1
+
+            # Strip directory from globbed filenames
+            filenames = [
+                file[plen:] for file in self.find_data_files(package, src_dir)
+                ]
+            data.append( (package, src_dir, build_dir, filenames) )
+        return data
+
+    def find_data_files(self, package, src_dir):
+        """Return filenames for package's data files in 'src_dir'"""
+        globs = (self.package_data.get('', [])
+                 + self.package_data.get(package, []))
+        files = self.manifest_files.get(package, [])[:]
+        for pattern in globs:
+            # Each pattern has to be converted to a platform-specific path
+            files.extend(glob(os.path.join(src_dir, convert_path(pattern))))
+        return self.exclude_data_files(package, src_dir, files)
+
+    def build_package_data(self):
+        """Copy data files into build directory"""
+        lastdir = None
+        for package, src_dir, build_dir, filenames in self.data_files:
+            for filename in filenames:
+                target = os.path.join(build_dir, filename)
+                self.mkpath(os.path.dirname(target))
+                self.copy_file(os.path.join(src_dir, filename), target)
+
+
+    def analyze_manifest(self):
+        self.manifest_files = mf = {}
+        if not self.distribution.include_package_data:
+            return
+        src_dirs = {}
+        for package in self.packages or ():
+            # Locate package source directory
+            src_dirs[assert_relative(self.get_package_dir(package))] = package
+
+        self.run_command('egg_info')
+        ei_cmd = self.get_finalized_command('egg_info')
+        for path in ei_cmd.filelist.files:
+            if path.endswith('.py'):
+                continue
+            d,f = os.path.split(assert_relative(path))
+            prev = None
+            while d and d!=prev and d not in src_dirs:
+                prev = d
+                d, df = os.path.split(d)
+                f = os.path.join(df, f)
+            if d in src_dirs:
+                mf.setdefault(src_dirs[d],[]).append(path)
+
+
+    def get_data_files(self): pass  # kludge 2.4 for lazy computation
+
+    if sys.version<"2.4":    # Python 2.4 already has this code
+        def get_outputs(self, include_bytecode=1):
+            """Return complete list of files copied to the build directory
+
+            This includes both '.py' files and data files, as well as '.pyc'
+            and '.pyo' files if 'include_bytecode' is true.  (This method is
+            needed for the 'install_lib' command to do its job properly, and to
+            generate a correct installation manifest.)
+            """
+            return _build_py.get_outputs(self, include_bytecode) + [
+                os.path.join(build_dir, filename)
+                for package, src_dir, build_dir,filenames in self.data_files
+                for filename in filenames
+                ]
+
+    def check_package(self, package, package_dir):
+        """Check namespace packages' __init__ for declare_namespace"""
+        try:
+            return self.packages_checked[package]
+        except KeyError:
+            pass
+
+        init_py = _build_py.check_package(self, package, package_dir)
+        self.packages_checked[package] = init_py
+
+        if not init_py or not self.distribution.namespace_packages:
+            return init_py
+
+        for pkg in self.distribution.namespace_packages:
+            if pkg==package or pkg.startswith(package+'.'):
+                break
+        else:
+            return init_py
+
+        f = open(init_py,'rU')
+        if 'declare_namespace' not in f.read():
+            from distutils.errors import DistutilsError
+            raise DistutilsError(
+              "Namespace package problem: %s is a namespace package, but its\n"
+              "__init__.py does not call declare_namespace()! Please fix it.\n"
+              '(See the setuptools manual under "Namespace Packages" for '
+              "details.)\n" % (package,)
+            )
+        f.close()
+        return init_py
+
+    def initialize_options(self):
+        self.packages_checked={}
+        _build_py.initialize_options(self)
+
+
+
+
+
+
+
+    def exclude_data_files(self, package, src_dir, files):
+        """Filter filenames for package's data files in 'src_dir'"""
+        globs = (self.exclude_package_data.get('', [])
+                 + self.exclude_package_data.get(package, []))
+        bad = []
+        for pattern in globs:
+            bad.extend(
+                fnmatch.filter(
+                    files, os.path.join(src_dir, convert_path(pattern))
+                )
+            )
+        bad = dict.fromkeys(bad)
+        return [f for f in files if f not in bad]
+
+
+def assert_relative(path):
+    if not os.path.isabs(path):
+        return path
+    from distutils.errors import DistutilsSetupError
+    raise DistutilsSetupError(
+"""Error: setup script specifies an absolute path:
+
+    %s
+
+setup() arguments must *always* be /-separated paths relative to the
+setup.py directory, *never* absolute paths.
+""" % path
+    )
diff --git a/Lib/setuptools/command/develop.py b/Lib/setuptools/command/develop.py
new file mode 100755
index 0000000..7ab5b23
--- /dev/null
+++ b/Lib/setuptools/command/develop.py
@@ -0,0 +1,116 @@
+from setuptools.command.easy_install import easy_install
+from distutils.util import convert_path
+from pkg_resources import Distribution, PathMetadata, normalize_path
+from distutils import log
+from distutils.errors import *
+import sys, os
+
+class develop(easy_install):
+    """Set up package for development"""
+
+    description = "install package in 'development mode'"
+
+    user_options = easy_install.user_options + [
+        ("uninstall", "u", "Uninstall this source package"),
+    ]
+
+    boolean_options = easy_install.boolean_options + ['uninstall']
+
+    command_consumes_arguments = False  # override base
+
+    def run(self):
+        if self.uninstall:
+            self.multi_version = True
+            self.uninstall_link()
+        else:
+            self.install_for_development()
+        self.warn_deprecated_options()
+
+    def initialize_options(self):
+        self.uninstall = None
+        easy_install.initialize_options(self)
+
+
+
+
+
+
+
+
+
+
+    def finalize_options(self):
+        ei = self.get_finalized_command("egg_info")
+        if ei.broken_egg_info:
+            raise DistutilsError(
+            "Please rename %r to %r before using 'develop'"
+            % (ei.egg_info, ei.broken_egg_info)
+            )
+        self.args = [ei.egg_name]
+        easy_install.finalize_options(self)
+        self.egg_link = os.path.join(self.install_dir, ei.egg_name+'.egg-link')
+        self.egg_base = ei.egg_base
+        self.egg_path = os.path.abspath(ei.egg_base)
+
+        # Make a distribution for the package's source
+        self.dist = Distribution(
+            normalize_path(self.egg_path),
+            PathMetadata(self.egg_path, os.path.abspath(ei.egg_info)),
+            project_name = ei.egg_name
+        )
+
+    def install_for_development(self):
+        # Ensure metadata is up-to-date
+        self.run_command('egg_info')
+
+        # Build extensions in-place
+        self.reinitialize_command('build_ext', inplace=1)
+        self.run_command('build_ext')
+
+        self.install_site_py()  # ensure that target dir is site-safe
+
+        # create an .egg-link in the installation dir, pointing to our egg
+        log.info("Creating %s (link to %s)", self.egg_link, self.egg_base)
+        if not self.dry_run:
+            f = open(self.egg_link,"w")
+            f.write(self.egg_path)
+            f.close()
+
+        # postprocess the installed distro, fixing up .pth, installing scripts,
+        # and handling requirements
+        self.process_distribution(None, self.dist)
+
+    def uninstall_link(self):
+        if os.path.exists(self.egg_link):
+            log.info("Removing %s (link to %s)", self.egg_link, self.egg_base)
+            contents = [line.rstrip() for line in file(self.egg_link)]
+            if contents != [self.egg_path]:
+                log.warn("Link points to %s: uninstall aborted", contents)
+                return
+            if not self.dry_run:
+                os.unlink(self.egg_link)
+        if not self.dry_run:
+            self.update_pth(self.dist)  # remove any .pth link to us
+        if self.distribution.scripts:
+            # XXX should also check for entry point scripts!
+            log.warn("Note: you must uninstall or replace scripts manually!")
+
+
+    def install_egg_scripts(self, dist):
+        if dist is not self.dist:
+            # Installing a dependency, so fall back to normal behavior
+            return easy_install.install_egg_scripts(self,dist)
+
+        # create wrapper scripts in the script dir, pointing to dist.scripts
+
+        # new-style...
+        self.install_wrapper_scripts(dist)
+
+        # ...and old-style
+        for script_name in self.distribution.scripts or []:
+            script_path = os.path.abspath(convert_path(script_name))
+            script_name = os.path.basename(script_path)
+            f = open(script_path,'rU')
+            script_text = f.read()
+            f.close()
+            self.install_script(dist, script_name, script_text, script_path)
diff --git a/Lib/setuptools/command/easy_install.py b/Lib/setuptools/command/easy_install.py
new file mode 100755
index 0000000..3ddcec4
--- /dev/null
+++ b/Lib/setuptools/command/easy_install.py
@@ -0,0 +1,1555 @@
+#!python
+"""\
+Easy Install
+------------
+
+A tool for doing automatic download/extract/build of distutils-based Python
+packages.  For detailed documentation, see the accompanying EasyInstall.txt
+file, or visit the `EasyInstall home page`__.
+
+__ http://peak.telecommunity.com/DevCenter/EasyInstall
+"""
+import sys, os.path, zipimport, shutil, tempfile, zipfile, re, stat, random
+from glob import glob
+from setuptools import Command
+from setuptools.sandbox import run_setup
+from distutils import log, dir_util
+from distutils.sysconfig import get_python_lib
+from distutils.errors import DistutilsArgError, DistutilsOptionError, \
+    DistutilsError
+from setuptools.archive_util import unpack_archive
+from setuptools.package_index import PackageIndex, parse_bdist_wininst
+from setuptools.package_index import URL_SCHEME
+from setuptools.command import bdist_egg, egg_info
+from pkg_resources import *
+sys_executable = os.path.normpath(sys.executable)
+
+__all__ = [
+    'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg',
+    'main', 'get_exe_prefixes',
+]
+
+def samefile(p1,p2):
+    if hasattr(os.path,'samefile') and (
+        os.path.exists(p1) and os.path.exists(p2)
+    ):
+        return os.path.samefile(p1,p2)
+    return (
+        os.path.normpath(os.path.normcase(p1)) ==
+        os.path.normpath(os.path.normcase(p2))
+    )
+
+class easy_install(Command):
+    """Manage a download/build/install process"""
+    description = "Find/get/install Python packages"
+    command_consumes_arguments = True
+
+    user_options = [
+        ('prefix=', None, "installation prefix"),
+        ("zip-ok", "z", "install package as a zipfile"),
+        ("multi-version", "m", "make apps have to require() a version"),
+        ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"),
+        ("install-dir=", "d", "install package to DIR"),
+        ("script-dir=", "s", "install scripts to DIR"),
+        ("exclude-scripts", "x", "Don't install scripts"),
+        ("always-copy", "a", "Copy all needed packages to install dir"),
+        ("index-url=", "i", "base URL of Python Package Index"),
+        ("find-links=", "f", "additional URL(s) to search for packages"),
+        ("delete-conflicting", "D", "no longer needed; don't use this"),
+        ("ignore-conflicts-at-my-risk", None,
+            "no longer needed; don't use this"),
+        ("build-directory=", "b",
+            "download/extract/build in DIR; keep the results"),
+        ('optimize=', 'O',
+         "also compile with optimization: -O1 for \"python -O\", "
+         "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"),
+        ('record=', None,
+         "filename in which to record list of installed files"),
+        ('always-unzip', 'Z', "don't install as a zipfile, no matter what"),
+        ('site-dirs=','S',"list of directories where .pth files work"),
+        ('editable', 'e', "Install specified packages in editable form"),
+        ('no-deps', 'N', "don't install dependencies"),
+        ('allow-hosts=', 'H', "pattern(s) that hostnames must match"),
+    ]
+    boolean_options = [
+        'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy',
+        'delete-conflicting', 'ignore-conflicts-at-my-risk', 'editable',
+        'no-deps',
+    ]
+    negative_opt = {'always-unzip': 'zip-ok'}
+    create_index = PackageIndex
+
+
+    def initialize_options(self):
+        self.zip_ok = None
+        self.install_dir = self.script_dir = self.exclude_scripts = None
+        self.index_url = None
+        self.find_links = None
+        self.build_directory = None
+        self.args = None
+        self.optimize = self.record = None
+        self.upgrade = self.always_copy = self.multi_version = None
+        self.editable = self.no_deps = self.allow_hosts = None
+        self.root = self.prefix = self.no_report = None
+
+        # Options not specifiable via command line
+        self.package_index = None
+        self.pth_file = None
+        self.delete_conflicting = None
+        self.ignore_conflicts_at_my_risk = None
+        self.site_dirs = None
+        self.installed_projects = {}
+        self.sitepy_installed = False
+        # Always read easy_install options, even if we are subclassed, or have
+        # an independent instance created.  This ensures that defaults will
+        # always come from the standard configuration file(s)' "easy_install"
+        # section, even if this is a "develop" or "install" command, or some
+        # other embedding.
+        self._dry_run = None
+        self.verbose = self.distribution.verbose
+        self.distribution._set_command_options(
+            self, self.distribution.get_option_dict('easy_install')
+        )
+
+    def delete_blockers(self, blockers):
+        for filename in blockers:
+            if os.path.exists(filename) or os.path.islink(filename):
+                log.info("Deleting %s", filename)
+                if not self.dry_run:
+                    if os.path.isdir(filename) and not os.path.islink(filename):
+                        rmtree(filename)
+                    else:
+                        os.unlink(filename)
+
+    def finalize_options(self):
+        self._expand('install_dir','script_dir','build_directory','site_dirs')
+        # If a non-default installation directory was specified, default the
+        # script directory to match it.
+        if self.script_dir is None:
+            self.script_dir = self.install_dir
+
+        # Let install_dir get set by install_lib command, which in turn
+        # gets its info from the install command, and takes into account
+        # --prefix and --home and all that other crud.
+        self.set_undefined_options('install_lib',
+            ('install_dir','install_dir')
+        )
+        # Likewise, set default script_dir from 'install_scripts.install_dir'
+        self.set_undefined_options('install_scripts',
+            ('install_dir', 'script_dir')
+        )
+        # default --record from the install command
+        self.set_undefined_options('install', ('record', 'record'))
+        normpath = map(normalize_path, sys.path)
+        self.all_site_dirs = get_site_dirs()
+        if self.site_dirs is not None:
+            site_dirs = [
+                os.path.expanduser(s.strip()) for s in self.site_dirs.split(',')
+            ]
+            for d in site_dirs:
+                if not os.path.isdir(d):
+                    log.warn("%s (in --site-dirs) does not exist", d)
+                elif normalize_path(d) not in normpath:
+                    raise DistutilsOptionError(
+                        d+" (in --site-dirs) is not on sys.path"
+                    )
+                else:
+                    self.all_site_dirs.append(normalize_path(d))
+        self.check_site_dir()
+        self.index_url = self.index_url or "http://www.python.org/pypi"
+        self.shadow_path = self.all_site_dirs[:]
+        for path_item in self.install_dir, normalize_path(self.script_dir):
+            if path_item not in self.shadow_path:
+                self.shadow_path.insert(0, path_item)
+
+        if self.allow_hosts is not None:
+            hosts = [s.strip() for s in self.allow_hosts.split(',')]
+        else:
+            hosts = ['*']
+
+        if self.package_index is None:
+            self.package_index = self.create_index(
+                self.index_url, search_path = self.shadow_path, hosts=hosts
+            )
+        self.local_index = Environment(self.shadow_path)
+
+        if self.find_links is not None:
+            if isinstance(self.find_links, basestring):
+                self.find_links = self.find_links.split()
+        else:
+            self.find_links = []
+
+        self.package_index.add_find_links(self.find_links)
+        self.set_undefined_options('install_lib', ('optimize','optimize'))
+        if not isinstance(self.optimize,int):
+            try:
+                self.optimize = int(self.optimize)
+                if not (0 <= self.optimize <= 2): raise ValueError
+            except ValueError:
+                raise DistutilsOptionError("--optimize must be 0, 1, or 2")
+
+        if self.delete_conflicting and self.ignore_conflicts_at_my_risk:
+            raise DistutilsOptionError(
+                "Can't use both --delete-conflicting and "
+                "--ignore-conflicts-at-my-risk at the same time"
+            )
+        if self.editable and not self.build_directory:
+            raise DistutilsArgError(
+                "Must specify a build directory (-b) when using --editable"
+            )
+        if not self.args:
+            raise DistutilsArgError(
+                "No urls, filenames, or requirements specified (see --help)")
+
+        self.outputs = []
+
+    def run(self):
+        if self.verbose<>self.distribution.verbose:
+            log.set_verbosity(self.verbose)
+        try:
+            for spec in self.args:
+                self.easy_install(spec, not self.no_deps)
+            if self.record:
+                outputs = self.outputs
+                if self.root:               # strip any package prefix
+                    root_len = len(self.root)
+                    for counter in xrange(len(outputs)):
+                        outputs[counter] = outputs[counter][root_len:]
+                from distutils import file_util
+                self.execute(
+                    file_util.write_file, (self.record, outputs),
+                    "writing list of installed files to '%s'" %
+                    self.record
+                )
+            self.warn_deprecated_options()
+        finally:
+            log.set_verbosity(self.distribution.verbose)
+
+    def pseudo_tempname(self):
+        """Return a pseudo-tempname base in the install directory.
+        This code is intentionally naive; if a malicious party can write to
+        the target directory you're already in deep doodoo.
+        """
+        try:
+            pid = os.getpid()
+        except:
+            pid = random.randint(0,sys.maxint)
+        return os.path.join(self.install_dir, "test-easy-install-%s" % pid)
+
+    def warn_deprecated_options(self):
+        if self.delete_conflicting or self.ignore_conflicts_at_my_risk:
+            log.warn(
+                "Note: The -D, --delete-conflicting and"
+                " --ignore-conflicts-at-my-risk no longer have any purpose"
+                " and should not be used."
+            )
+
+    def check_site_dir(self):
+        """Verify that self.install_dir is .pth-capable dir, if needed"""
+
+        instdir = normalize_path(self.install_dir)
+        pth_file = os.path.join(instdir,'easy-install.pth')
+
+        # Is it a configured, PYTHONPATH, implicit, or explicit site dir?
+        is_site_dir = instdir in self.all_site_dirs
+
+        if not is_site_dir:
+            # No?  Then directly test whether it does .pth file processing
+            is_site_dir = self.check_pth_processing()
+        else:
+            # make sure we can write to target dir
+            testfile = self.pseudo_tempname()+'.write-test'
+            test_exists = os.path.exists(testfile)
+            try:
+                if test_exists: os.unlink(testfile)
+                open(testfile,'w').close()
+                os.unlink(testfile)
+            except (OSError,IOError):
+                self.cant_write_to_target()
+
+        if not is_site_dir and not self.multi_version:
+            # Can't install non-multi to non-site dir
+            raise DistutilsError(self.no_default_version_msg())
+
+        if is_site_dir:
+            if self.pth_file is None:
+                self.pth_file = PthDistributions(pth_file)
+        else:
+            self.pth_file = None
+
+        PYTHONPATH = os.environ.get('PYTHONPATH','').split(os.pathsep)
+        if instdir not in map(normalize_path, filter(None,PYTHONPATH)):
+            # only PYTHONPATH dirs need a site.py, so pretend it's there
+            self.sitepy_installed = True
+
+        self.install_dir = instdir
+
+
+    def cant_write_to_target(self):
+        msg = """can't create or remove files in install directory
+
+The following error occurred while trying to add or remove files in the
+installation directory:
+
+    %s
+
+The installation directory you specified (via --install-dir, --prefix, or
+the distutils default setting) was:
+
+    %s
+"""     % (sys.exc_info()[1], self.install_dir,)
+
+        if not os.path.exists(self.install_dir):
+            msg += """
+This directory does not currently exist.  Please create it and try again, or
+choose a different installation directory (using the -d or --install-dir
+option).
+"""
+        else:
+            msg += """
+Perhaps your account does not have write access to this directory?  If the
+installation directory is a system-owned directory, you may need to sign in
+as the administrator or "root" account.  If you do not have administrative
+access to this machine, you may wish to choose a different installation
+directory, preferably one that is listed in your PYTHONPATH environment
+variable.
+
+For information on other options, you may wish to consult the
+documentation at:
+
+  http://peak.telecommunity.com/EasyInstall.html
+
+Please make the appropriate changes for your system and try again.
+"""
+        raise DistutilsError(msg)
+
+
+
+
+    def check_pth_processing(self):
+        """Empirically verify whether .pth files are supported in inst. dir"""
+        instdir = self.install_dir
+        log.info("Checking .pth file support in %s", instdir)
+        pth_file = self.pseudo_tempname()+".pth"
+        ok_file = pth_file+'.ok'
+        ok_exists = os.path.exists(ok_file)
+        try:
+            if ok_exists: os.unlink(ok_file)
+            f = open(pth_file,'w')
+        except (OSError,IOError):
+            self.cant_write_to_target()
+        else:
+            try:
+                f.write("import os;open(%r,'w').write('OK')\n" % (ok_file,))
+                f.close(); f=None
+                executable = sys.executable
+                if os.name=='nt':
+                    dirname,basename = os.path.split(executable)
+                    alt = os.path.join(dirname,'pythonw.exe')
+                    if basename.lower()=='python.exe' and os.path.exists(alt):
+                        # use pythonw.exe to avoid opening a console window
+                        executable = alt
+                    if ' ' in executable: executable='"%s"' % executable
+                from distutils.spawn import spawn
+                spawn([executable,'-E','-c','pass'],0)
+
+                if os.path.exists(ok_file):
+                    log.info(
+                        "TEST PASSED: %s appears to support .pth files",
+                        instdir
+                    )
+                    return True
+            finally:
+                if f: f.close()
+                if os.path.exists(ok_file): os.unlink(ok_file)
+                if os.path.exists(pth_file): os.unlink(pth_file)
+        if not self.multi_version:
+            log.warn("TEST FAILED: %s does NOT support .pth files", instdir)
+        return False
+
+    def install_egg_scripts(self, dist):
+        """Write all the scripts for `dist`, unless scripts are excluded"""
+
+        self.install_wrapper_scripts(dist)
+        if self.exclude_scripts or not dist.metadata_isdir('scripts'):
+            return
+
+        for script_name in dist.metadata_listdir('scripts'):
+            self.install_script(
+                dist, script_name,
+                dist.get_metadata('scripts/'+script_name).replace('\r','\n')
+            )
+
+    def add_output(self, path):
+        if os.path.isdir(path):
+            for base, dirs, files in os.walk(path):
+                for filename in files:
+                    self.outputs.append(os.path.join(base,filename))
+        else:
+            self.outputs.append(path)
+
+    def not_editable(self, spec):
+        if self.editable:
+            raise DistutilsArgError(
+                "Invalid argument %r: you can't use filenames or URLs "
+                "with --editable (except via the --find-links option)."
+                % (spec,)
+            )
+
+    def check_editable(self,spec):
+        if not self.editable:
+            return
+
+        if os.path.exists(os.path.join(self.build_directory, spec.key)):
+            raise DistutilsArgError(
+                "%r already exists in %s; can't do a checkout there" %
+                (spec.key, self.build_directory)
+            )
+
+
+
+    def easy_install(self, spec, deps=False):
+        tmpdir = tempfile.mkdtemp(prefix="easy_install-")
+        download = None
+        self.install_site_py()
+
+        try:
+            if not isinstance(spec,Requirement):
+                if URL_SCHEME(spec):
+                    # It's a url, download it to tmpdir and process
+                    self.not_editable(spec)
+                    download = self.package_index.download(spec, tmpdir)
+                    return self.install_item(None, download, tmpdir, deps, True)
+
+                elif os.path.exists(spec):
+                    # Existing file or directory, just process it directly
+                    self.not_editable(spec)
+                    return self.install_item(None, spec, tmpdir, deps, True)
+                else:
+                    spec = parse_requirement_arg(spec)
+
+            self.check_editable(spec)
+            dist = self.package_index.fetch_distribution(
+                spec, tmpdir, self.upgrade, self.editable, not self.always_copy
+            )
+
+            if dist is None:
+                msg = "Could not find suitable distribution for %r" % spec
+                if self.always_copy:
+                    msg+=" (--always-copy skips system and development eggs)"
+                raise DistutilsError(msg)
+            elif dist.precedence==DEVELOP_DIST:
+                # .egg-info dists don't need installing, just process deps
+                self.process_distribution(spec, dist, deps, "Using")
+                return dist
+            else:
+                return self.install_item(spec, dist.location, tmpdir, deps)
+
+        finally:
+            if os.path.exists(tmpdir):
+                rmtree(tmpdir)
+
+    def install_item(self, spec, download, tmpdir, deps, install_needed=False):
+
+        # Installation is also needed if file in tmpdir or is not an egg
+        install_needed = install_needed or os.path.dirname(download) == tmpdir
+        install_needed = install_needed or not download.endswith('.egg')
+
+        log.info("Processing %s", os.path.basename(download))
+
+        if install_needed or self.always_copy:
+            dists = self.install_eggs(spec, download, tmpdir)
+            for dist in dists:
+                self.process_distribution(spec, dist, deps)
+        else:
+            dists = [self.check_conflicts(self.egg_distribution(download))]
+            self.process_distribution(spec, dists[0], deps, "Using")
+
+        if spec is not None:
+            for dist in dists:
+                if dist in spec:
+                    return dist
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def process_distribution(self, requirement, dist, deps=True, *info):
+        self.update_pth(dist)
+        self.package_index.add(dist)
+        self.local_index.add(dist)
+        self.install_egg_scripts(dist)
+        self.installed_projects[dist.key] = dist
+        log.warn(self.installation_report(requirement, dist, *info))
+        if not deps and not self.always_copy:
+            return
+        elif requirement is not None and dist.key != requirement.key:
+            log.warn("Skipping dependencies for %s", dist)
+            return  # XXX this is not the distribution we were looking for
+        elif requirement is None or dist not in requirement:
+            # if we wound up with a different version, resolve what we've got
+            distreq = dist.as_requirement()
+            requirement = requirement or distreq
+            requirement = Requirement(
+                distreq.project_name, distreq.specs, requirement.extras
+            )
+        if dist.has_metadata('dependency_links.txt'):
+            self.package_index.add_find_links(
+                dist.get_metadata_lines('dependency_links.txt')
+            )
+        log.info("Processing dependencies for %s", requirement)
+        try:
+            distros = WorkingSet([]).resolve(
+                [requirement], self.local_index, self.easy_install
+            )
+        except DistributionNotFound, e:
+            raise DistutilsError(
+                "Could not find required distribution %s" % e.args
+            )
+        except VersionConflict, e:
+            raise DistutilsError(
+                "Installed distribution %s conflicts with requirement %s"
+                % e.args
+            )
+        if self.always_copy:
+            # Force all the relevant distros to be copied or activated
+            for dist in distros:
+                if dist.key not in self.installed_projects:
+                    self.easy_install(dist.as_requirement())
+
+    def should_unzip(self, dist):
+        if self.zip_ok is not None:
+            return not self.zip_ok
+        if dist.has_metadata('not-zip-safe'):
+            return True
+        if not dist.has_metadata('zip-safe'):
+            return True
+        return False
+
+    def maybe_move(self, spec, dist_filename, setup_base):
+        dst = os.path.join(self.build_directory, spec.key)
+        if os.path.exists(dst):
+            log.warn(
+               "%r already exists in %s; build directory %s will not be kept",
+               spec.key, self.build_directory, setup_base
+            )
+            return setup_base
+        if os.path.isdir(dist_filename):
+            setup_base = dist_filename
+        else:
+            if os.path.dirname(dist_filename)==setup_base:
+                os.unlink(dist_filename)   # get it out of the tmp dir
+            contents = os.listdir(setup_base)
+            if len(contents)==1:
+                dist_filename = os.path.join(setup_base,contents[0])
+                if os.path.isdir(dist_filename):
+                    # if the only thing there is a directory, move it instead
+                    setup_base = dist_filename
+        ensure_directory(dst); shutil.move(setup_base, dst)
+        return dst
+
+    def install_wrapper_scripts(self, dist):
+        if not self.exclude_scripts:
+            for args in get_script_args(dist):
+                self.write_script(*args)
+
+
+
+
+
+
+    def install_script(self, dist, script_name, script_text, dev_path=None):
+        """Generate a legacy script wrapper and install it"""
+        spec = str(dist.as_requirement())
+
+        if dev_path:
+            script_text = get_script_header(script_text) + (
+                "# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r\n"
+                "__requires__ = %(spec)r\n"
+                "from pkg_resources import require; require(%(spec)r)\n"
+                "del require\n"
+                "__file__ = %(dev_path)r\n"
+                "execfile(__file__)\n"
+            ) % locals()
+        else:
+            script_text = get_script_header(script_text) + (
+                "# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r\n"
+                "__requires__ = %(spec)r\n"
+                "import pkg_resources\n"
+                "pkg_resources.run_script(%(spec)r, %(script_name)r)\n"
+            ) % locals()
+
+        self.write_script(script_name, script_text)
+
+    def write_script(self, script_name, contents, mode="t", blockers=()):
+        """Write an executable file to the scripts directory"""
+        self.delete_blockers(   # clean up old .py/.pyw w/o a script
+            [os.path.join(self.script_dir,x) for x in blockers])
+        log.info("Installing %s script to %s", script_name, self.script_dir)
+        target = os.path.join(self.script_dir, script_name)
+        self.add_output(target)
+
+        if not self.dry_run:
+            ensure_directory(target)
+            f = open(target,"w"+mode)
+            f.write(contents)
+            f.close()
+            try:
+                os.chmod(target,0755)
+            except (AttributeError, os.error):
+                pass
+
+    def install_eggs(self, spec, dist_filename, tmpdir):
+        # .egg dirs or files are already built, so just return them
+        if dist_filename.lower().endswith('.egg'):
+            return [self.install_egg(dist_filename, tmpdir)]
+        elif dist_filename.lower().endswith('.exe'):
+            return [self.install_exe(dist_filename, tmpdir)]
+
+        # Anything else, try to extract and build
+        setup_base = tmpdir
+        if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'):
+            unpack_archive(dist_filename, tmpdir, self.unpack_progress)
+        elif os.path.isdir(dist_filename):
+            setup_base = os.path.abspath(dist_filename)
+
+        if (setup_base.startswith(tmpdir)   # something we downloaded
+            and self.build_directory and spec is not None
+        ):
+            setup_base = self.maybe_move(spec, dist_filename, setup_base)
+
+        # Find the setup.py file
+        setup_script = os.path.join(setup_base, 'setup.py')
+
+        if not os.path.exists(setup_script):
+            setups = glob(os.path.join(setup_base, '*', 'setup.py'))
+            if not setups:
+                raise DistutilsError(
+                    "Couldn't find a setup script in %s" % dist_filename
+                )
+            if len(setups)>1:
+                raise DistutilsError(
+                    "Multiple setup scripts in %s" % dist_filename
+                )
+            setup_script = setups[0]
+
+        # Now run it, and return the result
+        if self.editable:
+            log.warn(self.report_editable(spec, setup_script))
+            return []
+        else:
+            return self.build_and_install(setup_script, setup_base)
+
+    def egg_distribution(self, egg_path):
+        if os.path.isdir(egg_path):
+            metadata = PathMetadata(egg_path,os.path.join(egg_path,'EGG-INFO'))
+        else:
+            metadata = EggMetadata(zipimport.zipimporter(egg_path))
+        return Distribution.from_filename(egg_path,metadata=metadata)
+
+    def install_egg(self, egg_path, tmpdir):
+        destination = os.path.join(self.install_dir,os.path.basename(egg_path))
+        destination = os.path.abspath(destination)
+        if not self.dry_run:
+            ensure_directory(destination)
+
+        dist = self.egg_distribution(egg_path)
+        self.check_conflicts(dist)
+        if not samefile(egg_path, destination):
+            if os.path.isdir(destination) and not os.path.islink(destination):
+                dir_util.remove_tree(destination, dry_run=self.dry_run)
+            elif os.path.exists(destination):
+                self.execute(os.unlink,(destination,),"Removing "+destination)
+            uncache_zipdir(destination)
+            if os.path.isdir(egg_path):
+                if egg_path.startswith(tmpdir):
+                    f,m = shutil.move, "Moving"
+                else:
+                    f,m = shutil.copytree, "Copying"
+            elif self.should_unzip(dist):
+                self.mkpath(destination)
+                f,m = self.unpack_and_compile, "Extracting"
+            elif egg_path.startswith(tmpdir):
+                f,m = shutil.move, "Moving"
+            else:
+                f,m = shutil.copy2, "Copying"
+
+            self.execute(f, (egg_path, destination),
+                (m+" %s to %s") %
+                (os.path.basename(egg_path),os.path.dirname(destination)))
+
+        self.add_output(destination)
+        return self.egg_distribution(destination)
+
+    def install_exe(self, dist_filename, tmpdir):
+        # See if it's valid, get data
+        cfg = extract_wininst_cfg(dist_filename)
+        if cfg is None:
+            raise DistutilsError(
+                "%s is not a valid distutils Windows .exe" % dist_filename
+            )
+        # Create a dummy distribution object until we build the real distro
+        dist = Distribution(None,
+            project_name=cfg.get('metadata','name'),
+            version=cfg.get('metadata','version'), platform="win32"
+        )
+
+        # Convert the .exe to an unpacked egg
+        egg_path = dist.location = os.path.join(tmpdir, dist.egg_name()+'.egg')
+        egg_tmp  = egg_path+'.tmp'
+        egg_info = os.path.join(egg_tmp, 'EGG-INFO')
+        pkg_inf = os.path.join(egg_info, 'PKG-INFO')
+        ensure_directory(pkg_inf)   # make sure EGG-INFO dir exists
+        dist._provider = PathMetadata(egg_tmp, egg_info)    # XXX
+        self.exe_to_egg(dist_filename, egg_tmp)
+
+        # Write EGG-INFO/PKG-INFO
+        if not os.path.exists(pkg_inf):
+            f = open(pkg_inf,'w')
+            f.write('Metadata-Version: 1.0\n')
+            for k,v in cfg.items('metadata'):
+                if k<>'target_version':
+                    f.write('%s: %s\n' % (k.replace('_','-').title(), v))
+            f.close()
+        script_dir = os.path.join(egg_info,'scripts')
+        self.delete_blockers(   # delete entry-point scripts to avoid duping
+            [os.path.join(script_dir,args[0]) for args in get_script_args(dist)]
+        )
+        # Build .egg file from tmpdir
+        bdist_egg.make_zipfile(
+            egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run
+        )
+        # install the .egg
+        return self.install_egg(egg_path, tmpdir)
+
+    def exe_to_egg(self, dist_filename, egg_tmp):
+        """Extract a bdist_wininst to the directories an egg would use"""
+        # Check for .pth file and set up prefix translations
+        prefixes = get_exe_prefixes(dist_filename)
+        to_compile = []
+        native_libs = []
+        top_level = {}
+
+        def process(src,dst):
+            for old,new in prefixes:
+                if src.startswith(old):
+                    src = new+src[len(old):]
+                    parts = src.split('/')
+                    dst = os.path.join(egg_tmp, *parts)
+                    dl = dst.lower()
+                    if dl.endswith('.pyd') or dl.endswith('.dll'):
+                        top_level[os.path.splitext(parts[0])[0]] = 1
+                        native_libs.append(src)
+                    elif dl.endswith('.py') and old!='SCRIPTS/':
+                        top_level[os.path.splitext(parts[0])[0]] = 1
+                        to_compile.append(dst)
+                    return dst
+            if not src.endswith('.pth'):
+                log.warn("WARNING: can't process %s", src)
+            return None
+
+        # extract, tracking .pyd/.dll->native_libs and .py -> to_compile
+        unpack_archive(dist_filename, egg_tmp, process)
+        stubs = []
+        for res in native_libs:
+            if res.lower().endswith('.pyd'):    # create stubs for .pyd's
+                parts = res.split('/')
+                resource, parts[-1] = parts[-1], parts[-1][:-1]
+                pyfile = os.path.join(egg_tmp, *parts)
+                to_compile.append(pyfile); stubs.append(pyfile)
+                bdist_egg.write_stub(resource, pyfile)
+
+        self.byte_compile(to_compile)   # compile .py's
+        bdist_egg.write_safety_flag(os.path.join(egg_tmp,'EGG-INFO'),
+            bdist_egg.analyze_egg(egg_tmp, stubs))  # write zip-safety flag
+
+        for name in 'top_level','native_libs':
+            if locals()[name]:
+                txt = os.path.join(egg_tmp, 'EGG-INFO', name+'.txt')
+                if not os.path.exists(txt):
+                    open(txt,'w').write('\n'.join(locals()[name])+'\n')
+
+    def check_conflicts(self, dist):
+        """Verify that there are no conflicting "old-style" packages"""
+
+        return dist     # XXX temporarily disable until new strategy is stable
+        from imp import find_module, get_suffixes
+        from glob import glob
+
+        blockers = []
+        names = dict.fromkeys(dist._get_metadata('top_level.txt')) # XXX private attr
+
+        exts = {'.pyc':1, '.pyo':1}     # get_suffixes() might leave one out
+        for ext,mode,typ in get_suffixes():
+            exts[ext] = 1
+
+        for path,files in expand_paths([self.install_dir]+self.all_site_dirs):
+            for filename in files:
+                base,ext = os.path.splitext(filename)
+                if base in names:
+                    if not ext:
+                        # no extension, check for package
+                        try:
+                            f, filename, descr = find_module(base, [path])
+                        except ImportError:
+                            continue
+                        else:
+                            if f: f.close()
+                            if filename not in blockers:
+                                blockers.append(filename)
+                    elif ext in exts and base!='site':  # XXX ugh
+                        blockers.append(os.path.join(path,filename))
+        if blockers:
+            self.found_conflicts(dist, blockers)
+
+        return dist
+
+    def found_conflicts(self, dist, blockers):
+        if self.delete_conflicting:
+            log.warn("Attempting to delete conflicting packages:")
+            return self.delete_blockers(blockers)
+
+        msg = """\
+-------------------------------------------------------------------------
+CONFLICT WARNING:
+
+The following modules or packages have the same names as modules or
+packages being installed, and will be *before* the installed packages in
+Python's search path.  You MUST remove all of the relevant files and
+directories before you will be able to use the package(s) you are
+installing:
+
+   %s
+
+""" % '\n   '.join(blockers)
+
+        if self.ignore_conflicts_at_my_risk:
+            msg += """\
+(Note: you can run EasyInstall on '%s' with the
+--delete-conflicting option to attempt deletion of the above files
+and/or directories.)
+""" % dist.project_name
+        else:
+            msg += """\
+Note: you can attempt this installation again with EasyInstall, and use
+either the --delete-conflicting (-D) option or the
+--ignore-conflicts-at-my-risk option, to either delete the above files
+and directories, or to ignore the conflicts, respectively.  Note that if
+you ignore the conflicts, the installed package(s) may not work.
+"""
+        msg += """\
+-------------------------------------------------------------------------
+"""
+        sys.stderr.write(msg)
+        sys.stderr.flush()
+        if not self.ignore_conflicts_at_my_risk:
+            raise DistutilsError("Installation aborted due to conflicts")
+
+    def installation_report(self, req, dist, what="Installed"):
+        """Helpful installation message for display to package users"""
+        msg = "\n%(what)s %(eggloc)s%(extras)s"
+        if self.multi_version and not self.no_report:
+            msg += """
+
+Because this distribution was installed --multi-version or --install-dir,
+before you can import modules from this package in an application, you
+will need to 'import pkg_resources' and then use a 'require()' call
+similar to one of these examples, in order to select the desired version:
+
+    pkg_resources.require("%(name)s")  # latest installed version
+    pkg_resources.require("%(name)s==%(version)s")  # this exact version
+    pkg_resources.require("%(name)s>=%(version)s")  # this version or higher
+"""
+            if self.install_dir not in map(normalize_path,sys.path):
+                msg += """
+
+Note also that the installation directory must be on sys.path at runtime for
+this to work.  (e.g. by being the application's script directory, by being on
+PYTHONPATH, or by being added to sys.path by your code.)
+"""
+        eggloc = dist.location
+        name = dist.project_name
+        version = dist.version
+        extras = '' # TODO: self.report_extras(req, dist)
+        return msg % locals()
+
+    def report_editable(self, spec, setup_script):
+        dirname = os.path.dirname(setup_script)
+        python = sys.executable
+        return """\nExtracted editable version of %(spec)s to %(dirname)s
+
+If it uses setuptools in its setup script, you can activate it in
+"development" mode by going to that directory and running::
+
+    %(python)s setup.py develop
+
+See the setuptools documentation for the "develop" command for more info.
+""" % locals()
+
+    def run_setup(self, setup_script, setup_base, args):
+        sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg)
+        sys.modules.setdefault('distutils.command.egg_info', egg_info)
+
+        args = list(args)
+        if self.verbose>2:
+            v = 'v' * (self.verbose - 1)
+            args.insert(0,'-'+v)
+        elif self.verbose<2:
+            args.insert(0,'-q')
+        if self.dry_run:
+            args.insert(0,'-n')
+        log.info(
+            "Running %s %s", setup_script[len(setup_base)+1:], ' '.join(args)
+        )
+        try:
+            run_setup(setup_script, args)
+        except SystemExit, v:
+            raise DistutilsError("Setup script exited with %s" % (v.args[0],))
+
+    def build_and_install(self, setup_script, setup_base):
+        args = ['bdist_egg', '--dist-dir']
+        dist_dir = tempfile.mkdtemp(
+            prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script)
+        )
+        try:
+            args.append(dist_dir)
+            self.run_setup(setup_script, setup_base, args)
+            all_eggs = Environment([dist_dir])
+            eggs = []
+            for key in all_eggs:
+                for dist in all_eggs[key]:
+                    eggs.append(self.install_egg(dist.location, setup_base))
+            if not eggs and not self.dry_run:
+                log.warn("No eggs found in %s (setup script problem?)",
+                    dist_dir)
+            return eggs
+        finally:
+            rmtree(dist_dir)
+            log.set_verbosity(self.verbose) # restore our log verbosity
+
+    def update_pth(self,dist):
+        if self.pth_file is None:
+            return
+
+        for d in self.pth_file[dist.key]:    # drop old entries
+            if self.multi_version or d.location != dist.location:
+                log.info("Removing %s from easy-install.pth file", d)
+                self.pth_file.remove(d)
+                if d.location in self.shadow_path:
+                    self.shadow_path.remove(d.location)
+
+        if not self.multi_version:
+            if dist.location in self.pth_file.paths:
+                log.info(
+                    "%s is already the active version in easy-install.pth",
+                    dist
+                )
+            else:
+                log.info("Adding %s to easy-install.pth file", dist)
+                self.pth_file.add(dist) # add new entry
+                if dist.location not in self.shadow_path:
+                    self.shadow_path.append(dist.location)
+
+        if not self.dry_run:
+
+            self.pth_file.save()
+
+            if dist.key=='setuptools':
+                # Ensure that setuptools itself never becomes unavailable!
+                # XXX should this check for latest version?
+                filename = os.path.join(self.install_dir,'setuptools.pth')
+                if os.path.islink(filename): os.unlink(filename)
+                f = open(filename, 'wt')
+                f.write(self.pth_file.make_relative(dist.location)+'\n')
+                f.close()
+
+    def unpack_progress(self, src, dst):
+        # Progress filter for unpacking
+        log.debug("Unpacking %s to %s", src, dst)
+        return dst     # only unpack-and-compile skips files for dry run
+
+    def unpack_and_compile(self, egg_path, destination):
+        to_compile = []
+
+        def pf(src,dst):
+            if dst.endswith('.py') and not src.startswith('EGG-INFO/'):
+                to_compile.append(dst)
+            self.unpack_progress(src,dst)
+            return not self.dry_run and dst or None
+
+        unpack_archive(egg_path, destination, pf)
+        self.byte_compile(to_compile)
+
+
+    def byte_compile(self, to_compile):
+        from distutils.util import byte_compile
+        try:
+            # try to make the byte compile messages quieter
+            log.set_verbosity(self.verbose - 1)
+
+            byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run)
+            if self.optimize:
+                byte_compile(
+                    to_compile, optimize=self.optimize, force=1,
+                    dry_run=self.dry_run
+                )
+        finally:
+            log.set_verbosity(self.verbose)     # restore original verbosity
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def no_default_version_msg(self):
+        return """bad install directory or PYTHONPATH
+
+You are attempting to install a package to a directory that is not
+on PYTHONPATH and which Python does not read ".pth" files from.  The
+installation directory you specified (via --install-dir, --prefix, or
+the distutils default setting) was:
+
+    %s
+
+and your PYTHONPATH environment variable currently contains:
+
+    %r
+
+Here are some of your options for correcting the problem:
+
+* You can choose a different installation directory, i.e., one that is
+  on PYTHONPATH or supports .pth files
+
+* You can add the installation directory to the PYTHONPATH environment
+  variable.  (It must then also be on PYTHONPATH whenever you run
+  Python and want to use the package(s) you are installing.)
+
+* You can set up the installation directory to support ".pth" files by
+  using one of the approaches described here:
+
+  http://peak.telecommunity.com/EasyInstall.html#custom-installation-locations
+
+Please make the appropriate changes for your system and try again.""" % (
+        self.install_dir, os.environ.get('PYTHONPATH','')
+    )
+
+
+
+
+
+
+
+
+
+
+    def install_site_py(self):
+        """Make sure there's a site.py in the target dir, if needed"""
+
+        if self.sitepy_installed:
+            return  # already did it, or don't need to
+
+        sitepy = os.path.join(self.install_dir, "site.py")
+        source = resource_string("setuptools", "site-patch.py")
+        current = ""
+
+        if os.path.exists(sitepy):
+            log.debug("Checking existing site.py in %s", self.install_dir)
+            current = open(sitepy,'rb').read()
+            if not current.startswith('def __boot():'):
+                raise DistutilsError(
+                    "%s is not a setuptools-generated site.py; please"
+                    " remove it." % sitepy
+                )
+
+        if current != source:
+            log.info("Creating %s", sitepy)
+            if not self.dry_run:
+                ensure_directory(sitepy)
+                f = open(sitepy,'wb')
+                f.write(source)
+                f.close()
+            self.byte_compile([sitepy])
+
+        self.sitepy_installed = True
+
+
+
+
+
+
+
+
+
+
+
+
+    INSTALL_SCHEMES = dict(
+        posix = dict(
+            install_dir = '$base/lib/python$py_version_short/site-packages',
+            script_dir  = '$base/bin',
+        ),
+    )
+
+    DEFAULT_SCHEME = dict(
+        install_dir = '$base/Lib/site-packages',
+        script_dir  = '$base/Scripts',
+    )
+
+    def _expand(self, *attrs):
+        config_vars = self.get_finalized_command('install').config_vars
+
+        if self.prefix:
+            # Set default install_dir/scripts from --prefix
+            config_vars = config_vars.copy()
+            config_vars['base'] = self.prefix
+            scheme = self.INSTALL_SCHEMES.get(os.name,self.DEFAULT_SCHEME)
+            for attr,val in scheme.items():
+                if getattr(self,attr,None) is None:
+                    setattr(self,attr,val)
+
+        from distutils.util import subst_vars
+        for attr in attrs:
+            val = getattr(self, attr)
+            if val is not None:
+                val = subst_vars(val, config_vars)
+                if os.name == 'posix':
+                    val = os.path.expanduser(val)
+                setattr(self, attr, val)
+
+
+
+
+
+
+
+
+
+def get_site_dirs():
+    # return a list of 'site' dirs
+    sitedirs = filter(None,os.environ.get('PYTHONPATH','').split(os.pathsep))
+    prefixes = [sys.prefix]
+    if sys.exec_prefix != sys.prefix:
+        prefixes.append(sys.exec_prefix)
+    for prefix in prefixes:
+        if prefix:
+            if sys.platform in ('os2emx', 'riscos'):
+                sitedirs.append(os.path.join(prefix, "Lib", "site-packages"))
+            elif os.sep == '/':
+                sitedirs.extend([os.path.join(prefix,
+                                         "lib",
+                                         "python" + sys.version[:3],
+                                         "site-packages"),
+                            os.path.join(prefix, "lib", "site-python")])
+            else:
+                sitedirs.extend(
+                    [prefix, os.path.join(prefix, "lib", "site-packages")]
+                )
+            if sys.platform == 'darwin':
+                # for framework builds *only* we add the standard Apple
+                # locations. Currently only per-user, but /Library and
+                # /Network/Library could be added too
+                if 'Python.framework' in prefix:
+                    home = os.environ.get('HOME')
+                    if home:
+                        sitedirs.append(
+                            os.path.join(home,
+                                         'Library',
+                                         'Python',
+                                         sys.version[:3],
+                                         'site-packages'))
+    for plat_specific in (0,1):
+        site_lib = get_python_lib(plat_specific)
+        if site_lib not in sitedirs: sitedirs.append(site_lib)
+
+    sitedirs = map(normalize_path, sitedirs)
+    return sitedirs
+
+
+def expand_paths(inputs):
+    """Yield sys.path directories that might contain "old-style" packages"""
+
+    seen = {}
+
+    for dirname in inputs:
+        dirname = normalize_path(dirname)
+        if dirname in seen:
+            continue
+
+        seen[dirname] = 1
+        if not os.path.isdir(dirname):
+            continue
+
+        files = os.listdir(dirname)
+        yield dirname, files
+
+        for name in files:
+            if not name.endswith('.pth'):
+                # We only care about the .pth files
+                continue
+            if name in ('easy-install.pth','setuptools.pth'):
+                # Ignore .pth files that we control
+                continue
+
+            # Read the .pth file
+            f = open(os.path.join(dirname,name))
+            lines = list(yield_lines(f))
+            f.close()
+
+            # Yield existing non-dupe, non-import directory lines from it
+            for line in lines:
+                if not line.startswith("import"):
+                    line = normalize_path(line.rstrip())
+                    if line not in seen:
+                        seen[line] = 1
+                        if not os.path.isdir(line):
+                            continue
+                        yield line, os.listdir(line)
+
+
+def extract_wininst_cfg(dist_filename):
+    """Extract configuration data from a bdist_wininst .exe
+
+    Returns a ConfigParser.RawConfigParser, or None
+    """
+    f = open(dist_filename,'rb')
+    try:
+        endrec = zipfile._EndRecData(f)
+        if endrec is None:
+            return None
+
+        prepended = (endrec[9] - endrec[5]) - endrec[6]
+        if prepended < 12:  # no wininst data here
+            return None
+        f.seek(prepended-12)
+
+        import struct, StringIO, ConfigParser
+        tag, cfglen, bmlen = struct.unpack("<iii",f.read(12))
+        if tag not in (0x1234567A, 0x1234567B):
+            return None     # not a valid tag
+
+        f.seek(prepended-(12+cfglen+bmlen))
+        cfg = ConfigParser.RawConfigParser({'version':'','target_version':''})
+        try:
+            cfg.readfp(StringIO.StringIO(f.read(cfglen).split(chr(0),1)[0]))
+        except ConfigParser.Error:
+            return None
+        if not cfg.has_section('metadata') or not cfg.has_section('Setup'):
+            return None
+        return cfg
+
+    finally:
+        f.close()
+
+
+
+
+
+
+
+
+def get_exe_prefixes(exe_filename):
+    """Get exe->egg path translations for a given .exe file"""
+
+    prefixes = [
+        ('PURELIB/', ''),
+        ('PLATLIB/', ''),
+        ('SCRIPTS/', 'EGG-INFO/scripts/')
+    ]
+    z = zipfile.ZipFile(exe_filename)
+    try:
+        for info in z.infolist():
+            name = info.filename
+            parts = name.split('/')
+            if len(parts)==3 and parts[2]=='PKG-INFO':
+                if parts[1].endswith('.egg-info'):
+                    prefixes.insert(0,('/'.join(parts[:2]), 'EGG-INFO/'))
+                    break
+            if len(parts)<>2 or not name.endswith('.pth'):
+                continue
+            if name.endswith('-nspkg.pth'):
+                continue
+            if parts[0] in ('PURELIB','PLATLIB'):
+                for pth in yield_lines(z.read(name)):
+                    pth = pth.strip().replace('\\','/')
+                    if not pth.startswith('import'):
+                        prefixes.append((('%s/%s/' % (parts[0],pth)), ''))
+    finally:
+        z.close()
+
+    prefixes.sort(); prefixes.reverse()
+    return prefixes
+
+
+def parse_requirement_arg(spec):
+    try:
+        return Requirement.parse(spec)
+    except ValueError:
+        raise DistutilsError(
+            "Not a URL, existing file, or requirement spec: %r" % (spec,)
+        )
+
+class PthDistributions(Environment):
+    """A .pth file with Distribution paths in it"""
+
+    dirty = False
+
+    def __init__(self, filename):
+        self.filename = filename
+        self.basedir = normalize_path(os.path.dirname(self.filename))
+        self._load(); Environment.__init__(self, [], None, None)
+        for path in yield_lines(self.paths):
+            map(self.add, find_distributions(path, True))
+
+    def _load(self):
+        self.paths = []
+        saw_import = False
+        seen = {}
+        if os.path.isfile(self.filename):
+            for line in open(self.filename,'rt'):
+                if line.startswith('import'):
+                    saw_import = True
+                    continue
+                path = line.rstrip()
+                self.paths.append(path)
+                if not path.strip() or path.strip().startswith('#'):
+                    continue
+                # skip non-existent paths, in case somebody deleted a package
+                # manually, and duplicate paths as well
+                path = self.paths[-1] = normalize_path(
+                    os.path.join(self.basedir,path)
+                )
+                if not os.path.exists(path) or path in seen:
+                    self.paths.pop()    # skip it
+                    self.dirty = True   # we cleaned up, so we're dirty now :)
+                    continue
+                seen[path] = 1
+
+        if self.paths and not saw_import:
+            self.dirty = True   # ensure anything we touch has import wrappers
+        while self.paths and not self.paths[-1].strip():
+            self.paths.pop()
+
+    def save(self):
+        """Write changed .pth file back to disk"""
+        if not self.dirty:
+            return
+
+        data = '\n'.join(map(self.make_relative,self.paths))
+        if data:
+            log.debug("Saving %s", self.filename)
+            data = (
+                "import sys; sys.__plen = len(sys.path)\n"
+                "%s\n"
+                "import sys; new=sys.path[sys.__plen:];"
+                " del sys.path[sys.__plen:];"
+                " p=getattr(sys,'__egginsert',0); sys.path[p:p]=new;"
+                " sys.__egginsert = p+len(new)\n"
+            ) % data
+
+            if os.path.islink(self.filename):
+                os.unlink(self.filename)
+            f = open(self.filename,'wb')
+            f.write(data); f.close()
+
+        elif os.path.exists(self.filename):
+            log.debug("Deleting empty %s", self.filename)
+            os.unlink(self.filename)
+
+        self.dirty = False
+
+    def add(self,dist):
+        """Add `dist` to the distribution map"""
+        if dist.location not in self.paths:
+            self.paths.append(dist.location); self.dirty = True
+        Environment.add(self,dist)
+
+    def remove(self,dist):
+        """Remove `dist` from the distribution map"""
+        while dist.location in self.paths:
+            self.paths.remove(dist.location); self.dirty = True
+        Environment.remove(self,dist)
+
+
+    def make_relative(self,path):
+        if normalize_path(os.path.dirname(path))==self.basedir:
+            return os.path.basename(path)
+        return path
+
+
+def get_script_header(script_text, executable=sys_executable):
+    """Create a #! line, getting options (if any) from script_text"""
+    from distutils.command.build_scripts import first_line_re
+    first, rest = (script_text+'\n').split('\n',1)
+    match = first_line_re.match(first)
+    options = ''
+    if match:
+        script_text = rest
+        options = match.group(1) or ''
+        if options:
+            options = ' '+options
+    return "#!%(executable)s%(options)s\n" % locals()
+
+
+def auto_chmod(func, arg, exc):
+    if func is os.remove and os.name=='nt':
+        os.chmod(arg, stat.S_IWRITE)
+        return func(arg)
+    exc = sys.exc_info()
+    raise exc[0], (exc[1][0], exc[1][1] + (" %s %s" % (func,arg)))
+
+
+def uncache_zipdir(path):
+    """Ensure that the zip directory cache doesn't have stale info for path"""
+    from zipimport import _zip_directory_cache as zdc
+    if path in zdc:
+        del zdc[path]
+    else:
+        path = normalize_path(path)
+        for p in zdc:
+            if normalize_path(p)==path:
+                del zdc[p]
+                return
+
+
+def get_script_args(dist, executable=sys_executable):
+    """Yield write_script() argument tuples for a distribution's entrypoints"""
+    spec = str(dist.as_requirement())
+    header = get_script_header("", executable)
+    for group in 'console_scripts', 'gui_scripts':
+        for name,ep in dist.get_entry_map(group).items():
+            script_text = (
+                "# EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r\n"
+                "__requires__ = %(spec)r\n"
+                "import sys\n"
+                "from pkg_resources import load_entry_point\n"
+                "\n"
+                "sys.exit(\n"
+                "   load_entry_point(%(spec)r, %(group)r, %(name)r)()\n"
+                ")\n"
+            ) % locals()
+            if sys.platform=='win32':
+                # On Windows, add a .py extension and an .exe launcher
+                if group=='gui_scripts':
+                    ext, launcher = '-script.pyw', 'gui.exe'
+                    old = ['.pyw']
+                    new_header = re.sub('(?i)python.exe','pythonw.exe',header)
+                else:
+                    ext, launcher = '-script.py', 'cli.exe'
+                    old = ['.py','.pyc','.pyo']
+                    new_header = re.sub('(?i)pythonw.exe','pythonw.exe',header)
+
+                if os.path.exists(new_header[2:-1]):
+                    hdr = new_header
+                else:
+                    hdr = header
+                yield (name+ext, hdr+script_text, 't', [name+x for x in old])
+                yield (
+                    name+'.exe', resource_string('setuptools', launcher),
+                    'b' # write in binary mode
+                )
+            else:
+                # On other platforms, we assume the right thing to do is to
+                # just write the stub with no extension.
+                yield (name, header+script_text)
+
+def rmtree(path, ignore_errors=False, onerror=auto_chmod):
+    """Recursively delete a directory tree.
+
+    This code is taken from the Python 2.4 version of 'shutil', because
+    the 2.3 version doesn't really work right.
+    """
+    if ignore_errors:
+        def onerror(*args):
+            pass
+    elif onerror is None:
+        def onerror(*args):
+            raise
+    names = []
+    try:
+        names = os.listdir(path)
+    except os.error, err:
+        onerror(os.listdir, path, sys.exc_info())
+    for name in names:
+        fullname = os.path.join(path, name)
+        try:
+            mode = os.lstat(fullname).st_mode
+        except os.error:
+            mode = 0
+        if stat.S_ISDIR(mode):
+            rmtree(fullname, ignore_errors, onerror)
+        else:
+            try:
+                os.remove(fullname)
+            except os.error, err:
+                onerror(os.remove, fullname, sys.exc_info())
+    try:
+        os.rmdir(path)
+    except os.error:
+        onerror(os.rmdir, path, sys.exc_info())
+
+
+
+
+
+
+
+def main(argv=None, **kw):
+    from setuptools import setup
+    from setuptools.dist import Distribution
+    import distutils.core
+
+    USAGE = """\
+usage: %(script)s [options] requirement_or_url ...
+   or: %(script)s --help
+"""
+
+    def gen_usage (script_name):
+        script = os.path.basename(script_name)
+        return USAGE % vars()
+
+    def with_ei_usage(f):
+        old_gen_usage = distutils.core.gen_usage
+        try:
+            distutils.core.gen_usage = gen_usage
+            return f()
+        finally:
+            distutils.core.gen_usage = old_gen_usage
+
+    class DistributionWithoutHelpCommands(Distribution):
+        def _show_help(self,*args,**kw):
+            with_ei_usage(lambda: Distribution._show_help(self,*args,**kw))
+
+    if argv is None:
+        argv = sys.argv[1:]
+
+    with_ei_usage(lambda:
+        setup(
+            script_args = ['-q','easy_install', '-v']+argv,
+            script_name = sys.argv[0] or 'easy_install',
+            distclass=DistributionWithoutHelpCommands, **kw
+        )
+    )
diff --git a/Lib/setuptools/command/egg_info.py b/Lib/setuptools/command/egg_info.py
new file mode 100755
index 0000000..b68fb39
--- /dev/null
+++ b/Lib/setuptools/command/egg_info.py
@@ -0,0 +1,365 @@
+"""setuptools.command.egg_info
+
+Create a distribution's .egg-info directory and contents"""
+
+# This module should be kept compatible with Python 2.3
+import os, re
+from setuptools import Command
+from distutils.errors import *
+from distutils import log
+from setuptools.command.sdist import sdist
+from distutils import file_util
+from distutils.util import convert_path
+from distutils.filelist import FileList
+from pkg_resources import parse_requirements, safe_name, parse_version, \
+    safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename
+from sdist import walk_revctrl
+
+class egg_info(Command):
+    description = "create a distribution's .egg-info directory"
+
+    user_options = [
+        ('egg-base=', 'e', "directory containing .egg-info directories"
+                           " (default: top of the source tree)"),
+        ('tag-svn-revision', 'r',
+            "Add subversion revision ID to version number"),
+        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+    ]
+
+    boolean_options = ['tag-date','tag-svn-revision']
+
+    def initialize_options (self):
+        self.egg_name = None
+        self.egg_version = None
+        self.egg_base = None
+        self.egg_info = None
+        self.tag_build = None
+        self.tag_svn_revision = 0
+        self.tag_date = 0
+        self.broken_egg_info = False
+
+    def finalize_options (self):
+        self.egg_name = safe_name(self.distribution.get_name())
+        self.egg_version = self.tagged_version()
+
+        try:
+            list(
+                parse_requirements('%s==%s' % (self.egg_name,self.egg_version))
+            )
+        except ValueError:
+            raise DistutilsOptionError(
+                "Invalid distribution name or version syntax: %s-%s" %
+                (self.egg_name,self.egg_version)
+            )
+
+        if self.egg_base is None:
+            dirs = self.distribution.package_dir
+            self.egg_base = (dirs or {}).get('',os.curdir)
+
+        self.ensure_dirname('egg_base')
+        self.egg_info = to_filename(self.egg_name)+'.egg-info'
+        if self.egg_base != os.curdir:
+            self.egg_info = os.path.join(self.egg_base, self.egg_info)
+        if '-' in self.egg_name: self.check_broken_egg_info()
+
+        # Set package version for the benefit of dumber commands
+        # (e.g. sdist, bdist_wininst, etc.)
+        #
+        self.distribution.metadata.version = self.egg_version
+
+        # If we bootstrapped around the lack of a PKG-INFO, as might be the
+        # case in a fresh checkout, make sure that any special tags get added
+        # to the version info
+        #
+        pd = self.distribution._patched_dist
+        if pd is not None and pd.key==self.egg_name.lower():
+            pd._version = self.egg_version
+            pd._parsed_version = parse_version(self.egg_version)
+            self.distribution._patched_dist = None
+
+
+
+    def write_or_delete_file(self, what, filename, data, force=False):
+        """Write `data` to `filename` or delete if empty
+
+        If `data` is non-empty, this routine is the same as ``write_file()``.
+        If `data` is empty but not ``None``, this is the same as calling
+        ``delete_file(filename)`.  If `data` is ``None``, then this is a no-op
+        unless `filename` exists, in which case a warning is issued about the
+        orphaned file (if `force` is false), or deleted (if `force` is true).
+        """
+        if data:
+            self.write_file(what, filename, data)
+        elif os.path.exists(filename):
+            if data is None and not force:
+                log.warn(
+                    "%s not set in setup(), but %s exists", what, filename
+                )
+                return
+            else:
+                self.delete_file(filename)
+
+    def write_file(self, what, filename, data):
+        """Write `data` to `filename` (if not a dry run) after announcing it
+
+        `what` is used in a log message to identify what is being written
+        to the file.
+        """
+        log.info("writing %s to %s", what, filename)
+        if not self.dry_run:
+            f = open(filename, 'wb')
+            f.write(data)
+            f.close()
+
+    def delete_file(self, filename):
+        """Delete `filename` (if not a dry run) after announcing it"""
+        log.info("deleting %s", filename)
+        if not self.dry_run:
+            os.unlink(filename)
+
+
+
+
+    def run(self):
+        self.mkpath(self.egg_info)
+        installer = self.distribution.fetch_build_egg
+        for ep in iter_entry_points('egg_info.writers'):
+            writer = ep.load(installer=installer)
+            writer(self, ep.name, os.path.join(self.egg_info,ep.name))
+        self.find_sources()
+
+    def tagged_version(self):
+        version = self.distribution.get_version()
+        if self.tag_build:
+            version+=self.tag_build
+        if self.tag_svn_revision and (
+            os.path.exists('.svn') or os.path.exists('PKG-INFO')
+        ):  version += '-r%s' % self.get_svn_revision()
+        if self.tag_date:
+            import time; version += time.strftime("-%Y%m%d")
+        return safe_version(version)
+
+    def get_svn_revision(self):
+        revision = 0
+        urlre = re.compile('url="([^"]+)"')
+        revre = re.compile('committed-rev="(\d+)"')
+        for base,dirs,files in os.walk(os.curdir):
+            if '.svn' not in dirs:
+                dirs[:] = []
+                continue    # no sense walking uncontrolled subdirs
+            dirs.remove('.svn')
+            f = open(os.path.join(base,'.svn','entries'))
+            data = f.read()
+            f.close()
+            dirurl = urlre.search(data).group(1)    # get repository URL
+            if base==os.curdir:
+                base_url = dirurl+'/'   # save the root url
+            elif not dirurl.startswith(base_url):
+                dirs[:] = []
+                continue    # not part of the same svn tree, skip it
+            for match in revre.finditer(data):
+                revision = max(revision, int(match.group(1)))
+        return str(revision or get_pkg_info_revision())
+
+    def find_sources(self):
+        """Generate SOURCES.txt manifest file"""
+        manifest_filename = os.path.join(self.egg_info,"SOURCES.txt")
+        mm = manifest_maker(self.distribution)
+        mm.manifest = manifest_filename
+        mm.run()
+        self.filelist = mm.filelist
+
+    def check_broken_egg_info(self):
+        bei = self.egg_name+'.egg-info'
+        if self.egg_base != os.curdir:
+            bei = os.path.join(self.egg_base, bei)
+        if os.path.exists(bei):
+            log.warn(
+                "-"*78+'\n'
+                "Note: Your current .egg-info directory has a '-' in its name;"
+                '\nthis will not work correctly with "setup.py develop".\n\n'
+                'Please rename %s to %s to correct this problem.\n'+'-'*78,
+                bei, self.egg_info
+            )
+            self.broken_egg_info = self.egg_info
+            self.egg_info = bei     # make it work for now
+
+class FileList(FileList):
+    """File list that accepts only existing, platform-independent paths"""
+
+    def append(self, item):
+        path = convert_path(item)
+        if os.path.exists(path):
+            self.files.append(path)
+
+
+
+
+
+
+
+
+
+
+
+class manifest_maker(sdist):
+
+    template = "MANIFEST.in"
+
+    def initialize_options (self):
+        self.use_defaults = 1
+        self.prune = 1
+        self.manifest_only = 1
+        self.force_manifest = 1
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        self.filelist = FileList()
+        if not os.path.exists(self.manifest):
+            self.write_manifest()   # it must exist so it'll get in the list
+        self.filelist.findall()
+        self.add_defaults()
+        if os.path.exists(self.template):
+            self.read_template()
+        self.prune_file_list()
+        self.filelist.sort()
+        self.filelist.remove_duplicates()
+        self.write_manifest()
+
+    def write_manifest (self):
+        """Write the file list in 'self.filelist' (presumably as filled in
+        by 'add_defaults()' and 'read_template()') to the manifest file
+        named by 'self.manifest'.
+        """
+        files = self.filelist.files
+        if os.sep!='/':
+            files = [f.replace(os.sep,'/') for f in files]
+        self.execute(file_util.write_file, (self.manifest, files),
+                     "writing manifest file '%s'" % self.manifest)
+
+
+
+
+
+    def add_defaults(self):
+        sdist.add_defaults(self)
+        self.filelist.append(self.template)
+        self.filelist.append(self.manifest)
+        rcfiles = list(walk_revctrl())
+        if rcfiles:
+            self.filelist.extend(rcfiles)
+        elif os.path.exists(self.manifest):
+            self.read_manifest()
+        ei_cmd = self.get_finalized_command('egg_info')
+        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
+
+    def prune_file_list (self):
+        build = self.get_finalized_command('build')
+        base_dir = self.distribution.get_fullname()
+        self.filelist.exclude_pattern(None, prefix=build.build_base)
+        self.filelist.exclude_pattern(None, prefix=base_dir)
+        sep = re.escape(os.sep)
+        self.filelist.exclude_pattern(sep+r'(RCS|CVS|\.svn)'+sep, is_regex=1)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def write_pkg_info(cmd, basename, filename):
+    log.info("writing %s", filename)
+    if not cmd.dry_run:
+        metadata = cmd.distribution.metadata
+        metadata.version, oldver = cmd.egg_version, metadata.version
+        metadata.name, oldname   = cmd.egg_name, metadata.name
+        try:
+            # write unescaped data to PKG-INFO, so older pkg_resources
+            # can still parse it
+            metadata.write_pkg_info(cmd.egg_info)
+        finally:
+            metadata.name, metadata.version = oldname, oldver
+
+        safe = getattr(cmd.distribution,'zip_safe',None)
+        import bdist_egg; bdist_egg.write_safety_flag(cmd.egg_info, safe)
+
+def warn_depends_obsolete(cmd, basename, filename):
+    if os.path.exists(filename):
+        log.warn(
+            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
+            "Use the install_requires/extras_require setup() args instead."
+        )
+
+
+def write_requirements(cmd, basename, filename):
+    dist = cmd.distribution
+    data = ['\n'.join(yield_lines(dist.install_requires or ()))]
+    for extra,reqs in (dist.extras_require or {}).items():
+        data.append('\n\n[%s]\n%s' % (extra, '\n'.join(yield_lines(reqs))))
+    cmd.write_or_delete_file("requirements", filename, ''.join(data))
+
+def write_toplevel_names(cmd, basename, filename):
+    pkgs = dict.fromkeys(
+        [k.split('.',1)[0]
+            for k in cmd.distribution.iter_distribution_names()
+        ]
+    )
+    cmd.write_file("top-level names", filename, '\n'.join(pkgs)+'\n')
+
+
+
+def overwrite_arg(cmd, basename, filename):
+    write_arg(cmd, basename, filename, True)
+
+def write_arg(cmd, basename, filename, force=False):
+    argname = os.path.splitext(basename)[0]
+    value = getattr(cmd.distribution, argname, None)
+    if value is not None:
+        value = '\n'.join(value)+'\n'
+    cmd.write_or_delete_file(argname, filename, value, force)
+
+def write_entries(cmd, basename, filename):
+    ep = cmd.distribution.entry_points
+
+    if isinstance(ep,basestring) or ep is None:
+        data = ep
+    elif ep is not None:
+        data = []
+        for section, contents in ep.items():
+            if not isinstance(contents,basestring):
+                contents = EntryPoint.parse_group(section, contents)
+                contents = '\n'.join(map(str,contents.values()))
+            data.append('[%s]\n%s\n\n' % (section,contents))
+        data = ''.join(data)
+
+    cmd.write_or_delete_file('entry points', filename, data, True)
+
+def get_pkg_info_revision():
+    # See if we can get a -r### off of PKG-INFO, in case this is an sdist of
+    # a subversion revision
+    #
+    if os.path.exists('PKG-INFO'):
+        f = open('PKG-INFO','rU')
+        for line in f:
+            match = re.match(r"Version:.*-r(\d+)\s*$", line)
+            if match:
+                return int(match.group(1))
+    return 0
diff --git a/Lib/setuptools/command/install.py b/Lib/setuptools/command/install.py
new file mode 100644
index 0000000..bfb9af5
--- /dev/null
+++ b/Lib/setuptools/command/install.py
@@ -0,0 +1,101 @@
+import setuptools, sys
+from distutils.command.install import install as _install
+from distutils.errors import DistutilsArgError
+
+class install(_install):
+    """Use easy_install to install the package, w/dependencies"""
+
+    user_options = _install.user_options + [
+        ('old-and-unmanageable', None, "Try not to use this!"),
+        ('single-version-externally-managed', None,
+            "used by system package builders to create 'flat' eggs"),
+    ]
+    boolean_options = _install.boolean_options + [
+        'old-and-unmanageable', 'single-version-externally-managed',
+    ]
+    new_commands = [
+        ('install_egg_info', lambda self: True),
+        ('install_scripts',  lambda self: True),
+    ]
+    _nc = dict(new_commands)
+    sub_commands = [
+        cmd for cmd in _install.sub_commands if cmd[0] not in _nc
+    ] + new_commands
+
+    def initialize_options(self):
+        _install.initialize_options(self)
+        self.old_and_unmanageable = None
+        self.single_version_externally_managed = None
+        self.no_compile = None  # make DISTUTILS_DEBUG work right!
+
+    def finalize_options(self):
+        _install.finalize_options(self)
+        if self.root:
+            self.single_version_externally_managed = True
+        elif self.single_version_externally_managed:
+            if not self.root and not self.record:
+                raise DistutilsArgError(
+                    "You must specify --record or --root when building system"
+                    " packages"
+                )
+
+    def handle_extra_path(self):
+        # We always ignore extra_path, because we install as .egg or .egg-info
+        self.path_file = None
+        self.extra_dirs = ''
+
+    def run(self):
+        # Explicit request for old-style install?  Just do it
+        if self.old_and_unmanageable or self.single_version_externally_managed:
+            return _install.run(self)
+
+        # Attempt to detect whether we were called from setup() or by another
+        # command.  If we were called by setup(), our caller will be the
+        # 'run_command' method in 'distutils.dist', and *its* caller will be
+        # the 'run_commands' method.  If we were called any other way, our
+        # immediate caller *might* be 'run_command', but it won't have been
+        # called by 'run_commands'.  This is slightly kludgy, but seems to
+        # work.
+        #
+        caller = sys._getframe(2)
+        caller_module = caller.f_globals.get('__name__','')
+        caller_name = caller.f_code.co_name
+
+        if caller_module != 'distutils.dist' or caller_name!='run_commands':
+            # We weren't called from the command line or setup(), so we
+            # should run in backward-compatibility mode to support bdist_*
+            # commands.
+            _install.run(self)
+        else:
+            self.do_egg_install()
+
+
+
+
+
+
+
+
+
+
+
+
+    def do_egg_install(self):
+
+        from setuptools.command.easy_install import easy_install
+
+        cmd = easy_install(
+            self.distribution, args="x", root=self.root, record=self.record,
+        )
+        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
+
+        self.run_command('bdist_egg')
+        args = [self.distribution.get_command_obj('bdist_egg').egg_output]
+
+        if setuptools.bootstrap_install_from:
+            # Bootstrap self-installation of setuptools
+            args.insert(0, setuptools.bootstrap_install_from)
+
+        cmd.args = args
+        cmd.run()
+        setuptools.bootstrap_install_from = None
diff --git a/Lib/setuptools/command/install_egg_info.py b/Lib/setuptools/command/install_egg_info.py
new file mode 100755
index 0000000..193e91a
--- /dev/null
+++ b/Lib/setuptools/command/install_egg_info.py
@@ -0,0 +1,81 @@
+from setuptools import Command
+from setuptools.archive_util import unpack_archive
+from distutils import log, dir_util
+import os, shutil, pkg_resources
+
+class install_egg_info(Command):
+    """Install an .egg-info directory for the package"""
+
+    description = "Install an .egg-info directory for the package"
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+    ]
+
+    def initialize_options(self):
+        self.install_dir = None
+
+    def finalize_options(self):
+        self.set_undefined_options('install_lib',('install_dir','install_dir'))
+        ei_cmd = self.get_finalized_command("egg_info")
+        basename = pkg_resources.Distribution(
+            None, None, ei_cmd.egg_name, ei_cmd.egg_version
+        ).egg_name()+'.egg-info'
+        self.source = ei_cmd.egg_info
+        self.target = os.path.join(self.install_dir, basename)
+        self.outputs = [self.target]
+
+    def run(self):
+        self.run_command('egg_info')
+        target = self.target
+        if os.path.isdir(self.target) and not os.path.islink(self.target):
+            dir_util.remove_tree(self.target, dry_run=self.dry_run)
+        elif os.path.exists(self.target):
+            self.execute(os.unlink,(self.target,),"Removing "+self.target)
+        if not self.dry_run:
+            pkg_resources.ensure_directory(self.target)
+        self.execute(self.copytree, (),
+            "Copying %s to %s" % (self.source, self.target)
+        )
+        self.install_namespaces()
+
+    def get_outputs(self):
+        return self.outputs
+
+    def copytree(self):
+        # Copy the .egg-info tree to site-packages
+        def skimmer(src,dst):
+            # filter out source-control directories; note that 'src' is always
+            # a '/'-separated path, regardless of platform.  'dst' is a
+            # platform-specific path.
+            for skip in '.svn/','CVS/':
+                if src.startswith(skip) or '/'+skip in src:
+                    return None
+            self.outputs.append(dst)
+            log.debug("Copying %s to %s", src, dst)
+            return dst
+        unpack_archive(self.source, self.target, skimmer)
+
+    def install_namespaces(self):
+        nsp = (self.distribution.namespace_packages or [])[:]
+        if not nsp: return
+        nsp.sort()  # set up shorter names first
+        filename,ext = os.path.splitext(self.target)
+        filename += '-nspkg.pth'; self.outputs.append(filename)
+        log.info("Installing %s",filename)
+        if not self.dry_run:
+            f = open(filename,'wb')
+            for pkg in nsp:
+                pth = tuple(pkg.split('.'))
+                f.write(
+                    "import sys,new,os; "
+                    "p = os.path.join(sys._getframe(1).f_locals['sitedir'], "
+                        "*%(pth)r); "
+                    "ie = os.path.exists(os.path.join(p,'__init__.py')); "
+                    "m = not ie and "
+                        "sys.modules.setdefault(%(pkg)r,new.module(%(pkg)r)); "
+                    "mp = (m or []) and m.__dict__.setdefault('__path__',[]); "
+                    "(p not in mp) and mp.append(p)\n"
+                    % locals()
+                )
+            f.close()
diff --git a/Lib/setuptools/command/install_lib.py b/Lib/setuptools/command/install_lib.py
new file mode 100644
index 0000000..96c8dfe
--- /dev/null
+++ b/Lib/setuptools/command/install_lib.py
@@ -0,0 +1,76 @@
+from distutils.command.install_lib import install_lib as _install_lib
+import os
+
+class install_lib(_install_lib):
+    """Don't add compiled flags to filenames of non-Python files"""
+
+    def _bytecode_filenames (self, py_filenames):
+        bytecode_files = []
+        for py_file in py_filenames:
+            if not py_file.endswith('.py'):
+                continue
+            if self.compile:
+                bytecode_files.append(py_file + "c")
+            if self.optimize > 0:
+                bytecode_files.append(py_file + "o")
+
+        return bytecode_files
+
+    def run(self):
+        self.build()
+        outfiles = self.install()
+        if outfiles is not None:
+            # always compile, in case we have any extension stubs to deal with
+            self.byte_compile(outfiles)
+
+    def get_exclusions(self):
+        exclude = {}
+        nsp = self.distribution.namespace_packages
+
+        if (nsp and self.get_finalized_command('install')
+               .single_version_externally_managed
+        ):
+            for pkg in nsp:
+                parts = pkg.split('.')
+                while parts:
+                    pkgdir = os.path.join(self.install_dir, *parts)
+                    for f in '__init__.py', '__init__.pyc', '__init__.pyo':
+                        exclude[os.path.join(pkgdir,f)] = 1
+                    parts.pop()
+        return exclude
+
+    def copy_tree(
+        self, infile, outfile,
+        preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
+    ):
+        assert preserve_mode and preserve_times and not preserve_symlinks
+        exclude = self.get_exclusions()
+
+        if not exclude:
+            return _install_lib.copy_tree(self, infile, outfile)
+
+        # Exclude namespace package __init__.py* files from the output
+
+        from setuptools.archive_util import unpack_directory
+        from distutils import log
+
+        outfiles = []
+
+        def pf(src, dst):
+            if dst in exclude:
+                log.warn("Skipping installation of %s (namespace package)",dst)
+                return False
+
+            log.info("copying %s -> %s", src, os.path.dirname(dst))
+            outfiles.append(dst)
+            return dst
+
+        unpack_directory(infile, outfile, pf)
+        return outfiles
+
+    def get_outputs(self):
+        outputs = _install_lib.get_outputs(self)
+        exclude = self.get_exclusions()
+        if exclude:
+            return [f for f in outputs if f not in exclude]
+        return outputs
diff --git a/Lib/setuptools/command/install_scripts.py b/Lib/setuptools/command/install_scripts.py
new file mode 100755
index 0000000..69558bf
--- /dev/null
+++ b/Lib/setuptools/command/install_scripts.py
@@ -0,0 +1,56 @@
+from distutils.command.install_scripts import install_scripts \
+     as _install_scripts
+from easy_install import get_script_args, sys_executable
+from pkg_resources import Distribution, PathMetadata, ensure_directory
+import os
+from distutils import log
+
+class install_scripts(_install_scripts):
+    """Do normal script install, plus any egg_info wrapper scripts"""
+
+    def initialize_options(self):
+        _install_scripts.initialize_options(self)
+        self.no_ep = False
+
+    def run(self):
+        self.run_command("egg_info")
+        if self.distribution.scripts:
+            _install_scripts.run(self)  # run first to set up self.outfiles
+        else:
+            self.outfiles = []
+        if self.no_ep:
+            # don't install entry point scripts into .egg file!
+            return
+
+        ei_cmd = self.get_finalized_command("egg_info")
+        dist = Distribution(
+            ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+            ei_cmd.egg_name, ei_cmd.egg_version,
+        )
+        bs_cmd = self.get_finalized_command('build_scripts')
+        executable = getattr(bs_cmd,'executable',sys_executable)
+
+        for args in get_script_args(dist, executable):
+            self.write_script(*args)
+
+
+
+
+
+
+
+    def write_script(self, script_name, contents, mode="t", *ignored):
+        """Write an executable file to the scripts directory"""
+        log.info("Installing %s script to %s", script_name, self.install_dir)
+        target = os.path.join(self.install_dir, script_name)
+        self.outfiles.append(target)
+
+        if not self.dry_run:
+            ensure_directory(target)
+            f = open(target,"w"+mode)
+            f.write(contents)
+            f.close()
+            try:
+                os.chmod(target,0755)
+            except (AttributeError, os.error):
+                pass
diff --git a/Lib/setuptools/command/rotate.py b/Lib/setuptools/command/rotate.py
new file mode 100755
index 0000000..8aab312
--- /dev/null
+++ b/Lib/setuptools/command/rotate.py
@@ -0,0 +1,57 @@
+import distutils, os
+from setuptools import Command
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import *
+
+class rotate(Command):
+    """Delete older distributions"""
+
+    description = "delete older distributions, keeping N newest files"
+    user_options = [
+        ('match=',    'm', "patterns to match (required)"),
+        ('dist-dir=', 'd', "directory where the distributions are"),
+        ('keep=',     'k', "number of matching distributions to keep"),
+    ]
+
+    boolean_options = []
+
+    def initialize_options(self):
+        self.match = None
+        self.dist_dir = None
+        self.keep = None
+
+    def finalize_options(self):
+        if self.match is None:
+            raise DistutilsOptionError(
+                "Must specify one or more (comma-separated) match patterns "
+                "(e.g. '.zip' or '.egg')"
+            )
+        if self.keep is None:
+            raise DistutilsOptionError("Must specify number of files to keep")
+        try:
+            self.keep = int(self.keep)
+        except ValueError:
+            raise DistutilsOptionError("--keep must be an integer")
+        if isinstance(self.match, basestring):
+            self.match = [
+                convert_path(p.strip()) for p in self.match.split(',')
+            ]
+        self.set_undefined_options('bdist',('dist_dir', 'dist_dir'))
+
+    def run(self):
+        self.run_command("egg_info")
+        from glob import glob
+        for pattern in self.match:
+            pattern = self.distribution.get_name()+'*'+pattern
+            files = glob(os.path.join(self.dist_dir,pattern))
+            files = [(os.path.getmtime(f),f) for f in files]
+            files.sort()
+            files.reverse()
+
+            log.info("%d file(s) matching %s", len(files), pattern)
+            files = files[self.keep:]
+            for (t,f) in files:
+                log.info("Deleting %s", f)
+                if not self.dry_run:
+                    os.unlink(f)
diff --git a/Lib/setuptools/command/saveopts.py b/Lib/setuptools/command/saveopts.py
new file mode 100755
index 0000000..9c58d72
--- /dev/null
+++ b/Lib/setuptools/command/saveopts.py
@@ -0,0 +1,24 @@
+import distutils, os
+from setuptools import Command
+from setuptools.command.setopt import edit_config, option_base
+
+class saveopts(option_base):
+    """Save command-line options to a file"""
+
+    description = "save supplied options to setup.cfg or other config file"
+
+    def run(self):
+        dist = self.distribution
+        commands = dist.command_options.keys()
+        settings = {}
+
+        for cmd in commands:
+
+            if cmd=='saveopts':
+                continue    # don't save our own options!
+
+            for opt,(src,val) in dist.get_option_dict(cmd).items():
+                if src=="command line":
+                    settings.setdefault(cmd,{})[opt] = val
+
+        edit_config(self.filename, settings, self.dry_run)
diff --git a/Lib/setuptools/command/sdist.py b/Lib/setuptools/command/sdist.py
new file mode 100755
index 0000000..829cd3c
--- /dev/null
+++ b/Lib/setuptools/command/sdist.py
@@ -0,0 +1,163 @@
+from distutils.command.sdist import sdist as _sdist
+from distutils.util import convert_path
+import os, re, sys, pkg_resources
+
+entities = [
+    ("&lt;","<"), ("&gt;", ">"), ("&quot;", '"'), ("&apos;", "'"),
+    ("&amp;", "&")
+]
+
+def unescape(data):
+    for old,new in entities:
+        data = data.replace(old,new)
+    return data
+
+def re_finder(pattern, postproc=None):
+    def find(dirname, filename):
+        f = open(filename,'rU')
+        data = f.read()
+        f.close()
+        for match in pattern.finditer(data):
+            path = match.group(1)
+            if postproc:
+                path = postproc(path)
+            yield joinpath(dirname,path)
+    return find
+
+def joinpath(prefix,suffix):
+    if not prefix:
+        return suffix
+    return os.path.join(prefix,suffix)
+
+
+
+
+
+
+
+
+
+
+
+def walk_revctrl(dirname=''):
+    """Find all files under revision control"""
+    for ep in pkg_resources.iter_entry_points('setuptools.file_finders'):
+        for item in ep.load()(dirname):
+            yield item
+
+def _default_revctrl(dirname=''):
+    for path, finder in finders:
+        path = joinpath(dirname,path)
+        if os.path.isfile(path):
+            for path in finder(dirname,path):
+                if os.path.isfile(path):
+                    yield path
+                elif os.path.isdir(path):
+                    for item in _default_revctrl(path):
+                        yield item
+
+def externals_finder(dirname, filename):
+    """Find any 'svn:externals' directories"""
+    found = False
+    f = open(filename,'rb')
+    for line in iter(f.readline, ''):    # can't use direct iter!
+        parts = line.split()
+        if len(parts)==2:
+            kind,length = parts
+            data = f.read(int(length))
+            if kind=='K' and data=='svn:externals':
+                found = True
+            elif kind=='V' and found:
+                f.close()
+                break
+    else:
+        f.close()
+        return
+
+    for line in data.splitlines():
+        parts = line.split()
+        if parts:
+            yield joinpath(dirname, parts[0])
+
+
+finders = [
+    (convert_path('CVS/Entries'),
+        re_finder(re.compile(r"^\w?/([^/]+)/", re.M))),
+    (convert_path('.svn/entries'),
+        re_finder(
+            re.compile(r'name="([^"]+)"(?![^>]+deleted="true")', re.I),
+            unescape
+        )
+    ),
+    (convert_path('.svn/dir-props'), externals_finder),
+]
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class sdist(_sdist):
+    """Smart sdist that finds anything supported by revision control"""
+
+    user_options = [
+        ('formats=', None,
+         "formats for source distribution (comma-separated list)"),
+        ('keep-temp', 'k',
+         "keep the distribution tree around after creating " +
+         "archive file(s)"),
+        ('dist-dir=', 'd',
+         "directory to put the source distribution archive(s) in "
+         "[default: dist]"),
+        ]
+
+    negative_opt = {}
+
+    def run(self):
+        self.run_command('egg_info')
+        ei_cmd = self.get_finalized_command('egg_info')
+        self.filelist = ei_cmd.filelist
+        self.filelist.append(os.path.join(ei_cmd.egg_info,'SOURCES.txt'))
+
+        self.check_metadata()
+        self.make_distribution()
+
+        dist_files = getattr(self.distribution,'dist_files',[])
+        for file in self.archive_files:
+            data = ('sdist', '', file)
+            if data not in dist_files:
+                dist_files.append(data)
+
+    def read_template(self):
+        try:
+            _sdist.read_template(self)
+        except:
+            # grody hack to close the template file (MANIFEST.in)
+            # this prevents easy_install's attempt at deleting the file from
+            # dying and thus masking the real error
+            sys.exc_info()[2].tb_next.tb_frame.f_locals['template'].close()
+            raise
diff --git a/Lib/setuptools/command/setopt.py b/Lib/setuptools/command/setopt.py
new file mode 100755
index 0000000..e0c1058
--- /dev/null
+++ b/Lib/setuptools/command/setopt.py
@@ -0,0 +1,158 @@
+import distutils, os
+from setuptools import Command
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import *
+
+__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
+
+
+def config_file(kind="local"):
+    """Get the filename of the distutils, local, global, or per-user config
+
+    `kind` must be one of "local", "global", or "user"
+    """
+    if kind=='local':
+        return 'setup.cfg'
+    if kind=='global':
+        return os.path.join(
+            os.path.dirname(distutils.__file__),'distutils.cfg'
+        )
+    if kind=='user':
+        dot = os.name=='posix' and '.' or ''
+        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
+    raise ValueError(
+        "config_file() type must be 'local', 'global', or 'user'", kind
+    )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def edit_config(filename, settings, dry_run=False):
+    """Edit a configuration file to include `settings`
+
+    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
+    command/section name.  A ``None`` value means to delete the entire section,
+    while a dictionary lists settings to be changed or deleted in that section.
+    A setting of ``None`` means to delete that setting.
+    """
+    from ConfigParser import RawConfigParser
+    log.debug("Reading configuration from %s", filename)
+    opts = RawConfigParser()
+    opts.read([filename])
+    for section, options in settings.items():
+        if options is None:
+            log.info("Deleting section [%s] from %s", section, filename)
+            opts.remove_section(section)
+        else:
+            if not opts.has_section(section):
+                log.debug("Adding new section [%s] to %s", section, filename)
+                opts.add_section(section)
+            for option,value in options.items():
+                if value is None:
+                    log.debug("Deleting %s.%s from %s",
+                        section, option, filename
+                    )
+                    opts.remove_option(section,option)
+                    if not opts.options(section):
+                        log.info("Deleting empty [%s] section from %s",
+                                  section, filename)
+                        opts.remove_section(section)
+                else:
+                    log.debug(
+                        "Setting %s.%s to %r in %s",
+                        section, option, value, filename
+                    )
+                    opts.set(section,option,value)
+
+    log.info("Writing %s", filename)
+    if not dry_run:
+        f = open(filename,'w'); opts.write(f); f.close()
+
+class option_base(Command):
+    """Abstract base class for commands that mess with config files"""
+
+    user_options = [
+        ('global-config', 'g',
+                 "save options to the site-wide distutils.cfg file"),
+        ('user-config', 'u',
+                 "save options to the current user's pydistutils.cfg file"),
+        ('filename=', 'f',
+                 "configuration file to use (default=setup.cfg)"),
+    ]
+
+    boolean_options = [
+        'global-config', 'user-config',
+    ]
+
+    def initialize_options(self):
+        self.global_config = None
+        self.user_config   = None
+        self.filename = None
+
+    def finalize_options(self):
+        filenames = []
+        if self.global_config:
+            filenames.append(config_file('global'))
+        if self.user_config:
+            filenames.append(config_file('user'))
+        if self.filename is not None:
+            filenames.append(self.filename)
+        if not filenames:
+            filenames.append(config_file('local'))
+        if len(filenames)>1:
+            raise DistutilsOptionError(
+                "Must specify only one configuration file option",
+                filenames
+            )
+        self.filename, = filenames
+
+
+
+
+class setopt(option_base):
+    """Save command-line options to a file"""
+
+    description = "set an option in setup.cfg or another config file"
+
+    user_options = [
+        ('command=', 'c', 'command to set an option for'),
+        ('option=',  'o',  'option to set'),
+        ('set-value=',   's', 'value of the option'),
+        ('remove',   'r', 'remove (unset) the value'),
+    ] + option_base.user_options
+
+    boolean_options = option_base.boolean_options + ['remove']
+
+    def initialize_options(self):
+        option_base.initialize_options(self)
+        self.command = None
+        self.option = None
+        self.set_value = None
+        self.remove = None
+
+    def finalize_options(self):
+        option_base.finalize_options(self)
+        if self.command is None or self.option is None:
+            raise DistutilsOptionError("Must specify --command *and* --option")
+        if self.set_value is None and not self.remove:
+            raise DistutilsOptionError("Must specify --set-value or --remove")
+
+    def run(self):
+        edit_config(
+            self.filename, {
+                self.command: {self.option.replace('-','_'):self.set_value}
+            },
+            self.dry_run
+        )
diff --git a/Lib/setuptools/command/test.py b/Lib/setuptools/command/test.py
new file mode 100644
index 0000000..01fca35
--- /dev/null
+++ b/Lib/setuptools/command/test.py
@@ -0,0 +1,119 @@
+from setuptools import Command
+from distutils.errors import DistutilsOptionError
+import sys
+from pkg_resources import *
+from unittest import TestLoader, main
+
+class ScanningLoader(TestLoader):
+
+    def loadTestsFromModule(self, module):
+        """Return a suite of all tests cases contained in the given module
+
+        If the module is a package, load tests from all the modules in it.
+        If the module has an ``additional_tests`` function, call it and add
+        the return value to the tests.
+        """
+        tests = []
+        if module.__name__!='setuptools.tests.doctest':  # ugh
+            tests.append(TestLoader.loadTestsFromModule(self,module))
+
+        if hasattr(module, "additional_tests"):
+            tests.append(module.additional_tests())
+
+        if hasattr(module, '__path__'):
+            for file in resource_listdir(module.__name__, ''):
+                if file.endswith('.py') and file!='__init__.py':
+                    submodule = module.__name__+'.'+file[:-3]
+                else:
+                    if resource_exists(
+                        module.__name__, file+'/__init__.py'
+                    ):
+                        submodule = module.__name__+'.'+file
+                    else:
+                        continue
+                tests.append(self.loadTestsFromName(submodule))
+
+        if len(tests)!=1:
+            return self.suiteClass(tests)
+        else:
+            return tests[0] # don't create a nested suite for only one return
+
+
+class test(Command):
+
+    """Command to run unit tests after in-place build"""
+
+    description = "run unit tests after in-place build"
+
+    user_options = [
+        ('test-module=','m', "Run 'test_suite' in specified module"),
+        ('test-suite=','s',
+            "Test suite to run (e.g. 'some_module.test_suite')"),
+    ]
+
+    def initialize_options(self):
+        self.test_suite = None
+        self.test_module = None
+        self.test_loader = None
+
+
+    def finalize_options(self):
+
+        if self.test_suite is None:
+            if self.test_module is None:
+                self.test_suite = self.distribution.test_suite
+            else:
+                self.test_suite = self.test_module+".test_suite"
+        elif self.test_module:
+            raise DistutilsOptionError(
+                "You may specify a module or a suite, but not both"
+            )
+
+        self.test_args = [self.test_suite]
+
+        if self.verbose:
+            self.test_args.insert(0,'--verbose')
+        if self.test_loader is None:
+            self.test_loader = getattr(self.distribution,'test_loader',None)
+        if self.test_loader is None:
+            self.test_loader = "setuptools.command.test:ScanningLoader"
+
+
+
+    def run(self):
+        # Ensure metadata is up-to-date
+        self.run_command('egg_info')
+
+        # Build extensions in-place
+        self.reinitialize_command('build_ext', inplace=1)
+        self.run_command('build_ext')
+
+        if self.distribution.tests_require:
+            self.distribution.fetch_build_eggs(self.distribution.tests_require)
+
+        if self.test_suite:
+            cmd = ' '.join(self.test_args)
+            if self.dry_run:
+                self.announce('skipping "unittest %s" (dry run)' % cmd)
+            else:
+                self.announce('running "unittest %s"' % cmd)
+                self.run_tests()
+
+
+    def run_tests(self):
+        import unittest
+        old_path = sys.path[:]
+        ei_cmd = self.get_finalized_command("egg_info")
+        path_item = normalize_path(ei_cmd.egg_base)
+        metadata = PathMetadata(
+            path_item, normalize_path(ei_cmd.egg_info)
+        )
+        dist = Distribution(path_item, metadata, project_name=ei_cmd.egg_name)
+        working_set.add(dist)
+        require(str(dist.as_requirement()))
+        loader_ep = EntryPoint.parse("x="+self.test_loader)
+        loader_class = loader_ep.load(require=False)
+        unittest.main(
+            None, None, [unittest.__file__]+self.test_args,
+            testLoader = loader_class()
+        )
diff --git a/Lib/setuptools/command/upload.py b/Lib/setuptools/command/upload.py
new file mode 100755
index 0000000..644c400
--- /dev/null
+++ b/Lib/setuptools/command/upload.py
@@ -0,0 +1,178 @@
+"""distutils.command.upload
+
+Implements the Distutils 'upload' subcommand (upload package to PyPI)."""
+
+from distutils.errors import *
+from distutils.core import Command
+from distutils.spawn import spawn
+from distutils import log
+from md5 import md5
+import os
+import socket
+import platform
+import ConfigParser
+import httplib
+import base64
+import urlparse
+import cStringIO as StringIO
+
+class upload(Command):
+
+    description = "upload binary package to PyPI"
+
+    DEFAULT_REPOSITORY = 'http://www.python.org/pypi'
+
+    user_options = [
+        ('repository=', 'r',
+         "url of repository [default: %s]" % DEFAULT_REPOSITORY),
+        ('show-response', None,
+         'display full response text from server'),
+        ('sign', 's',
+         'sign files to upload using gpg'),
+        ('identity=', 'i', 'GPG identity used to sign files'),
+        ]
+    boolean_options = ['show-response', 'sign']
+
+    def initialize_options(self):
+        self.username = ''
+        self.password = ''
+        self.repository = ''
+        self.show_response = 0
+        self.sign = False
+        self.identity = None
+
+    def finalize_options(self):
+        if self.identity and not self.sign:
+            raise DistutilsOptionError(
+                "Must use --sign for --identity to have meaning"
+            )
+        if os.environ.has_key('HOME'):
+            rc = os.path.join(os.environ['HOME'], '.pypirc')
+            if os.path.exists(rc):
+                self.announce('Using PyPI login from %s' % rc)
+                config = ConfigParser.ConfigParser({
+                        'username':'',
+                        'password':'',
+                        'repository':''})
+                config.read(rc)
+                if not self.repository:
+                    self.repository = config.get('server-login', 'repository')
+                if not self.username:
+                    self.username = config.get('server-login', 'username')
+                if not self.password:
+                    self.password = config.get('server-login', 'password')
+        if not self.repository:
+            self.repository = self.DEFAULT_REPOSITORY
+
+    def run(self):
+        if not self.distribution.dist_files:
+            raise DistutilsOptionError("No dist file created in earlier command")
+        for command, pyversion, filename in self.distribution.dist_files:
+            self.upload_file(command, pyversion, filename)
+
+    def upload_file(self, command, pyversion, filename):
+        # Sign if requested
+        if self.sign:
+            gpg_args = ["gpg", "--detach-sign", "-a", filename]
+            if self.identity:
+                gpg_args[2:2] = ["--local-user", self.identity]
+            spawn(gpg_args,
+                  dry_run=self.dry_run)
+
+        # Fill in the data
+        content = open(filename,'rb').read()
+        basename = os.path.basename(filename)
+        comment = ''
+        if command=='bdist_egg' and self.distribution.has_ext_modules():
+            comment = "built on %s" % platform.platform(terse=1)
+        data = {
+            ':action':'file_upload',
+            'protcol_version':'1',
+            'name':self.distribution.get_name(),
+            'version':self.distribution.get_version(),
+            'content':(basename,content),
+            'filetype':command,
+            'pyversion':pyversion,
+            'md5_digest':md5(content).hexdigest(),
+            }
+        if command == 'bdist_rpm':
+            dist, version, id = platform.dist()
+            if dist:
+                comment = 'built for %s %s' % (dist, version)
+        elif command == 'bdist_dumb':
+            comment = 'built for %s' % platform.platform(terse=1)
+        data['comment'] = comment
+
+        if self.sign:
+            data['gpg_signature'] = (os.path.basename(filename) + ".asc",
+                                     open(filename+".asc").read())
+
+        # set up the authentication
+        auth = "Basic " + base64.encodestring(self.username + ":" + self.password).strip()
+
+        # Build up the MIME payload for the POST data
+        boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
+        sep_boundary = '\n--' + boundary
+        end_boundary = sep_boundary + '--'
+        body = StringIO.StringIO()
+        for key, value in data.items():
+            # handle multiple entries for the same name
+            if type(value) != type([]):
+                value = [value]
+            for value in value:
+                if type(value) is tuple:
+                    fn = ';filename="%s"' % value[0]
+                    value = value[1]
+                else:
+                    fn = ""
+                value = str(value)
+                body.write(sep_boundary)
+                body.write('\nContent-Disposition: form-data; name="%s"'%key)
+                body.write(fn)
+                body.write("\n\n")
+                body.write(value)
+                if value and value[-1] == '\r':
+                    body.write('\n')  # write an extra newline (lurve Macs)
+        body.write(end_boundary)
+        body.write("\n")
+        body = body.getvalue()
+
+        self.announce("Submitting %s to %s" % (filename, self.repository), log.INFO)
+
+        # build the Request
+        # We can't use urllib2 since we need to send the Basic
+        # auth right with the first request
+        schema, netloc, url, params, query, fragments = \
+            urlparse.urlparse(self.repository)
+        assert not params and not query and not fragments
+        if schema == 'http':
+            http = httplib.HTTPConnection(netloc)
+        elif schema == 'https':
+            http = httplib.HTTPSConnection(netloc)
+        else:
+            raise AssertionError, "unsupported schema "+schema
+
+        data = ''
+        loglevel = log.INFO
+        try:
+            http.connect()
+            http.putrequest("POST", url)
+            http.putheader('Content-type',
+                           'multipart/form-data; boundary=%s'%boundary)
+            http.putheader('Content-length', str(len(body)))
+            http.putheader('Authorization', auth)
+            http.endheaders()
+            http.send(body)
+        except socket.error, e:
+            self.announce(e.msg, log.ERROR)
+            return
+
+        r = http.getresponse()
+        if r.status == 200:
+            self.announce('Server response (%s): %s' % (r.status, r.reason),
+                          log.INFO)
+        else:
+            self.announce('Upload failed (%s): %s' % (r.status, r.reason),
+                          log.ERROR)
+        if self.show_response:
+            print '-'*75, r.read(), '-'*75
diff --git a/Lib/setuptools/depends.py b/Lib/setuptools/depends.py
new file mode 100644
index 0000000..68d8194
--- /dev/null
+++ b/Lib/setuptools/depends.py
@@ -0,0 +1,239 @@
+from __future__ import generators
+import sys, imp, marshal
+from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN
+from distutils.version import StrictVersion, LooseVersion
+
+__all__ = [
+    'Require', 'find_module', 'get_module_constant', 'extract_constant'
+]
+
+class Require:
+    """A prerequisite to building or installing a distribution"""
+
+    def __init__(self,name,requested_version,module,homepage='',
+        attribute=None,format=None
+    ):
+
+        if format is None and requested_version is not None:
+            format = StrictVersion
+
+        if format is not None:
+            requested_version = format(requested_version)
+            if attribute is None:
+                attribute = '__version__'
+
+        self.__dict__.update(locals())
+        del self.self
+
+
+    def full_name(self):
+        """Return full package/distribution name, w/version"""
+        if self.requested_version is not None:
+            return '%s-%s' % (self.name,self.requested_version)
+        return self.name
+
+
+    def version_ok(self,version):
+        """Is 'version' sufficiently up-to-date?"""
+        return self.attribute is None or self.format is None or \
+            str(version)<>"unknown" and version >= self.requested_version
+
+
+    def get_version(self, paths=None, default="unknown"):
+
+        """Get version number of installed module, 'None', or 'default'
+
+        Search 'paths' for module.  If not found, return 'None'.  If found,
+        return the extracted version attribute, or 'default' if no version
+        attribute was specified, or the value cannot be determined without
+        importing the module.  The version is formatted according to the
+        requirement's version format (if any), unless it is 'None' or the
+        supplied 'default'.
+        """
+
+        if self.attribute is None:
+            try:
+                f,p,i = find_module(self.module,paths)
+                if f: f.close()
+                return default
+            except ImportError:
+                return None
+
+        v = get_module_constant(self.module,self.attribute,default,paths)
+
+        if v is not None and v is not default and self.format is not None:
+            return self.format(v)
+
+        return v
+
+
+    def is_present(self,paths=None):
+        """Return true if dependency is present on 'paths'"""
+        return self.get_version(paths) is not None
+
+
+    def is_current(self,paths=None):
+        """Return true if dependency is present and up-to-date on 'paths'"""
+        version = self.get_version(paths)
+        if version is None:
+            return False
+        return self.version_ok(version)
+
+
+def _iter_code(code):
+
+    """Yield '(op,arg)' pair for each operation in code object 'code'"""
+
+    from array import array
+    from dis import HAVE_ARGUMENT, EXTENDED_ARG
+
+    bytes = array('b',code.co_code)
+    eof = len(code.co_code)
+
+    ptr = 0
+    extended_arg = 0
+
+    while ptr<eof:
+
+        op = bytes[ptr]
+
+        if op>=HAVE_ARGUMENT:
+
+            arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg
+            ptr += 3
+
+            if op==EXTENDED_ARG:
+                extended_arg = arg * 65536L
+                continue
+
+        else:
+            arg = None
+            ptr += 1
+
+        yield op,arg
+
+
+
+
+
+
+
+
+
+
+def find_module(module, paths=None):
+    """Just like 'imp.find_module()', but with package support"""
+
+    parts = module.split('.')
+
+    while parts:
+        part = parts.pop(0)
+        f, path, (suffix,mode,kind) = info = imp.find_module(part, paths)
+
+        if kind==PKG_DIRECTORY:
+            parts = parts or ['__init__']
+            paths = [path]
+
+        elif parts:
+            raise ImportError("Can't find %r in %s" % (parts,module))
+
+    return info
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+def get_module_constant(module, symbol, default=-1, paths=None):
+
+    """Find 'module' by searching 'paths', and extract 'symbol'
+
+    Return 'None' if 'module' does not exist on 'paths', or it does not define
+    'symbol'.  If the module defines 'symbol' as a constant, return the
+    constant.  Otherwise, return 'default'."""
+
+    try:
+        f, path, (suffix,mode,kind) = find_module(module,paths)
+    except ImportError:
+        # Module doesn't exist
+        return None
+
+    try:
+        if kind==PY_COMPILED:
+            f.read(8)   # skip magic & date
+            code = marshal.load(f)
+        elif kind==PY_FROZEN:
+            code = imp.get_frozen_object(module)
+        elif kind==PY_SOURCE:
+            code = compile(f.read(), path, 'exec')
+        else:
+            # Not something we can parse; we'll have to import it.  :(
+            if module not in sys.modules:
+                imp.load_module(module,f,path,(suffix,mode,kind))
+            return getattr(sys.modules[module],symbol,None)
+
+    finally:
+        if f:
+            f.close()
+
+    return extract_constant(code,symbol,default)
+
+
+
+
+
+
+
+
+def extract_constant(code,symbol,default=-1):
+
+    """Extract the constant value of 'symbol' from 'code'
+
+    If the name 'symbol' is bound to a constant value by the Python code
+    object 'code', return that value.  If 'symbol' is bound to an expression,
+    return 'default'.  Otherwise, return 'None'.
+
+    Return value is based on the first assignment to 'symbol'.  'symbol' must
+    be a global, or at least a non-"fast" local in the code block.  That is,
+    only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
+    must be present in 'code.co_names'.
+    """
+
+    if symbol not in code.co_names:
+        # name's not there, can't possibly be an assigment
+        return None
+
+    name_idx = list(code.co_names).index(symbol)
+
+    STORE_NAME = 90
+    STORE_GLOBAL = 97
+    LOAD_CONST = 100
+
+    const = default
+
+    for op, arg in _iter_code(code):
+
+        if op==LOAD_CONST:
+            const = code.co_consts[arg]
+        elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL):
+            return const
+        else:
+            const = default
diff --git a/Lib/setuptools/dist.py b/Lib/setuptools/dist.py
new file mode 100644
index 0000000..f0417c1
--- /dev/null
+++ b/Lib/setuptools/dist.py
@@ -0,0 +1,798 @@
+__all__ = ['Distribution']
+
+from distutils.core import Distribution as _Distribution
+from setuptools.depends import Require
+from setuptools.command.install import install
+from setuptools.command.sdist import sdist
+from setuptools.command.install_lib import install_lib
+from distutils.errors import DistutilsOptionError, DistutilsPlatformError
+from distutils.errors import DistutilsSetupError
+import setuptools, pkg_resources, distutils.core, distutils.dist, distutils.cmd
+import os
+
+def _get_unpatched(cls):
+    """Protect against re-patching the distutils if reloaded
+
+    Also ensures that no other distutils extension monkeypatched the distutils
+    first.
+    """
+    while cls.__module__.startswith('setuptools'):
+        cls, = cls.__bases__
+    if not cls.__module__.startswith('distutils'):
+        raise AssertionError(
+            "distutils has already been patched by %r" % cls
+        )
+    return cls
+
+_Distribution = _get_unpatched(_Distribution)
+
+sequence = tuple, list
+
+def check_importable(dist, attr, value):
+    try:
+        ep = pkg_resources.EntryPoint.parse('x='+value)
+        assert not ep.extras
+    except (TypeError,ValueError,AttributeError,AssertionError):
+        raise DistutilsSetupError(
+            "%r must be importable 'module:attrs' string (got %r)"
+            % (attr,value)
+        )
+
+
+def assert_string_list(dist, attr, value):
+    """Verify that value is a string list or None"""
+    try:
+        assert ''.join(value)!=value
+    except (TypeError,ValueError,AttributeError,AssertionError):
+        raise DistutilsSetupError(
+            "%r must be a list of strings (got %r)" % (attr,value)
+        )
+
+def check_nsp(dist, attr, value):
+    """Verify that namespace packages are valid"""
+    assert_string_list(dist,attr,value)
+
+    for nsp in value:
+        if not dist.has_contents_for(nsp):
+            raise DistutilsSetupError(
+                "Distribution contains no modules or packages for " +
+                "namespace package %r" % nsp
+            )
+
+def check_extras(dist, attr, value):
+    """Verify that extras_require mapping is valid"""
+    try:
+        for k,v in value.items():
+            list(pkg_resources.parse_requirements(v))
+    except (TypeError,ValueError,AttributeError):
+        raise DistutilsSetupError(
+            "'extras_require' must be a dictionary whose values are "
+            "strings or lists of strings containing valid project/version "
+            "requirement specifiers."
+        )
+
+def assert_bool(dist, attr, value):
+    """Verify that value is True, False, 0, or 1"""
+    if bool(value) != value:
+        raise DistutilsSetupError(
+            "%r must be a boolean value (got %r)" % (attr,value)
+        )
+
+
+
+def check_requirements(dist, attr, value):
+    """Verify that install_requires is a valid requirements list"""
+    try:
+        list(pkg_resources.parse_requirements(value))
+    except (TypeError,ValueError):
+        raise DistutilsSetupError(
+            "%r must be a string or list of strings "
+            "containing valid project/version requirement specifiers" % (attr,)
+        )
+
+def check_entry_points(dist, attr, value):
+    """Verify that entry_points map is parseable"""
+    try:
+        pkg_resources.EntryPoint.parse_map(value)
+    except ValueError, e:
+        raise DistutilsSetupError(e)
+
+
+def check_test_suite(dist, attr, value):
+    if not isinstance(value,basestring):
+        raise DistutilsSetupError("test_suite must be a string")
+
+
+def check_package_data(dist, attr, value):
+    """Verify that value is a dictionary of package names to glob lists"""
+    if isinstance(value,dict):
+        for k,v in value.items():
+            if not isinstance(k,str): break
+            try: iter(v)
+            except TypeError:
+                break
+        else:
+            return
+    raise DistutilsSetupError(
+        attr+" must be a dictionary mapping package names to lists of "
+        "wildcard patterns"
+    )
+
+
+
+
+class Distribution(_Distribution):
+    """Distribution with support for features, tests, and package data
+
+    This is an enhanced version of 'distutils.dist.Distribution' that
+    effectively adds the following new optional keyword arguments to 'setup()':
+
+     'install_requires' -- a string or sequence of strings specifying project
+        versions that the distribution requires when installed, in the format
+        used by 'pkg_resources.require()'.  They will be installed
+        automatically when the package is installed.  If you wish to use
+        packages that are not available in PyPI, or want to give your users an
+        alternate download location, you can add a 'find_links' option to the
+        '[easy_install]' section of your project's 'setup.cfg' file, and then
+        setuptools will scan the listed web pages for links that satisfy the
+        requirements.
+
+     'extras_require' -- a dictionary mapping names of optional "extras" to the
+        additional requirement(s) that using those extras incurs. For example,
+        this::
+
+            extras_require = dict(reST = ["docutils>=0.3", "reSTedit"])
+
+        indicates that the distribution can optionally provide an extra
+        capability called "reST", but it can only be used if docutils and
+        reSTedit are installed.  If the user installs your package using
+        EasyInstall and requests one of your extras, the corresponding
+        additional requirements will be installed if needed.
+
+     'features' -- a dictionary mapping option names to 'setuptools.Feature'
+        objects.  Features are a portion of the distribution that can be
+        included or excluded based on user options, inter-feature dependencies,
+        and availability on the current system.  Excluded features are omitted
+        from all setup commands, including source and binary distributions, so
+        you can create multiple distributions from the same source tree.
+        Feature names should be valid Python identifiers, except that they may
+        contain the '-' (minus) sign.  Features can be included or excluded
+        via the command line options '--with-X' and '--without-X', where 'X' is
+        the name of the feature.  Whether a feature is included by default, and
+        whether you are allowed to control this from the command line, is
+        determined by the Feature object.  See the 'Feature' class for more
+        information.
+
+     'test_suite' -- the name of a test suite to run for the 'test' command.
+        If the user runs 'python setup.py test', the package will be installed,
+        and the named test suite will be run.  The format is the same as
+        would be used on a 'unittest.py' command line.  That is, it is the
+        dotted name of an object to import and call to generate a test suite.
+
+     'package_data' -- a dictionary mapping package names to lists of filenames
+        or globs to use to find data files contained in the named packages.
+        If the dictionary has filenames or globs listed under '""' (the empty
+        string), those names will be searched for in every package, in addition
+        to any names for the specific package.  Data files found using these
+        names/globs will be installed along with the package, in the same
+        location as the package.  Note that globs are allowed to reference
+        the contents of non-package subdirectories, as long as you use '/' as
+        a path separator.  (Globs are automatically converted to
+        platform-specific paths at runtime.)
+
+    In addition to these new keywords, this class also has several new methods
+    for manipulating the distribution's contents.  For example, the 'include()'
+    and 'exclude()' methods can be thought of as in-place add and subtract
+    commands that add or remove packages, modules, extensions, and so on from
+    the distribution.  They are used by the feature subsystem to configure the
+    distribution for the included and excluded features.
+    """
+
+    _patched_dist = None
+
+    def patch_missing_pkg_info(self, attrs):
+        # Fake up a replacement for the data that would normally come from
+        # PKG-INFO, but which might not yet be built if this is a fresh
+        # checkout.
+        #
+        if not attrs or 'name' not in attrs or 'version' not in attrs:
+            return
+        key = pkg_resources.safe_name(str(attrs['name'])).lower()
+        dist = pkg_resources.working_set.by_key.get(key)
+        if dist is not None and not dist.has_metadata('PKG-INFO'):
+            dist._version = pkg_resources.safe_version(str(attrs['version']))
+            self._patched_dist = dist
+
+    def __init__ (self, attrs=None):
+        have_package_data = hasattr(self, "package_data")
+        if not have_package_data:
+            self.package_data = {}
+        self.require_features = []
+        self.features = {}
+        self.dist_files = []
+        self.patch_missing_pkg_info(attrs)
+        # Make sure we have any eggs needed to interpret 'attrs'
+        if attrs and 'dependency_links' in attrs:
+            self.dependency_links = attrs.pop('dependency_links')
+            assert_string_list(self,'dependency_links',self.dependency_links)
+        if attrs and 'setup_requires' in attrs:
+            self.fetch_build_eggs(attrs.pop('setup_requires'))
+        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+            if not hasattr(self,ep.name):
+                setattr(self,ep.name,None)
+        _Distribution.__init__(self,attrs)
+        if isinstance(self.metadata.version, (int,long,float)):
+            # Some people apparently take "version number" too literally :)
+            self.metadata.version = str(self.metadata.version)
+
+    def parse_command_line(self):
+        """Process features after parsing command line options"""
+        result = _Distribution.parse_command_line(self)
+        if self.features:
+            self._finalize_features()
+        return result
+
+    def _feature_attrname(self,name):
+        """Convert feature name to corresponding option attribute name"""
+        return 'with_'+name.replace('-','_')
+
+    def fetch_build_eggs(self, requires):
+        """Resolve pre-setup requirements"""
+        from pkg_resources import working_set, parse_requirements
+        for dist in working_set.resolve(
+            parse_requirements(requires), installer=self.fetch_build_egg
+        ):
+            working_set.add(dist)
+
+    def finalize_options(self):
+        _Distribution.finalize_options(self)
+        if self.features:
+            self._set_global_opts_from_features()
+
+        for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'):
+            value = getattr(self,ep.name,None)
+            if value is not None:
+                ep.require(installer=self.fetch_build_egg)
+                ep.load()(self, ep.name, value)
+
+    def fetch_build_egg(self, req):
+        """Fetch an egg needed for building"""
+        try:
+            cmd = self._egg_fetcher
+        except AttributeError:
+            from setuptools.command.easy_install import easy_install
+            dist = self.__class__({'script_args':['easy_install']})
+            dist.parse_config_files()
+            opts = dist.get_option_dict('easy_install')
+            keep = (
+                'find_links', 'site_dirs', 'index_url', 'optimize',
+                'site_dirs', 'allow_hosts'
+            )
+            for key in opts.keys():
+                if key not in keep:
+                    del opts[key]   # don't use any other settings
+            if self.dependency_links:
+                links = self.dependency_links[:]
+                if 'find_links' in opts:
+                    links = opts['find_links'][1].split() + links
+                opts['find_links'] = ('setup', links)
+            cmd = easy_install(
+                dist, args=["x"], install_dir=os.curdir, exclude_scripts=True,
+                always_copy=False, build_directory=None, editable=False,
+                upgrade=False, multi_version=True, no_report = True
+            )
+            cmd.ensure_finalized()
+            self._egg_fetcher = cmd
+        return cmd.easy_install(req)
+
+    def _set_global_opts_from_features(self):
+        """Add --with-X/--without-X options based on optional features"""
+
+        go = []
+        no = self.negative_opt.copy()
+
+        for name,feature in self.features.items():
+            self._set_feature(name,None)
+            feature.validate(self)
+
+            if feature.optional:
+                descr = feature.description
+                incdef = ' (default)'
+                excdef=''
+                if not feature.include_by_default():
+                    excdef, incdef = incdef, excdef
+
+                go.append(('with-'+name, None, 'include '+descr+incdef))
+                go.append(('without-'+name, None, 'exclude '+descr+excdef))
+                no['without-'+name] = 'with-'+name
+
+        self.global_options = self.feature_options = go + self.global_options
+        self.negative_opt = self.feature_negopt = no
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def _finalize_features(self):
+        """Add/remove features and resolve dependencies between them"""
+
+        # First, flag all the enabled items (and thus their dependencies)
+        for name,feature in self.features.items():
+            enabled = self.feature_is_included(name)
+            if enabled or (enabled is None and feature.include_by_default()):
+                feature.include_in(self)
+                self._set_feature(name,1)
+
+        # Then disable the rest, so that off-by-default features don't
+        # get flagged as errors when they're required by an enabled feature
+        for name,feature in self.features.items():
+            if not self.feature_is_included(name):
+                feature.exclude_from(self)
+                self._set_feature(name,0)
+
+
+    def get_command_class(self, command):
+        """Pluggable version of get_command_class()"""
+        if command in self.cmdclass:
+            return self.cmdclass[command]
+
+        for ep in pkg_resources.iter_entry_points('distutils.commands',command):
+            ep.require(installer=self.fetch_build_egg)
+            self.cmdclass[command] = cmdclass = ep.load()
+            return cmdclass
+        else:
+            return _Distribution.get_command_class(self, command)
+
+    def print_commands(self):
+        for ep in pkg_resources.iter_entry_points('distutils.commands'):
+            if ep.name not in self.cmdclass:
+                cmdclass = ep.load(False) # don't require extras, we're not running
+                self.cmdclass[ep.name] = cmdclass
+        return _Distribution.print_commands(self)
+
+
+
+
+
+    def _set_feature(self,name,status):
+        """Set feature's inclusion status"""
+        setattr(self,self._feature_attrname(name),status)
+
+    def feature_is_included(self,name):
+        """Return 1 if feature is included, 0 if excluded, 'None' if unknown"""
+        return getattr(self,self._feature_attrname(name))
+
+    def include_feature(self,name):
+        """Request inclusion of feature named 'name'"""
+
+        if self.feature_is_included(name)==0:
+            descr = self.features[name].description
+            raise DistutilsOptionError(
+               descr + " is required, but was excluded or is not available"
+           )
+        self.features[name].include_in(self)
+        self._set_feature(name,1)
+
+    def include(self,**attrs):
+        """Add items to distribution that are named in keyword arguments
+
+        For example, 'dist.exclude(py_modules=["x"])' would add 'x' to
+        the distribution's 'py_modules' attribute, if it was not already
+        there.
+
+        Currently, this method only supports inclusion for attributes that are
+        lists or tuples.  If you need to add support for adding to other
+        attributes in this or a subclass, you can add an '_include_X' method,
+        where 'X' is the name of the attribute.  The method will be called with
+        the value passed to 'include()'.  So, 'dist.include(foo={"bar":"baz"})'
+        will try to call 'dist._include_foo({"bar":"baz"})', which can then
+        handle whatever special inclusion logic is needed.
+        """
+        for k,v in attrs.items():
+            include = getattr(self, '_include_'+k, None)
+            if include:
+                include(v)
+            else:
+                self._include_misc(k,v)
+
+    def exclude_package(self,package):
+        """Remove packages, modules, and extensions in named package"""
+
+        pfx = package+'.'
+        if self.packages:
+            self.packages = [
+                p for p in self.packages
+                    if p<>package and not p.startswith(pfx)
+            ]
+
+        if self.py_modules:
+            self.py_modules = [
+                p for p in self.py_modules
+                    if p<>package and not p.startswith(pfx)
+            ]
+
+        if self.ext_modules:
+            self.ext_modules = [
+                p for p in self.ext_modules
+                    if p.name<>package and not p.name.startswith(pfx)
+            ]
+
+
+    def has_contents_for(self,package):
+        """Return true if 'exclude_package(package)' would do something"""
+
+        pfx = package+'.'
+
+        for p in self.iter_distribution_names():
+            if p==package or p.startswith(pfx):
+                return True
+
+
+
+
+
+
+
+
+
+
+    def _exclude_misc(self,name,value):
+        """Handle 'exclude()' for list/tuple attrs without a special handler"""
+        if not isinstance(value,sequence):
+            raise DistutilsSetupError(
+                "%s: setting must be a list or tuple (%r)" % (name, value)
+            )
+        try:
+            old = getattr(self,name)
+        except AttributeError:
+            raise DistutilsSetupError(
+                "%s: No such distribution setting" % name
+            )
+        if old is not None and not isinstance(old,sequence):
+            raise DistutilsSetupError(
+                name+": this setting cannot be changed via include/exclude"
+            )
+        elif old:
+            setattr(self,name,[item for item in old if item not in value])
+
+    def _include_misc(self,name,value):
+        """Handle 'include()' for list/tuple attrs without a special handler"""
+
+        if not isinstance(value,sequence):
+            raise DistutilsSetupError(
+                "%s: setting must be a list (%r)" % (name, value)
+            )
+        try:
+            old = getattr(self,name)
+        except AttributeError:
+            raise DistutilsSetupError(
+                "%s: No such distribution setting" % name
+            )
+        if old is None:
+            setattr(self,name,value)
+        elif not isinstance(old,sequence):
+            raise DistutilsSetupError(
+                name+": this setting cannot be changed via include/exclude"
+            )
+        else:
+            setattr(self,name,old+[item for item in value if item not in old])
+
+    def exclude(self,**attrs):
+        """Remove items from distribution that are named in keyword arguments
+
+        For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from
+        the distribution's 'py_modules' attribute.  Excluding packages uses
+        the 'exclude_package()' method, so all of the package's contained
+        packages, modules, and extensions are also excluded.
+
+        Currently, this method only supports exclusion from attributes that are
+        lists or tuples.  If you need to add support for excluding from other
+        attributes in this or a subclass, you can add an '_exclude_X' method,
+        where 'X' is the name of the attribute.  The method will be called with
+        the value passed to 'exclude()'.  So, 'dist.exclude(foo={"bar":"baz"})'
+        will try to call 'dist._exclude_foo({"bar":"baz"})', which can then
+        handle whatever special exclusion logic is needed.
+        """
+        for k,v in attrs.items():
+            exclude = getattr(self, '_exclude_'+k, None)
+            if exclude:
+                exclude(v)
+            else:
+                self._exclude_misc(k,v)
+
+    def _exclude_packages(self,packages):
+        if not isinstance(packages,sequence):
+            raise DistutilsSetupError(
+                "packages: setting must be a list or tuple (%r)" % (packages,)
+            )
+        map(self.exclude_package, packages)
+
+
+
+
+
+
+
+
+
+
+
+
+    def _parse_command_opts(self, parser, args):
+        # Remove --with-X/--without-X options when processing command args
+        self.global_options = self.__class__.global_options
+        self.negative_opt = self.__class__.negative_opt
+
+        # First, expand any aliases
+        command = args[0]
+        aliases = self.get_option_dict('aliases')
+        while command in aliases:
+            src,alias = aliases[command]
+            del aliases[command]    # ensure each alias can expand only once!
+            import shlex
+            args[:1] = shlex.split(alias,True)
+            command = args[0]
+
+        nargs = _Distribution._parse_command_opts(self, parser, args)
+
+        # Handle commands that want to consume all remaining arguments
+        cmd_class = self.get_command_class(command)
+        if getattr(cmd_class,'command_consumes_arguments',None):
+            self.get_option_dict(command)['args'] = ("command line", nargs)
+            if nargs is not None:
+                return []
+
+        return nargs
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def get_cmdline_options(self):
+        """Return a '{cmd: {opt:val}}' map of all command-line options
+
+        Option names are all long, but do not include the leading '--', and
+        contain dashes rather than underscores.  If the option doesn't take
+        an argument (e.g. '--quiet'), the 'val' is 'None'.
+
+        Note that options provided by config files are intentionally excluded.
+        """
+
+        d = {}
+
+        for cmd,opts in self.command_options.items():
+
+            for opt,(src,val) in opts.items():
+
+                if src != "command line":
+                    continue
+
+                opt = opt.replace('_','-')
+
+                if val==0:
+                    cmdobj = self.get_command_obj(cmd)
+                    neg_opt = self.negative_opt.copy()
+                    neg_opt.update(getattr(cmdobj,'negative_opt',{}))
+                    for neg,pos in neg_opt.items():
+                        if pos==opt:
+                            opt=neg
+                            val=None
+                            break
+                    else:
+                        raise AssertionError("Shouldn't be able to get here")
+
+                elif val==1:
+                    val = None
+
+                d.setdefault(cmd,{})[opt] = val
+
+        return d
+
+
+    def iter_distribution_names(self):
+        """Yield all packages, modules, and extension names in distribution"""
+
+        for pkg in self.packages or ():
+            yield pkg
+
+        for module in self.py_modules or ():
+            yield module
+
+        for ext in self.ext_modules or ():
+            if isinstance(ext,tuple):
+                name,buildinfo = ext
+                yield name
+            else:
+                yield ext.name
+
+# Install it throughout the distutils
+for module in distutils.dist, distutils.core, distutils.cmd:
+    module.Distribution = Distribution
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class Feature:
+    """A subset of the distribution that can be excluded if unneeded/wanted
+
+    Features are created using these keyword arguments:
+
+      'description' -- a short, human readable description of the feature, to
+         be used in error messages, and option help messages.
+
+      'standard' -- if true, the feature is included by default if it is
+         available on the current system.  Otherwise, the feature is only
+         included if requested via a command line '--with-X' option, or if
+         another included feature requires it.  The default setting is 'False'.
+
+      'available' -- if true, the feature is available for installation on the
+         current system.  The default setting is 'True'.
+
+      'optional' -- if true, the feature's inclusion can be controlled from the
+         command line, using the '--with-X' or '--without-X' options.  If
+         false, the feature's inclusion status is determined automatically,
+         based on 'availabile', 'standard', and whether any other feature
+         requires it.  The default setting is 'True'.
+
+      'require_features' -- a string or sequence of strings naming features
+         that should also be included if this feature is included.  Defaults to
+         empty list.  May also contain 'Require' objects that should be
+         added/removed from the distribution.
+
+      'remove' -- a string or list of strings naming packages to be removed
+         from the distribution if this feature is *not* included.  If the
+         feature *is* included, this argument is ignored.  This argument exists
+         to support removing features that "crosscut" a distribution, such as
+         defining a 'tests' feature that removes all the 'tests' subpackages
+         provided by other features.  The default for this argument is an empty
+         list.  (Note: the named package(s) or modules must exist in the base
+         distribution when the 'setup()' function is initially called.)
+
+      other keywords -- any other keyword arguments are saved, and passed to
+         the distribution's 'include()' and 'exclude()' methods when the
+         feature is included or excluded, respectively.  So, for example, you
+         could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be
+         added or removed from the distribution as appropriate.
+
+    A feature must include at least one 'requires', 'remove', or other
+    keyword argument.  Otherwise, it can't affect the distribution in any way.
+    Note also that you can subclass 'Feature' to create your own specialized
+    feature types that modify the distribution in other ways when included or
+    excluded.  See the docstrings for the various methods here for more detail.
+    Aside from the methods, the only feature attributes that distributions look
+    at are 'description' and 'optional'.
+    """
+    def __init__(self, description, standard=False, available=True,
+        optional=True, require_features=(), remove=(), **extras
+    ):
+
+        self.description = description
+        self.standard = standard
+        self.available = available
+        self.optional = optional
+        if isinstance(require_features,(str,Require)):
+            require_features = require_features,
+
+        self.require_features = [
+            r for r in require_features if isinstance(r,str)
+        ]
+        er = [r for r in require_features if not isinstance(r,str)]
+        if er: extras['require_features'] = er
+
+        if isinstance(remove,str):
+            remove = remove,
+        self.remove = remove
+        self.extras = extras
+
+        if not remove and not require_features and not extras:
+            raise DistutilsSetupError(
+                "Feature %s: must define 'require_features', 'remove', or at least one"
+                " of 'packages', 'py_modules', etc."
+            )
+
+    def include_by_default(self):
+        """Should this feature be included by default?"""
+        return self.available and self.standard
+
+    def include_in(self,dist):
+
+        """Ensure feature and its requirements are included in distribution
+
+        You may override this in a subclass to perform additional operations on
+        the distribution.  Note that this method may be called more than once
+        per feature, and so should be idempotent.
+
+        """
+
+        if not self.available:
+            raise DistutilsPlatformError(
+                self.description+" is required,"
+                "but is not available on this platform"
+            )
+
+        dist.include(**self.extras)
+
+        for f in self.require_features:
+            dist.include_feature(f)
+
+
+
+    def exclude_from(self,dist):
+
+        """Ensure feature is excluded from distribution
+
+        You may override this in a subclass to perform additional operations on
+        the distribution.  This method will be called at most once per
+        feature, and only after all included features have been asked to
+        include themselves.
+        """
+
+        dist.exclude(**self.extras)
+
+        if self.remove:
+            for item in self.remove:
+                dist.exclude_package(item)
+
+
+
+    def validate(self,dist):
+
+        """Verify that feature makes sense in context of distribution
+
+        This method is called by the distribution just before it parses its
+        command line.  It checks to ensure that the 'remove' attribute, if any,
+        contains only valid package/module names that are present in the base
+        distribution when 'setup()' is called.  You may override it in a
+        subclass to perform any other required validation of the feature
+        against a target distribution.
+        """
+
+        for item in self.remove:
+            if not dist.has_contents_for(item):
+                raise DistutilsSetupError(
+                    "%s wants to be able to remove %s, but the distribution"
+                    " doesn't contain any packages or modules under %s"
+                    % (self.description, item, item)
+                )
diff --git a/Lib/setuptools/extension.py b/Lib/setuptools/extension.py
new file mode 100644
index 0000000..cfcf55b
--- /dev/null
+++ b/Lib/setuptools/extension.py
@@ -0,0 +1,35 @@
+from distutils.core import Extension as _Extension
+from dist import _get_unpatched
+_Extension = _get_unpatched(_Extension)
+
+try:
+    from Pyrex.Distutils.build_ext import build_ext
+except ImportError:
+    have_pyrex = False
+else:
+    have_pyrex = True
+
+
+class Extension(_Extension):
+    """Extension that uses '.c' files in place of '.pyx' files"""
+
+    if not have_pyrex:
+        # convert .pyx extensions to .c
+        def __init__(self,*args,**kw):
+            _Extension.__init__(self,*args,**kw)
+            sources = []
+            for s in self.sources:
+                if s.endswith('.pyx'):
+                    sources.append(s[:-3]+'c')
+                else:
+                    sources.append(s)
+            self.sources = sources
+
+class Library(Extension):
+    """Just like a regular Extension, but built as a library instead"""
+
+import sys, distutils.core, distutils.extension
+distutils.core.Extension = Extension
+distutils.extension.Extension = Extension
+if 'distutils.command.build_ext' in sys.modules:
+    sys.modules['distutils.command.build_ext'].Extension = Extension
diff --git a/Lib/setuptools/gui.exe b/Lib/setuptools/gui.exe
new file mode 100755
index 0000000..63ff35f
--- /dev/null
+++ b/Lib/setuptools/gui.exe
Binary files differ
diff --git a/Lib/setuptools/package_index.py b/Lib/setuptools/package_index.py
new file mode 100755
index 0000000..107e222
--- /dev/null
+++ b/Lib/setuptools/package_index.py
@@ -0,0 +1,674 @@
+"""PyPI and direct package downloading"""
+
+import sys, os.path, re, urlparse, urllib2, shutil, random, socket
+from pkg_resources import *
+from distutils import log
+from distutils.errors import DistutilsError
+from md5 import md5
+from fnmatch import translate
+
+EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$')
+HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I)
+# this is here to fix emacs' cruddy broken syntax highlighting
+PYPI_MD5 = re.compile(
+    '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a href="[^?]+\?:action=show_md5'
+    '&amp;digest=([0-9a-f]{32})">md5</a>\\)'
+)
+
+URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match
+EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split()
+
+__all__ = [
+    'PackageIndex', 'distros_for_url', 'parse_bdist_wininst',
+    'interpret_distro_name',
+]
+
+
+def parse_bdist_wininst(name):
+    """Return (base,pyversion) or (None,None) for possible .exe name"""
+
+    lower = name.lower()
+    base, py_ver = None, None
+
+    if lower.endswith('.exe'):
+        if lower.endswith('.win32.exe'):
+            base = name[:-10]
+        elif lower.startswith('.win32-py',-16):
+            py_ver = name[-7:-4]
+            base = name[:-16]
+
+    return base,py_ver
+
+def egg_info_for_url(url):
+    scheme, server, path, parameters, query, fragment = urlparse.urlparse(url)
+    base = urllib2.unquote(path.split('/')[-1])
+    if '#' in base: base, fragment = base.split('#',1)
+    return base,fragment
+
+def distros_for_url(url, metadata=None):
+    """Yield egg or source distribution objects that might be found at a URL"""
+    base, fragment = egg_info_for_url(url)
+    dists = distros_for_location(url, base, metadata)
+    if fragment and not dists:
+        match = EGG_FRAGMENT.match(fragment)
+        if match:
+            return interpret_distro_name(
+                url, match.group(1), metadata, precedence = CHECKOUT_DIST
+            )
+    return dists
+
+def distros_for_location(location, basename, metadata=None):
+    """Yield egg or source distribution objects based on basename"""
+    if basename.endswith('.egg.zip'):
+        basename = basename[:-4]    # strip the .zip
+    if basename.endswith('.egg'):   # only one, unambiguous interpretation
+        return [Distribution.from_location(location, basename, metadata)]
+
+    if basename.endswith('.exe'):
+        win_base, py_ver = parse_bdist_wininst(basename)
+        if win_base is not None:
+            return interpret_distro_name(
+                location, win_base, metadata, py_ver, BINARY_DIST, "win32"
+            )
+
+    # Try source distro extensions (.zip, .tgz, etc.)
+    #
+    for ext in EXTENSIONS:
+        if basename.endswith(ext):
+            basename = basename[:-len(ext)]
+            return interpret_distro_name(location, basename, metadata)
+    return []  # no extension matched
+
+
+def distros_for_filename(filename, metadata=None):
+    """Yield possible egg or source distribution objects based on a filename"""
+    return distros_for_location(
+        normalize_path(filename), os.path.basename(filename), metadata
+    )
+
+
+def interpret_distro_name(location, basename, metadata,
+    py_version=None, precedence=SOURCE_DIST, platform=None
+):
+    """Generate alternative interpretations of a source distro name
+
+    Note: if `location` is a filesystem filename, you should call
+    ``pkg_resources.normalize_path()`` on it before passing it to this
+    routine!
+    """
+
+    # Generate alternative interpretations of a source distro name
+    # Because some packages are ambiguous as to name/versions split
+    # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc.
+    # So, we generate each possible interepretation (e.g. "adns, python-1.1.0"
+    # "adns-python, 1.1.0", and "adns-python-1.1.0, no version").  In practice,
+    # the spurious interpretations should be ignored, because in the event
+    # there's also an "adns" package, the spurious "python-1.1.0" version will
+    # compare lower than any numeric version number, and is therefore unlikely
+    # to match a request for it.  It's still a potential problem, though, and
+    # in the long run PyPI and the distutils should go for "safe" names and
+    # versions in distribution archive names (sdist and bdist).
+
+    parts = basename.split('-')
+    for p in range(1,len(parts)+1):
+        yield Distribution(
+            location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]),
+            py_version=py_version, precedence = precedence,
+            platform = platform
+        )
+
+
+
+
+
+class PackageIndex(Environment):
+    """A distribution index that scans web pages for download URLs"""
+
+    def __init__(self,index_url="http://www.python.org/pypi",hosts=('*',),*args,**kw):
+        Environment.__init__(self,*args,**kw)
+        self.index_url = index_url + "/"[:not index_url.endswith('/')]
+        self.scanned_urls = {}
+        self.fetched_urls = {}
+        self.package_pages = {}
+        self.allows = re.compile('|'.join(map(translate,hosts))).match
+        self.to_scan = []
+
+    def process_url(self, url, retrieve=False):
+        """Evaluate a URL as a possible download, and maybe retrieve it"""
+        url = fix_sf_url(url)
+        if url in self.scanned_urls and not retrieve:
+            return
+        self.scanned_urls[url] = True
+        if not URL_SCHEME(url):
+            self.process_filename(url)
+            return
+        else:
+            dists = list(distros_for_url(url))
+            if dists:
+                if not self.url_ok(url):
+                    return
+                self.debug("Found link: %s", url)
+
+        if dists or not retrieve or url in self.fetched_urls:
+            map(self.add, dists)
+            return  # don't need the actual page
+
+        if not self.url_ok(url):
+            self.fetched_urls[url] = True
+            return
+
+        self.info("Reading %s", url)
+        f = self.open_url(url)
+        self.fetched_urls[url] = self.fetched_urls[f.url] = True
+
+
+        if 'html' not in f.headers['content-type'].lower():
+            f.close()   # not html, we can't process it
+            return
+
+        base = f.url     # handle redirects
+        page = f.read()
+        f.close()
+        if url.startswith(self.index_url):
+            page = self.process_index(url, page)
+
+        for match in HREF.finditer(page):
+            link = urlparse.urljoin(base, match.group(1))
+            self.process_url(link)
+
+    def process_filename(self, fn, nested=False):
+        # process filenames or directories
+        if not os.path.exists(fn):
+            self.warn("Not found: %s", url)
+            return
+
+        if os.path.isdir(fn) and not nested:
+            path = os.path.realpath(fn)
+            for item in os.listdir(path):
+                self.process_filename(os.path.join(path,item), True)
+
+        dists = distros_for_filename(fn)
+        if dists:
+            self.debug("Found: %s", fn)
+            map(self.add, dists)
+
+    def url_ok(self, url, fatal=False):
+        if self.allows(urlparse.urlparse(url)[1]):
+            return True
+        msg = "\nLink to % s ***BLOCKED*** by --allow-hosts\n"
+        if fatal:
+            raise DistutilsError(msg % url)
+        else:
+            self.warn(msg, url)
+
+
+
+    def process_index(self,url,page):
+        """Process the contents of a PyPI page"""
+        def scan(link):
+            # Process a URL to see if it's for a package page
+            if link.startswith(self.index_url):
+                parts = map(
+                    urllib2.unquote, link[len(self.index_url):].split('/')
+                )
+                if len(parts)==2:
+                    # it's a package page, sanitize and index it
+                    pkg = safe_name(parts[0])
+                    ver = safe_version(parts[1])
+                    self.package_pages.setdefault(pkg.lower(),{})[link] = True
+                    return to_filename(pkg), to_filename(ver)
+            return None, None
+
+        if url==self.index_url or 'Index of Packages</title>' in page:
+            # process an index page into the package-page index
+            for match in HREF.finditer(page):
+                scan( urlparse.urljoin(url, match.group(1)) )
+        else:
+            pkg,ver = scan(url)   # ensure this page is in the page index
+            # process individual package page
+            for tag in ("<th>Home Page", "<th>Download URL"):
+                pos = page.find(tag)
+                if pos!=-1:
+                    match = HREF.search(page,pos)
+                    if match:
+                        # Process the found URL
+                        new_url = urlparse.urljoin(url, match.group(1))
+                        base, frag = egg_info_for_url(new_url)
+                        if base.endswith('.py') and not frag:
+                            if pkg and ver:
+                                new_url+='#egg=%s-%s' % (pkg,ver)
+                            else:
+                                self.need_version_info(url)
+                        self.scan_url(new_url)
+        return PYPI_MD5.sub(
+            lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page
+        )
+
+    def need_version_info(self, url):
+        self.scan_all(
+            "Page at %s links to .py file(s) without version info; an index "
+            "scan is required.", url
+        )
+
+    def scan_all(self, msg=None, *args):
+        if self.index_url not in self.fetched_urls:
+            if msg: self.warn(msg,*args)
+            self.warn(
+                "Scanning index of all packages (this may take a while)"
+            )
+        self.scan_url(self.index_url)
+
+    def find_packages(self, requirement):
+        self.scan_url(self.index_url + requirement.unsafe_name+'/')
+
+        if not self.package_pages.get(requirement.key):
+            # Fall back to safe version of the name
+            self.scan_url(self.index_url + requirement.project_name+'/')
+
+        if not self.package_pages.get(requirement.key):
+            # We couldn't find the target package, so search the index page too
+            self.warn(
+                "Couldn't find index page for %r (maybe misspelled?)",
+                requirement.unsafe_name
+            )
+            self.scan_all()
+
+        for url in self.package_pages.get(requirement.key,()):
+            # scan each page that might be related to the desired package
+            self.scan_url(url)
+
+    def obtain(self, requirement, installer=None):
+        self.prescan(); self.find_packages(requirement)
+        for dist in self[requirement.key]:
+            if dist in requirement:
+                return dist
+            self.debug("%s does not match %s", requirement, dist)
+        return super(PackageIndex, self).obtain(requirement,installer)
+
+    def check_md5(self, cs, info, filename, tfp):
+        if re.match('md5=[0-9a-f]{32}$', info):
+            self.debug("Validating md5 checksum for %s", filename)
+            if cs.hexdigest()<>info[4:]:
+                tfp.close()
+                os.unlink(filename)
+                raise DistutilsError(
+                    "MD5 validation failed for "+os.path.basename(filename)+
+                    "; possible download problem?"
+                )
+
+    def add_find_links(self, urls):
+        """Add `urls` to the list that will be prescanned for searches"""
+        for url in urls:
+            if (
+                self.to_scan is None        # if we have already "gone online"
+                or not URL_SCHEME(url)      # or it's a local file/directory
+                or url.startswith('file:')
+                or list(distros_for_url(url))   # or a direct package link
+            ):
+                # then go ahead and process it now
+                self.scan_url(url)
+            else:
+                # otherwise, defer retrieval till later
+                self.to_scan.append(url)
+
+    def prescan(self):
+        """Scan urls scheduled for prescanning (e.g. --find-links)"""
+        if self.to_scan:
+            map(self.scan_url, self.to_scan)
+        self.to_scan = None     # from now on, go ahead and process immediately
+
+
+
+
+
+
+
+
+
+
+    def download(self, spec, tmpdir):
+        """Locate and/or download `spec` to `tmpdir`, returning a local path
+
+        `spec` may be a ``Requirement`` object, or a string containing a URL,
+        an existing local filename, or a project/version requirement spec
+        (i.e. the string form of a ``Requirement`` object).  If it is the URL
+        of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one
+        that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is
+        automatically created alongside the downloaded file.
+
+        If `spec` is a ``Requirement`` object or a string containing a
+        project/version requirement spec, this method returns the location of
+        a matching distribution (possibly after downloading it to `tmpdir`).
+        If `spec` is a locally existing file or directory name, it is simply
+        returned unchanged.  If `spec` is a URL, it is downloaded to a subpath
+        of `tmpdir`, and the local filename is returned.  Various errors may be
+        raised if a problem occurs during downloading.
+        """
+        if not isinstance(spec,Requirement):
+            scheme = URL_SCHEME(spec)
+            if scheme:
+                # It's a url, download it to tmpdir
+                found = self._download_url(scheme.group(1), spec, tmpdir)
+                base, fragment = egg_info_for_url(spec)
+                if base.endswith('.py'):
+                    found = self.gen_setup(found,fragment,tmpdir)
+                return found
+            elif os.path.exists(spec):
+                # Existing file or directory, just return it
+                return spec
+            else:
+                try:
+                    spec = Requirement.parse(spec)
+                except ValueError:
+                    raise DistutilsError(
+                        "Not a URL, existing file, or requirement spec: %r" %
+                        (spec,)
+                    )
+        return getattr(self.fetch_distribution(spec, tmpdir),'location',None)
+
+
+    def fetch_distribution(self,
+        requirement, tmpdir, force_scan=False, source=False, develop_ok=False
+    ):
+        """Obtain a distribution suitable for fulfilling `requirement`
+
+        `requirement` must be a ``pkg_resources.Requirement`` instance.
+        If necessary, or if the `force_scan` flag is set, the requirement is
+        searched for in the (online) package index as well as the locally
+        installed packages.  If a distribution matching `requirement` is found,
+        the returned distribution's ``location`` is the value you would have
+        gotten from calling the ``download()`` method with the matching
+        distribution's URL or filename.  If no matching distribution is found,
+        ``None`` is returned.
+
+        If the `source` flag is set, only source distributions and source
+        checkout links will be considered.  Unless the `develop_ok` flag is
+        set, development and system eggs (i.e., those using the ``.egg-info``
+        format) will be ignored.
+        """
+
+        # process a Requirement
+        self.info("Searching for %s", requirement)
+        skipped = {}
+
+        def find(req):
+            # Find a matching distribution; may be called more than once
+
+            for dist in self[req.key]:
+
+                if dist.precedence==DEVELOP_DIST and not develop_ok:
+                    if dist not in skipped:
+                        self.warn("Skipping development or system egg: %s",dist)
+                        skipped[dist] = 1
+                    continue
+
+                if dist in req and (dist.precedence<=SOURCE_DIST or not source):
+                    self.info("Best match: %s", dist)
+                    return dist.clone(
+                        location=self.download(dist.location, tmpdir)
+                    )
+
+        if force_scan:
+            self.prescan()
+            self.find_packages(requirement)
+
+        dist = find(requirement)
+        if dist is None and self.to_scan is not None:
+            self.prescan()
+            dist = find(requirement)
+
+        if dist is None and not force_scan:
+            self.find_packages(requirement)
+            dist = find(requirement)
+
+        if dist is None:
+            self.warn(
+                "No local packages or download links found for %s%s",
+                (source and "a source distribution of " or ""),
+                requirement,
+            )
+        return dist
+
+    def fetch(self, requirement, tmpdir, force_scan=False, source=False):
+        """Obtain a file suitable for fulfilling `requirement`
+
+        DEPRECATED; use the ``fetch_distribution()`` method now instead.  For
+        backward compatibility, this routine is identical but returns the
+        ``location`` of the downloaded distribution instead of a distribution
+        object.
+        """
+        dist = self.fetch_distribution(requirement,tmpdir,force_scan,source)
+        if dist is not None:
+            return dist.location
+        return None
+
+
+
+
+
+
+
+
+    def gen_setup(self, filename, fragment, tmpdir):
+        match = EGG_FRAGMENT.match(fragment); #import pdb; pdb.set_trace()
+        dists = match and [d for d in
+            interpret_distro_name(filename, match.group(1), None) if d.version
+        ] or []
+
+        if len(dists)==1:   # unambiguous ``#egg`` fragment
+            basename = os.path.basename(filename)
+
+            # Make sure the file has been downloaded to the temp dir.
+            if os.path.dirname(filename) != tmpdir:
+                dst = os.path.join(tmpdir, basename)
+                from setuptools.command.easy_install import samefile
+                if not samefile(filename, dst):
+                    shutil.copy2(filename, dst)
+                    filename=dst
+
+            file = open(os.path.join(tmpdir, 'setup.py'), 'w')
+            file.write(
+                "from setuptools import setup\n"
+                "setup(name=%r, version=%r, py_modules=[%r])\n"
+                % (
+                    dists[0].project_name, dists[0].version,
+                    os.path.splitext(basename)[0]
+                )
+            )
+            file.close()
+            return filename
+
+        elif match:
+            raise DistutilsError(
+                "Can't unambiguously interpret project/version identifier %r; "
+                "any dashes in the name or version should be escaped using "
+                "underscores. %r" % (fragment,dists)
+            )
+        else:
+            raise DistutilsError(
+                "Can't process plain .py files without an '#egg=name-version'"
+                " suffix to enable automatic setup script generation."
+            )
+
+    dl_blocksize = 8192
+    def _download_to(self, url, filename):
+        self.url_ok(url,True)   # raises error if not allowed
+        self.info("Downloading %s", url)
+        # Download the file
+        fp, tfp, info = None, None, None
+        try:
+            if '#' in url:
+                url, info = url.split('#', 1)
+            fp = self.open_url(url)
+            if isinstance(fp, urllib2.HTTPError):
+                raise DistutilsError(
+                    "Can't download %s: %s %s" % (url, fp.code,fp.msg)
+                )
+            cs = md5()
+            headers = fp.info()
+            blocknum = 0
+            bs = self.dl_blocksize
+            size = -1
+            if "content-length" in headers:
+                size = int(headers["Content-Length"])
+                self.reporthook(url, filename, blocknum, bs, size)
+            tfp = open(filename,'wb')
+            while True:
+                block = fp.read(bs)
+                if block:
+                    cs.update(block)
+                    tfp.write(block)
+                    blocknum += 1
+                    self.reporthook(url, filename, blocknum, bs, size)
+                else:
+                    break
+            if info: self.check_md5(cs, info, filename, tfp)
+            return headers
+        finally:
+            if fp: fp.close()
+            if tfp: tfp.close()
+
+    def reporthook(self, url, filename, blocknum, blksize, size):
+        pass    # no-op
+
+    def retry_sf_download(self, url, filename):
+        try:
+            return self._download_to(url, filename)
+        except:
+            scheme, server, path, param, query, frag = urlparse.urlparse(url)
+            if server!='dl.sourceforge.net':
+                raise
+
+        mirror = get_sf_ip()
+
+        while _sf_mirrors:
+            self.warn("Download failed: %s", sys.exc_info()[1])
+            url = urlparse.urlunparse((scheme, mirror, path, param, '', frag))
+            try:
+                return self._download_to(url, filename)
+            except:
+                _sf_mirrors.remove(mirror)  # don't retry the same mirror
+                mirror = get_sf_ip()
+
+        raise   # fail if no mirror works
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+    def open_url(self, url):
+        try:
+            return urllib2.urlopen(url)
+        except urllib2.HTTPError, v:
+            return v
+        except urllib2.URLError, v:
+            raise DistutilsError("Download error: %s" % v.reason)
+
+
+    def _download_url(self, scheme, url, tmpdir):
+
+        # Determine download filename
+        #
+        name = filter(None,urlparse.urlparse(url)[2].split('/'))
+        if name:
+            name = name[-1]
+            while '..' in name:
+                name = name.replace('..','.').replace('\\','_')
+        else:
+            name = "__downloaded__"    # default if URL has no path contents
+
+        if name.endswith('.egg.zip'):
+            name = name[:-4]    # strip the extra .zip before download
+
+        filename = os.path.join(tmpdir,name)
+
+        # Download the file
+        #
+        if scheme=='svn' or scheme.startswith('svn+'):
+            return self._download_svn(url, filename)
+        else:
+            headers = self.retry_sf_download(url, filename)
+            if 'html' in headers['content-type'].lower():
+                return self._download_html(url, headers, filename, tmpdir)
+            else:
+                return filename
+
+    def scan_url(self, url):
+        self.process_url(url, True)
+
+
+    def _download_html(self, url, headers, filename, tmpdir):
+        file = open(filename)
+        for line in file:
+            if line.strip():
+                # Check for a subversion index page
+                if re.search(r'<title>Revision \d+:', line):
+                    # it's a subversion index page:
+                    file.close()
+                    os.unlink(filename)
+                    return self._download_svn(url, filename)
+                break   # not an index page
+        file.close()
+        os.unlink(filename)
+        raise DistutilsError("Unexpected HTML page found at "+url)
+
+    def _download_svn(self, url, filename):
+        url = url.split('#',1)[0]   # remove any fragment for svn's sake
+        self.info("Doing subversion checkout from %s to %s", url, filename)
+        os.system("svn checkout -q %s %s" % (url, filename))
+        return filename
+
+    def debug(self, msg, *args):
+        log.debug(msg, *args)
+
+    def info(self, msg, *args):
+        log.info(msg, *args)
+
+    def warn(self, msg, *args):
+        log.warn(msg, *args)
+
+
+
+
+
+
+
+
+
+
+
+
+def fix_sf_url(url):
+    scheme, server, path, param, query, frag = urlparse.urlparse(url)
+    if server!='prdownloads.sourceforge.net':
+        return url
+    return urlparse.urlunparse(
+        (scheme, 'dl.sourceforge.net', 'sourceforge'+path, param, '', frag)
+    )
+
+_sf_mirrors = []
+
+def get_sf_ip():
+    if not _sf_mirrors:
+        try:
+            _sf_mirrors[:] = socket.gethostbyname_ex('dl.sourceforge.net')[-1]
+        except socket.error:
+            # DNS-bl0ck1n9 f1r3w4llz sUx0rs!
+            _sf_mirrors[:] = ['dl.sourceforge.net']
+    return random.choice(_sf_mirrors)
diff --git a/Lib/setuptools/sandbox.py b/Lib/setuptools/sandbox.py
new file mode 100755
index 0000000..606944b
--- /dev/null
+++ b/Lib/setuptools/sandbox.py
@@ -0,0 +1,203 @@
+import os, sys, __builtin__, tempfile
+_os = sys.modules[os.name]
+_open = open
+from distutils.errors import DistutilsError
+__all__ = [
+    "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup",
+]
+
+def run_setup(setup_script, args):
+    """Run a distutils setup script, sandboxed in its directory"""
+
+    old_dir = os.getcwd()
+    save_argv = sys.argv[:]
+    save_path = sys.path[:]
+    setup_dir = os.path.abspath(os.path.dirname(setup_script))
+    temp_dir = os.path.join(setup_dir,'temp')
+    if not os.path.isdir(temp_dir): os.makedirs(temp_dir)
+    save_tmp = tempfile.tempdir
+
+    try:
+        tempfile.tempdir = temp_dir
+        os.chdir(setup_dir)
+        try:
+            sys.argv[:] = [setup_script]+list(args)
+            sys.path.insert(0, setup_dir)
+            DirectorySandbox(setup_dir).run(
+                lambda: execfile(
+                    "setup.py",
+                    {'__file__':setup_script, '__name__':'__main__'}
+                )
+            )
+        except SystemExit, v:
+            if v.args and v.args[0]:
+                raise
+            # Normal exit, just return
+    finally:
+        os.chdir(old_dir)
+        sys.path[:] = save_path
+        sys.argv[:] = save_argv
+        tempfile.tempdir = save_tmp
+
+class AbstractSandbox:
+    """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts"""
+
+    _active = False
+
+    def __init__(self):
+        self._attrs = [
+            name for name in dir(_os)
+                if not name.startswith('_') and hasattr(self,name)
+        ]
+
+    def _copy(self, source):
+        for name in self._attrs:
+            setattr(os, name, getattr(source,name))
+
+    def run(self, func):
+        """Run 'func' under os sandboxing"""
+        try:
+            self._copy(self)
+            __builtin__.open = __builtin__.file = self._open
+            self._active = True
+            return func()
+        finally:
+            self._active = False
+            __builtin__.open = __builtin__.file = _open
+            self._copy(_os)
+
+
+    def _mk_dual_path_wrapper(name):
+        original = getattr(_os,name)
+        def wrap(self,src,dst,*args,**kw):
+            if self._active:
+                src,dst = self._remap_pair(name,src,dst,*args,**kw)
+            return original(src,dst,*args,**kw)
+        return wrap
+
+
+    for name in ["rename", "link", "symlink"]:
+        if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name)
+
+
+    def _mk_single_path_wrapper(name, original=None):
+        original = original or getattr(_os,name)
+        def wrap(self,path,*args,**kw):
+            if self._active:
+                path = self._remap_input(name,path,*args,**kw)
+            return original(path,*args,**kw)
+        return wrap
+
+    _open = _mk_single_path_wrapper('file', _open)
+    for name in [
+        "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir",
+        "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat",
+        "startfile", "mkfifo", "mknod", "pathconf", "access"
+    ]:
+        if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name)
+
+
+    def _mk_single_with_return(name):
+        original = getattr(_os,name)
+        def wrap(self,path,*args,**kw):
+            if self._active:
+                path = self._remap_input(name,path,*args,**kw)
+                return self._remap_output(name, original(path,*args,**kw))
+            return original(path,*args,**kw)
+        return wrap
+
+    for name in ['readlink', 'tempnam']:
+        if hasattr(_os,name): locals()[name] = _mk_single_with_return(name)
+
+    def _mk_query(name):
+        original = getattr(_os,name)
+        def wrap(self,*args,**kw):
+            retval = original(*args,**kw)
+            if self._active:
+                return self._remap_output(name, retval)
+            return retval
+        return wrap
+
+    for name in ['getcwd', 'tmpnam']:
+        if hasattr(_os,name): locals()[name] = _mk_query(name)
+
+    def _validate_path(self,path):
+        """Called to remap or validate any path, whether input or output"""
+        return path
+
+    def _remap_input(self,operation,path,*args,**kw):
+        """Called for path inputs"""
+        return self._validate_path(path)
+
+    def _remap_output(self,operation,path):
+        """Called for path outputs"""
+        return self._validate_path(path)
+
+    def _remap_pair(self,operation,src,dst,*args,**kw):
+        """Called for path pairs like rename, link, and symlink operations"""
+        return (
+            self._remap_input(operation+'-from',src,*args,**kw),
+            self._remap_input(operation+'-to',dst,*args,**kw)
+        )
+
+
+class DirectorySandbox(AbstractSandbox):
+    """Restrict operations to a single subdirectory - pseudo-chroot"""
+
+    write_ops = dict.fromkeys([
+        "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir",
+        "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam",
+    ])
+
+    def __init__(self,sandbox):
+        self._sandbox = os.path.normcase(os.path.realpath(sandbox))
+        self._prefix = os.path.join(self._sandbox,'')
+        AbstractSandbox.__init__(self)
+
+    def _violation(self, operation, *args, **kw):
+        raise SandboxViolation(operation, args, kw)
+
+    def _open(self, path, mode='r', *args, **kw):
+        if mode not in ('r', 'rt', 'rb', 'rU') and not self._ok(path):
+            self._violation("open", path, mode, *args, **kw)
+        return _open(path,mode,*args,**kw)
+
+    def tmpnam(self):
+        self._violation("tmpnam")
+
+    def _ok(self,path):
+        active = self._active
+        try:
+            self._active = False
+            realpath = os.path.normcase(os.path.realpath(path))
+            if realpath==self._sandbox or realpath.startswith(self._prefix):
+                return True
+        finally:
+            self._active = active
+
+    def _remap_input(self,operation,path,*args,**kw):
+        """Called for path inputs"""
+        if operation in self.write_ops and not self._ok(path):
+            self._violation(operation, os.path.realpath(path), *args, **kw)
+        return path
+
+    def _remap_pair(self,operation,src,dst,*args,**kw):
+        """Called for path pairs like rename, link, and symlink operations"""
+        if not self._ok(src) or not self._ok(dst):
+            self._violation(operation, src, dst, *args, **kw)
+        return (src,dst)
+
+
+class SandboxViolation(DistutilsError):
+    """A setup script attempted to modify the filesystem outside the sandbox"""
+
+    def __str__(self):
+        return """SandboxViolation: %s%r %s
+
+The package setup script has attempted to modify files on your system
+that are not within the EasyInstall build area, and has been aborted.
+
+This package cannot be safely installed by EasyInstall, and may not
+support alternate installation locations even if you run its setup
+script by hand.  Please inform the package's author and the EasyInstall
+maintainers to find out if a fix or workaround is available.""" % self.args
diff --git a/Lib/setuptools/site-patch.py b/Lib/setuptools/site-patch.py
new file mode 100755
index 0000000..b1b27b9
--- /dev/null
+++ b/Lib/setuptools/site-patch.py
@@ -0,0 +1,74 @@
+def __boot():
+    import sys, imp, os, os.path
+    PYTHONPATH = os.environ.get('PYTHONPATH')
+    if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH):
+        PYTHONPATH = []
+    else:
+        PYTHONPATH = PYTHONPATH.split(os.pathsep)
+
+    pic = getattr(sys,'path_importer_cache',{})
+    stdpath = sys.path[len(PYTHONPATH):]
+    mydir = os.path.dirname(__file__)
+    #print "searching",stdpath,sys.path
+
+    for item in stdpath:
+        if item==mydir or not item:
+            continue    # skip if current dir. on Windows, or my own directory
+        importer = pic.get(item)
+        if importer is not None:
+            loader = importer.find_module('site')
+            if loader is not None:
+                # This should actually reload the current module
+                loader.load_module('site')
+                break
+        else:
+            try:
+                stream, path, descr = imp.find_module('site',[item])
+            except ImportError:
+                continue
+            if stream is None:
+                continue
+            try:
+                # This should actually reload the current module
+                imp.load_module('site',stream,path,descr)
+            finally:
+                stream.close()
+            break
+    else:
+        raise ImportError("Couldn't find the real 'site' module")
+
+    #print "loaded", __file__
+
+    known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp
+
+    oldpos = getattr(sys,'__egginsert',0)   # save old insertion position
+    sys.__egginsert = 0                     # and reset the current one
+
+    for item in PYTHONPATH:
+        addsitedir(item)
+
+    sys.__egginsert += oldpos           # restore effective old position
+
+    d,nd = makepath(stdpath[0])
+    insert_at = None
+    new_path = []
+
+    for item in sys.path:
+        p,np = makepath(item)
+
+        if np==nd and insert_at is None:
+            # We've hit the first 'system' path entry, so added entries go here
+            insert_at = len(new_path)
+
+        if np in known_paths or insert_at is None:
+            new_path.append(item)
+        else:
+            # new path after the insert point, back-insert it
+            new_path.insert(insert_at, item)
+            insert_at += 1
+
+    sys.path[:] = new_path
+
+if __name__=='site':
+    __boot()
+    del __boot
diff --git a/Lib/setuptools/tests/__init__.py b/Lib/setuptools/tests/__init__.py
new file mode 100644
index 0000000..8a767dc
--- /dev/null
+++ b/Lib/setuptools/tests/__init__.py
@@ -0,0 +1,364 @@
+"""Tests for the 'setuptools' package"""
+
+from unittest import TestSuite, TestCase, makeSuite, defaultTestLoader
+import distutils.core, distutils.cmd
+from distutils.errors import DistutilsOptionError, DistutilsPlatformError
+from distutils.errors import DistutilsSetupError
+import setuptools, setuptools.dist
+from setuptools import Feature
+from distutils.core import Extension
+from setuptools.depends import extract_constant, get_module_constant
+from setuptools.depends import find_module, Require
+from distutils.version import StrictVersion, LooseVersion
+from distutils.util import convert_path
+import sys, os.path
+
+def additional_tests():
+    import doctest
+    return doctest.DocFileSuite(
+        'api_tests.txt', optionflags=doctest.ELLIPSIS, package=__name__,
+    )
+
+
+def makeSetup(**args):
+    """Return distribution from 'setup(**args)', without executing commands"""
+
+    distutils.core._setup_stop_after = "commandline"
+
+    # Don't let system command line leak into tests!
+    args.setdefault('script_args',['install'])
+
+    try:
+        return setuptools.setup(**args)
+    finally:
+        distutils.core_setup_stop_after = None
+
+
+
+
+
+
+
+class DependsTests(TestCase):
+
+    def testExtractConst(self):
+
+        from setuptools.depends import extract_constant
+
+        def f1():
+            global x,y,z
+            x = "test"
+            y = z
+
+        # unrecognized name
+        self.assertEqual(extract_constant(f1.func_code,'q', -1), None)
+
+        # constant assigned
+        self.assertEqual(extract_constant(f1.func_code,'x', -1), "test")
+
+        # expression assigned
+        self.assertEqual(extract_constant(f1.func_code,'y', -1), -1)
+
+        # recognized name, not assigned
+        self.assertEqual(extract_constant(f1.func_code,'z', -1), None)
+
+
+    def testFindModule(self):
+        self.assertRaises(ImportError, find_module, 'no-such.-thing')
+        self.assertRaises(ImportError, find_module, 'setuptools.non-existent')
+        f,p,i = find_module('setuptools.tests'); f.close()
+
+    def testModuleExtract(self):
+        from distutils import __version__
+        self.assertEqual(
+            get_module_constant('distutils','__version__'), __version__
+        )
+        self.assertEqual(
+            get_module_constant('sys','version'), sys.version
+        )
+        self.assertEqual(
+            get_module_constant('setuptools.tests','__doc__'),__doc__
+        )
+
+    def testRequire(self):
+
+        req = Require('Distutils','1.0.3','distutils')
+
+        self.assertEqual(req.name, 'Distutils')
+        self.assertEqual(req.module, 'distutils')
+        self.assertEqual(req.requested_version, '1.0.3')
+        self.assertEqual(req.attribute, '__version__')
+        self.assertEqual(req.full_name(), 'Distutils-1.0.3')
+
+        from distutils import __version__
+        self.assertEqual(req.get_version(), __version__)
+        self.failUnless(req.version_ok('1.0.9'))
+        self.failIf(req.version_ok('0.9.1'))
+        self.failIf(req.version_ok('unknown'))
+
+        self.failUnless(req.is_present())
+        self.failUnless(req.is_current())
+
+        req = Require('Distutils 3000','03000','distutils',format=LooseVersion)
+        self.failUnless(req.is_present())
+        self.failIf(req.is_current())
+        self.failIf(req.version_ok('unknown'))
+
+        req = Require('Do-what-I-mean','1.0','d-w-i-m')
+        self.failIf(req.is_present())
+        self.failIf(req.is_current())
+
+        req = Require('Tests', None, 'tests', homepage="http://example.com")
+        self.assertEqual(req.format, None)
+        self.assertEqual(req.attribute, None)
+        self.assertEqual(req.requested_version, None)
+        self.assertEqual(req.full_name(), 'Tests')
+        self.assertEqual(req.homepage, 'http://example.com')
+
+        paths = [os.path.dirname(p) for p in __path__]
+        self.failUnless(req.is_present(paths))
+        self.failUnless(req.is_current(paths))
+
+
+
+class DistroTests(TestCase):
+
+    def setUp(self):
+        self.e1 = Extension('bar.ext',['bar.c'])
+        self.e2 = Extension('c.y', ['y.c'])
+
+        self.dist = makeSetup(
+            packages=['a', 'a.b', 'a.b.c', 'b', 'c'],
+            py_modules=['b.d','x'],
+            ext_modules = (self.e1, self.e2),
+            package_dir = {},
+        )
+
+
+    def testDistroType(self):
+        self.failUnless(isinstance(self.dist,setuptools.dist.Distribution))
+
+
+    def testExcludePackage(self):
+        self.dist.exclude_package('a')
+        self.assertEqual(self.dist.packages, ['b','c'])
+
+        self.dist.exclude_package('b')
+        self.assertEqual(self.dist.packages, ['c'])
+        self.assertEqual(self.dist.py_modules, ['x'])
+        self.assertEqual(self.dist.ext_modules, [self.e1, self.e2])
+
+        self.dist.exclude_package('c')
+        self.assertEqual(self.dist.packages, [])
+        self.assertEqual(self.dist.py_modules, ['x'])
+        self.assertEqual(self.dist.ext_modules, [self.e1])
+
+        # test removals from unspecified options
+        makeSetup().exclude_package('x')
+
+
+
+
+
+
+
+    def testIncludeExclude(self):
+        # remove an extension
+        self.dist.exclude(ext_modules=[self.e1])
+        self.assertEqual(self.dist.ext_modules, [self.e2])
+
+        # add it back in
+        self.dist.include(ext_modules=[self.e1])
+        self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
+
+        # should not add duplicate
+        self.dist.include(ext_modules=[self.e1])
+        self.assertEqual(self.dist.ext_modules, [self.e2, self.e1])
+
+    def testExcludePackages(self):
+        self.dist.exclude(packages=['c','b','a'])
+        self.assertEqual(self.dist.packages, [])
+        self.assertEqual(self.dist.py_modules, ['x'])
+        self.assertEqual(self.dist.ext_modules, [self.e1])
+
+    def testEmpty(self):
+        dist = makeSetup()
+        dist.include(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
+        dist = makeSetup()
+        dist.exclude(packages=['a'], py_modules=['b'], ext_modules=[self.e2])
+
+    def testContents(self):
+        self.failUnless(self.dist.has_contents_for('a'))
+        self.dist.exclude_package('a')
+        self.failIf(self.dist.has_contents_for('a'))
+
+        self.failUnless(self.dist.has_contents_for('b'))
+        self.dist.exclude_package('b')
+        self.failIf(self.dist.has_contents_for('b'))
+
+        self.failUnless(self.dist.has_contents_for('c'))
+        self.dist.exclude_package('c')
+        self.failIf(self.dist.has_contents_for('c'))
+
+
+
+
+    def testInvalidIncludeExclude(self):
+        self.assertRaises(DistutilsSetupError,
+            self.dist.include, nonexistent_option='x'
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.exclude, nonexistent_option='x'
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.include, packages={'x':'y'}
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.exclude, packages={'x':'y'}
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.include, ext_modules={'x':'y'}
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.exclude, ext_modules={'x':'y'}
+        )
+
+        self.assertRaises(DistutilsSetupError,
+            self.dist.include, package_dir=['q']
+        )
+        self.assertRaises(DistutilsSetupError,
+            self.dist.exclude, package_dir=['q']
+        )
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class FeatureTests(TestCase):
+
+    def setUp(self):
+        self.req = Require('Distutils','1.0.3','distutils')
+        self.dist = makeSetup(
+            features={
+                'foo': Feature("foo",standard=True,require_features=['baz',self.req]),
+                'bar': Feature("bar",  standard=True, packages=['pkg.bar'],
+                               py_modules=['bar_et'], remove=['bar.ext'],
+                       ),
+                'baz': Feature(
+                        "baz", optional=False, packages=['pkg.baz'],
+                        scripts = ['scripts/baz_it'],
+                        libraries=[('libfoo','foo/foofoo.c')]
+                       ),
+                'dwim': Feature("DWIM", available=False, remove='bazish'),
+            },
+            script_args=['--without-bar', 'install'],
+            packages = ['pkg.bar', 'pkg.foo'],
+            py_modules = ['bar_et', 'bazish'],
+            ext_modules = [Extension('bar.ext',['bar.c'])]
+        )
+
+    def testDefaults(self):
+        self.failIf(
+            Feature(
+                "test",standard=True,remove='x',available=False
+            ).include_by_default()
+        )
+        self.failUnless(
+            Feature("test",standard=True,remove='x').include_by_default()
+        )
+        # Feature must have either kwargs, removes, or require_features
+        self.assertRaises(DistutilsSetupError, Feature, "test")
+
+    def testAvailability(self):
+        self.assertRaises(
+            DistutilsPlatformError,
+            self.dist.features['dwim'].include_in, self.dist
+        )
+
+    def testFeatureOptions(self):
+        dist = self.dist
+        self.failUnless(
+            ('with-dwim',None,'include DWIM') in dist.feature_options
+        )
+        self.failUnless(
+            ('without-dwim',None,'exclude DWIM (default)') in dist.feature_options
+        )
+        self.failUnless(
+            ('with-bar',None,'include bar (default)') in dist.feature_options
+        )
+        self.failUnless(
+            ('without-bar',None,'exclude bar') in dist.feature_options
+        )
+        self.assertEqual(dist.feature_negopt['without-foo'],'with-foo')
+        self.assertEqual(dist.feature_negopt['without-bar'],'with-bar')
+        self.assertEqual(dist.feature_negopt['without-dwim'],'with-dwim')
+        self.failIf('without-baz' in dist.feature_negopt)
+
+    def testUseFeatures(self):
+        dist = self.dist
+        self.assertEqual(dist.with_foo,1)
+        self.assertEqual(dist.with_bar,0)
+        self.assertEqual(dist.with_baz,1)
+        self.failIf('bar_et' in dist.py_modules)
+        self.failIf('pkg.bar' in dist.packages)
+        self.failUnless('pkg.baz' in dist.packages)
+        self.failUnless('scripts/baz_it' in dist.scripts)
+        self.failUnless(('libfoo','foo/foofoo.c') in dist.libraries)
+        self.assertEqual(dist.ext_modules,[])
+        self.assertEqual(dist.require_features, [self.req])
+
+        # If we ask for bar, it should fail because we explicitly disabled
+        # it on the command line
+        self.assertRaises(DistutilsOptionError, dist.include_feature, 'bar')
+
+    def testFeatureWithInvalidRemove(self):
+        self.assertRaises(
+            SystemExit, makeSetup, features = {'x':Feature('x', remove='y')}
+        )
+
+class TestCommandTests(TestCase):
+
+    def testTestIsCommand(self):
+        test_cmd = makeSetup().get_command_obj('test')
+        self.failUnless(isinstance(test_cmd, distutils.cmd.Command))
+
+    def testLongOptSuiteWNoDefault(self):
+        ts1 = makeSetup(script_args=['test','--test-suite=foo.tests.suite'])
+        ts1 = ts1.get_command_obj('test')
+        ts1.ensure_finalized()
+        self.assertEqual(ts1.test_suite, 'foo.tests.suite')
+
+    def testDefaultSuite(self):
+        ts2 = makeSetup(test_suite='bar.tests.suite').get_command_obj('test')
+        ts2.ensure_finalized()
+        self.assertEqual(ts2.test_suite, 'bar.tests.suite')
+
+    def testDefaultWModuleOnCmdLine(self):
+        ts3 = makeSetup(
+            test_suite='bar.tests',
+            script_args=['test','-m','foo.tests']
+        ).get_command_obj('test')
+        ts3.ensure_finalized()
+        self.assertEqual(ts3.test_module, 'foo.tests')
+        self.assertEqual(ts3.test_suite,  'foo.tests.test_suite')
+
+    def testConflictingOptions(self):
+        ts4 = makeSetup(
+            script_args=['test','-m','bar.tests', '-s','foo.tests.suite']
+        ).get_command_obj('test')
+        self.assertRaises(DistutilsOptionError, ts4.ensure_finalized)
+
+    def testNoSuite(self):
+        ts5 = makeSetup().get_command_obj('test')
+        ts5.ensure_finalized()
+        self.assertEqual(ts5.test_suite, None)
diff --git a/Lib/setuptools/tests/api_tests.txt b/Lib/setuptools/tests/api_tests.txt
new file mode 100755
index 0000000..735ad8d
--- /dev/null
+++ b/Lib/setuptools/tests/api_tests.txt
@@ -0,0 +1,330 @@
+Pluggable Distributions of Python Software
+==========================================
+
+Distributions
+-------------
+
+A "Distribution" is a collection of files that represent a "Release" of a
+"Project" as of a particular point in time, denoted by a
+"Version"::
+
+    >>> import sys, pkg_resources
+    >>> from pkg_resources import Distribution
+    >>> Distribution(project_name="Foo", version="1.2")
+    Foo 1.2
+
+Distributions have a location, which can be a filename, URL, or really anything
+else you care to use::
+
+    >>> dist = Distribution(
+    ...     location="http://example.com/something",
+    ...     project_name="Bar", version="0.9"
+    ... )
+
+    >>> dist
+    Bar 0.9 (http://example.com/something)
+
+
+Distributions have various introspectable attributes::
+
+    >>> dist.location
+    'http://example.com/something'
+
+    >>> dist.project_name
+    'Bar'
+
+    >>> dist.version
+    '0.9'
+
+    >>> dist.py_version == sys.version[:3]
+    True
+
+    >>> print dist.platform
+    None
+
+Including various computed attributes::
+
+    >>> from pkg_resources import parse_version
+    >>> dist.parsed_version == parse_version(dist.version)
+    True
+
+    >>> dist.key    # case-insensitive form of the project name
+    'bar'
+
+Distributions are compared (and hashed) by version first::
+
+    >>> Distribution(version='1.0') == Distribution(version='1.0')
+    True
+    >>> Distribution(version='1.0') == Distribution(version='1.1')
+    False
+    >>> Distribution(version='1.0') <  Distribution(version='1.1')
+    True
+
+but also by project name (case-insensitive), platform, Python version,
+location, etc.::
+
+    >>> Distribution(project_name="Foo",version="1.0") == \
+    ... Distribution(project_name="Foo",version="1.0")
+    True
+
+    >>> Distribution(project_name="Foo",version="1.0") == \
+    ... Distribution(project_name="foo",version="1.0")
+    True
+
+    >>> Distribution(project_name="Foo",version="1.0") == \
+    ... Distribution(project_name="Foo",version="1.1")
+    False
+
+    >>> Distribution(project_name="Foo",py_version="2.3",version="1.0") == \
+    ... Distribution(project_name="Foo",py_version="2.4",version="1.0")
+    False
+
+    >>> Distribution(location="spam",version="1.0") == \
+    ... Distribution(location="spam",version="1.0")
+    True
+
+    >>> Distribution(location="spam",version="1.0") == \
+    ... Distribution(location="baz",version="1.0")
+    False
+
+
+
+Hash and compare distribution by prio/plat
+
+Get version from metadata
+provider capabilities
+egg_name()
+as_requirement()
+from_location, from_filename (w/path normalization)
+
+Releases may have zero or more "Requirements", which indicate
+what releases of another project the release requires in order to
+function.  A Requirement names the other project, expresses some criteria
+as to what releases of that project are acceptable, and lists any "Extras"
+that the requiring release may need from that project.  (An Extra is an
+optional feature of a Release, that can only be used if its additional
+Requirements are satisfied.)
+
+
+
+The Working Set
+---------------
+
+A collection of active distributions is called a Working Set.  Note that a
+Working Set can contain any importable distribution, not just pluggable ones.
+For example, the Python standard library is an importable distribution that
+will usually be part of the Working Set, even though it is not pluggable.
+Similarly, when you are doing development work on a project, the files you are
+editing are also a Distribution.  (And, with a little attention to the
+directory names used,  and including some additional metadata, such a
+"development distribution" can be made pluggable as well.)
+
+    >>> from pkg_resources import WorkingSet
+
+A working set's entries are the sys.path entries that correspond to the active
+distributions.  By default, the working set's entries are the items on
+``sys.path``::
+
+    >>> ws = WorkingSet()
+    >>> ws.entries == sys.path
+    True
+
+But you can also create an empty working set explicitly, and add distributions
+to it::
+
+    >>> ws = WorkingSet([])
+    >>> ws.add(dist)
+    >>> ws.entries
+    ['http://example.com/something']
+    >>> dist in ws
+    True
+    >>> Distribution('foo',version="") in ws
+    False
+
+And you can iterate over its distributions::
+
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+Adding the same distribution more than once is a no-op::
+
+    >>> ws.add(dist)
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+For that matter, adding multiple distributions for the same project also does
+nothing, because a working set can only hold one active distribution per
+project -- the first one added to it::
+
+    >>> ws.add(
+    ...     Distribution(
+    ...         'http://example.com/something', project_name="Bar",
+    ...         version="7.2"
+    ...     )
+    ... )
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+You can append a path entry to a working set using ``add_entry()``::
+
+    >>> ws.entries
+    ['http://example.com/something']
+    >>> ws.add_entry(pkg_resources.__file__)
+    >>> ws.entries
+    ['http://example.com/something', '...pkg_resources.py...']
+
+Multiple additions result in multiple entries, even if the entry is already in
+the working set (because ``sys.path`` can contain the same entry more than
+once)::
+
+    >>> ws.add_entry(pkg_resources.__file__)
+    >>> ws.entries
+    ['...example.com...', '...pkg_resources...', '...pkg_resources...']
+
+And you can specify the path entry a distribution was found under, using the
+optional second parameter to ``add()``::
+
+    >>> ws = WorkingSet([])
+    >>> ws.add(dist,"foo")
+    >>> ws.entries
+    ['foo']
+
+But even if a distribution is found under multiple path entries, it still only
+shows up once when iterating the working set:
+
+    >>> ws.add_entry(ws.entries[0])
+    >>> list(ws)
+    [Bar 0.9 (http://example.com/something)]
+
+You can ask a WorkingSet to ``find()`` a distribution matching a requirement::
+
+    >>> from pkg_resources import Requirement
+    >>> print ws.find(Requirement.parse("Foo==1.0"))    # no match, return None
+    None
+
+    >>> ws.find(Requirement.parse("Bar==0.9"))  # match, return distribution
+    Bar 0.9 (http://example.com/something)
+
+Note that asking for a conflicting version of a distribution already in a
+working set triggers a ``pkg_resources.VersionConflict`` error:
+
+    >>> ws.find(Requirement.parse("Bar==1.0")) # doctest: +NORMALIZE_WHITESPACE
+    Traceback (most recent call last):
+      ...
+    VersionConflict: (Bar 0.9 (http://example.com/something),
+                      Requirement.parse('Bar==1.0'))
+
+You can subscribe a callback function to receive notifications whenever a new
+distribution is added to a working set.  The callback is immediately invoked
+once for each existing distribution in the working set, and then is called
+again for new distributions added thereafter::
+
+    >>> def added(dist): print "Added", dist
+    >>> ws.subscribe(added)
+    Added Bar 0.9
+    >>> foo12 = Distribution(project_name="Foo", version="1.2", location="f12") 
+    >>> ws.add(foo12)
+    Added Foo 1.2
+
+Note, however, that only the first distribution added for a given project name
+will trigger a callback, even during the initial ``subscribe()`` callback::
+
+    >>> foo14 = Distribution(project_name="Foo", version="1.4", location="f14") 
+    >>> ws.add(foo14)   # no callback, because Foo 1.2 is already active
+
+    >>> ws = WorkingSet([])
+    >>> ws.add(foo12)
+    >>> ws.add(foo14)
+    >>> ws.subscribe(added)
+    Added Foo 1.2
+    
+And adding a callback more than once has no effect, either::
+
+    >>> ws.subscribe(added)     # no callbacks
+
+    # and no double-callbacks on subsequent additions, either
+    >>> just_a_test = Distribution(project_name="JustATest", version="0.99")
+    >>> ws.add(just_a_test)
+    Added JustATest 0.99
+
+
+Finding Plugins
+---------------
+
+``WorkingSet`` objects can be used to figure out what plugins in an
+``Environment`` can be loaded without any resolution errors::
+
+    >>> from pkg_resources import Environment
+
+    >>> plugins = Environment([])   # normally, a list of plugin directories
+    >>> plugins.add(foo12)
+    >>> plugins.add(foo14)
+    >>> plugins.add(just_a_test)
+    
+In the simplest case, we just get the newest version of each distribution in
+the plugin environment::
+
+    >>> ws = WorkingSet([])
+    >>> ws.find_plugins(plugins)
+    ([JustATest 0.99, Foo 1.4 (f14)], {})
+
+But if there's a problem with a version conflict or missing requirements, the
+method falls back to older versions, and the error info dict will contain an
+exception instance for each unloadable plugin::
+
+    >>> ws.add(foo12)   # this will conflict with Foo 1.4
+    >>> ws.find_plugins(plugins)
+    ([JustATest 0.99, Foo 1.2 (f12)], {Foo 1.4 (f14): VersionConflict(...)})
+
+But if you disallow fallbacks, the failed plugin will be skipped instead of
+trying older versions::
+
+    >>> ws.find_plugins(plugins, fallback=False)
+    ([JustATest 0.99], {Foo 1.4 (f14): VersionConflict(...)})
+
+
+
+Platform Compatibility Rules
+----------------------------
+
+On the Mac, there are potential compatibility issues for modules compiled
+on newer versions of Mac OS X than what the user is running. Additionally,
+Mac OS X will soon have two platforms to contend with: Intel and PowerPC.
+
+Basic equality works as on other platforms::
+
+    >>> from pkg_resources import compatible_platforms as cp
+    >>> reqd = 'macosx-10.4-ppc'
+    >>> cp(reqd, reqd)
+    True
+    >>> cp("win32", reqd)
+    False
+
+Distributions made on other machine types are not compatible::
+
+    >>> cp("macosx-10.4-i386", reqd)
+    False
+
+Distributions made on earlier versions of the OS are compatible, as
+long as they are from the same top-level version. The patchlevel version
+number does not matter::
+
+    >>> cp("macosx-10.4-ppc", reqd)
+    True
+    >>> cp("macosx-10.3-ppc", reqd)
+    True
+    >>> cp("macosx-10.5-ppc", reqd)
+    False
+    >>> cp("macosx-9.5-ppc", reqd)
+    False
+
+Backwards compatibility for packages made via earlier versions of 
+setuptools is provided as well::
+
+    >>> cp("darwin-8.2.0-Power_Macintosh", reqd)
+    True
+    >>> cp("darwin-7.2.0-Power_Macintosh", reqd)
+    True
+    >>> cp("darwin-8.2.0-Power_Macintosh", "macosx-10.3-ppc")
+    False
+
diff --git a/Lib/setuptools/tests/test_resources.py b/Lib/setuptools/tests/test_resources.py
new file mode 100644
index 0000000..f32c72e
--- /dev/null
+++ b/Lib/setuptools/tests/test_resources.py
@@ -0,0 +1,483 @@
+from unittest import TestCase, makeSuite
+from pkg_resources import *
+import pkg_resources, sys
+from sets import ImmutableSet
+
+class Metadata(EmptyProvider):
+    """Mock object to return metadata as if from an on-disk distribution"""
+
+    def __init__(self,*pairs):
+        self.metadata = dict(pairs)
+
+    def has_metadata(self,name):
+        return name in self.metadata
+
+    def get_metadata(self,name):
+        return self.metadata[name]
+
+    def get_metadata_lines(self,name):
+        return yield_lines(self.get_metadata(name))
+
+
+class DistroTests(TestCase):
+
+    def testCollection(self):
+        # empty path should produce no distributions
+        ad = Environment([], platform=None, python=None)
+        self.assertEqual(list(ad), [])
+        self.assertEqual(ad['FooPkg'],[])
+
+        ad.add(Distribution.from_filename("FooPkg-1.3_1.egg"))
+        ad.add(Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg"))
+        ad.add(Distribution.from_filename("FooPkg-1.2-py2.4.egg"))
+
+        # Name is in there now
+        self.failUnless(ad['FooPkg'])
+
+        # But only 1 package
+        self.assertEqual(list(ad), ['foopkg'])
+
+
+
+        # Distributions sort by version
+        self.assertEqual(
+            [dist.version for dist in ad['FooPkg']], ['1.4','1.3-1','1.2']
+        )
+        # Removing a distribution leaves sequence alone
+        ad.remove(ad['FooPkg'][1])
+        self.assertEqual(
+            [dist.version for dist in ad['FooPkg']], ['1.4','1.2']
+        )
+        # And inserting adds them in order
+        ad.add(Distribution.from_filename("FooPkg-1.9.egg"))
+        self.assertEqual(
+            [dist.version for dist in ad['FooPkg']], ['1.9','1.4','1.2']
+        )
+
+        ws = WorkingSet([])
+        foo12 = Distribution.from_filename("FooPkg-1.2-py2.4.egg")
+        foo14 = Distribution.from_filename("FooPkg-1.4-py2.4-win32.egg")
+        req, = parse_requirements("FooPkg>=1.3")
+
+        # Nominal case: no distros on path, should yield all applicable
+        self.assertEqual(ad.best_match(req,ws).version, '1.9')
+        # If a matching distro is already installed, should return only that
+        ws.add(foo14); self.assertEqual(ad.best_match(req,ws).version, '1.4')
+
+        # If the first matching distro is unsuitable, it's a version conflict
+        ws = WorkingSet([]); ws.add(foo12); ws.add(foo14)
+        self.assertRaises(VersionConflict, ad.best_match, req, ws)
+
+        # If more than one match on the path, the first one takes precedence
+        ws = WorkingSet([]); ws.add(foo14); ws.add(foo12); ws.add(foo14);
+        self.assertEqual(ad.best_match(req,ws).version, '1.4')
+
+    def checkFooPkg(self,d):
+        self.assertEqual(d.project_name, "FooPkg")
+        self.assertEqual(d.key, "foopkg")
+        self.assertEqual(d.version, "1.3-1")
+        self.assertEqual(d.py_version, "2.4")
+        self.assertEqual(d.platform, "win32")
+        self.assertEqual(d.parsed_version, parse_version("1.3-1"))
+
+    def testDistroBasics(self):
+        d = Distribution(
+            "/some/path",
+            project_name="FooPkg",version="1.3-1",py_version="2.4",platform="win32"
+        )
+        self.checkFooPkg(d)
+
+        d = Distribution("/some/path")
+        self.assertEqual(d.py_version, sys.version[:3])
+        self.assertEqual(d.platform, None)
+
+    def testDistroParse(self):
+        d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg")
+        self.checkFooPkg(d)
+        d = Distribution.from_filename("FooPkg-1.3_1-py2.4-win32.egg-info")
+        self.checkFooPkg(d)
+
+    def testDistroMetadata(self):
+        d = Distribution(
+            "/some/path", project_name="FooPkg", py_version="2.4", platform="win32",
+            metadata = Metadata(
+                ('PKG-INFO',"Metadata-Version: 1.0\nVersion: 1.3-1\n")
+            )
+        )
+        self.checkFooPkg(d)
+
+
+    def distRequires(self, txt):
+        return Distribution("/foo", metadata=Metadata(('depends.txt', txt)))
+
+    def checkRequires(self, dist, txt, extras=()):
+        self.assertEqual(
+            list(dist.requires(extras)),
+            list(parse_requirements(txt))
+        )
+
+    def testDistroDependsSimple(self):
+        for v in "Twisted>=1.5", "Twisted>=1.5\nZConfig>=2.0":
+            self.checkRequires(self.distRequires(v), v)
+
+
+    def testResolve(self):
+        ad = Environment([]); ws = WorkingSet([])
+        # Resolving no requirements -> nothing to install
+        self.assertEqual( list(ws.resolve([],ad)), [] )
+        # Request something not in the collection -> DistributionNotFound
+        self.assertRaises(
+            DistributionNotFound, ws.resolve, parse_requirements("Foo"), ad
+        )
+        Foo = Distribution.from_filename(
+            "/foo_dir/Foo-1.2.egg",
+            metadata=Metadata(('depends.txt', "[bar]\nBaz>=2.0"))
+        )
+        ad.add(Foo); ad.add(Distribution.from_filename("Foo-0.9.egg"))
+
+        # Request thing(s) that are available -> list to activate
+        for i in range(3):
+            targets = list(ws.resolve(parse_requirements("Foo"), ad))
+            self.assertEqual(targets, [Foo])
+            map(ws.add,targets)
+        self.assertRaises(VersionConflict, ws.resolve,
+            parse_requirements("Foo==0.9"), ad)
+        ws = WorkingSet([]) # reset
+
+        # Request an extra that causes an unresolved dependency for "Baz"
+        self.assertRaises(
+            DistributionNotFound, ws.resolve,parse_requirements("Foo[bar]"), ad
+        )
+        Baz = Distribution.from_filename(
+            "/foo_dir/Baz-2.1.egg", metadata=Metadata(('depends.txt', "Foo"))
+        )
+        ad.add(Baz)
+
+        # Activation list now includes resolved dependency
+        self.assertEqual(
+            list(ws.resolve(parse_requirements("Foo[bar]"), ad)), [Foo,Baz]
+        )
+        # Requests for conflicting versions produce VersionConflict
+        self.assertRaises( VersionConflict,
+            ws.resolve, parse_requirements("Foo==1.2\nFoo!=1.2"), ad
+        )
+
+    def testDistroDependsOptions(self):
+        d = self.distRequires("""
+            Twisted>=1.5
+            [docgen]
+            ZConfig>=2.0
+            docutils>=0.3
+            [fastcgi]
+            fcgiapp>=0.1""")
+        self.checkRequires(d,"Twisted>=1.5")
+        self.checkRequires(
+            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3".split(), ["docgen"]
+        )
+        self.checkRequires(
+            d,"Twisted>=1.5 fcgiapp>=0.1".split(), ["fastcgi"]
+        )
+        self.checkRequires(
+            d,"Twisted>=1.5 ZConfig>=2.0 docutils>=0.3 fcgiapp>=0.1".split(),
+            ["docgen","fastcgi"]
+        )
+        self.checkRequires(
+            d,"Twisted>=1.5 fcgiapp>=0.1 ZConfig>=2.0 docutils>=0.3".split(),
+            ["fastcgi", "docgen"]
+        )
+        self.assertRaises(UnknownExtra, d.requires, ["foo"])
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class EntryPointTests(TestCase):
+
+    def assertfields(self, ep):
+        self.assertEqual(ep.name,"foo")
+        self.assertEqual(ep.module_name,"setuptools.tests.test_resources")
+        self.assertEqual(ep.attrs, ("EntryPointTests",))
+        self.assertEqual(ep.extras, ("x",))
+        self.failUnless(ep.load() is EntryPointTests)
+        self.assertEqual(
+            str(ep),
+            "foo = setuptools.tests.test_resources:EntryPointTests [x]"
+        )
+
+    def setUp(self):
+        self.dist = Distribution.from_filename(
+            "FooPkg-1.2-py2.4.egg", metadata=Metadata(('requires.txt','[x]')))
+
+    def testBasics(self):
+        ep = EntryPoint(
+            "foo", "setuptools.tests.test_resources", ["EntryPointTests"],
+            ["x"], self.dist
+        )
+        self.assertfields(ep)
+
+    def testParse(self):
+        s = "foo = setuptools.tests.test_resources:EntryPointTests [x]"
+        ep = EntryPoint.parse(s, self.dist)
+        self.assertfields(ep)
+
+        ep = EntryPoint.parse("bar baz=  spammity[PING]")
+        self.assertEqual(ep.name,"bar baz")
+        self.assertEqual(ep.module_name,"spammity")
+        self.assertEqual(ep.attrs, ())
+        self.assertEqual(ep.extras, ("ping",))
+
+        ep = EntryPoint.parse(" fizzly =  wocka:foo")
+        self.assertEqual(ep.name,"fizzly")
+        self.assertEqual(ep.module_name,"wocka")
+        self.assertEqual(ep.attrs, ("foo",))
+        self.assertEqual(ep.extras, ())
+
+    def testRejects(self):
+        for ep in [
+            "foo", "x=1=2", "x=a:b:c", "q=x/na", "fez=pish:tush-z", "x=f[a]>2",
+        ]:
+            try: EntryPoint.parse(ep)
+            except ValueError: pass
+            else: raise AssertionError("Should've been bad", ep)
+
+    def checkSubMap(self, m):
+        self.assertEqual(str(m),
+            "{"
+            "'feature2': EntryPoint.parse("
+                "'feature2 = another.module:SomeClass [extra1,extra2]'), "
+            "'feature1': EntryPoint.parse("
+                "'feature1 = somemodule:somefunction')"
+            "}"
+        )
+
+    submap_str = """
+            # define features for blah blah
+            feature1 = somemodule:somefunction
+            feature2 = another.module:SomeClass [extra1,extra2]
+    """
+
+    def testParseList(self):
+        self.checkSubMap(EntryPoint.parse_group("xyz", self.submap_str))
+        self.assertRaises(ValueError, EntryPoint.parse_group, "x a", "foo=bar")
+        self.assertRaises(ValueError, EntryPoint.parse_group, "x",
+            ["foo=baz", "foo=bar"])
+
+    def testParseMap(self):
+        m = EntryPoint.parse_map({'xyz':self.submap_str})
+        self.checkSubMap(m['xyz'])
+        self.assertEqual(m.keys(),['xyz'])
+        m = EntryPoint.parse_map("[xyz]\n"+self.submap_str)
+        self.checkSubMap(m['xyz'])
+        self.assertEqual(m.keys(),['xyz'])
+        self.assertRaises(ValueError, EntryPoint.parse_map, ["[xyz]", "[xyz]"])
+        self.assertRaises(ValueError, EntryPoint.parse_map, self.submap_str)
+
+
+class RequirementsTests(TestCase):
+
+    def testBasics(self):
+        r = Requirement.parse("Twisted>=1.2")
+        self.assertEqual(str(r),"Twisted>=1.2")
+        self.assertEqual(repr(r),"Requirement.parse('Twisted>=1.2')")
+        self.assertEqual(r, Requirement("Twisted", [('>=','1.2')], ()))
+        self.assertEqual(r, Requirement("twisTed", [('>=','1.2')], ()))
+        self.assertNotEqual(r, Requirement("Twisted", [('>=','2.0')], ()))
+        self.assertNotEqual(r, Requirement("Zope", [('>=','1.2')], ()))
+        self.assertNotEqual(r, Requirement("Zope", [('>=','3.0')], ()))
+        self.assertNotEqual(r, Requirement.parse("Twisted[extras]>=1.2"))
+
+    def testOrdering(self):
+        r1 = Requirement("Twisted", [('==','1.2c1'),('>=','1.2')], ())
+        r2 = Requirement("Twisted", [('>=','1.2'),('==','1.2c1')], ())
+        self.assertEqual(r1,r2)
+        self.assertEqual(str(r1),str(r2))
+        self.assertEqual(str(r2),"Twisted==1.2c1,>=1.2")
+
+    def testBasicContains(self):
+        r = Requirement("Twisted", [('>=','1.2')], ())
+        foo_dist = Distribution.from_filename("FooPkg-1.3_1.egg")
+        twist11  = Distribution.from_filename("Twisted-1.1.egg")
+        twist12  = Distribution.from_filename("Twisted-1.2.egg")
+        self.failUnless(parse_version('1.2') in r)
+        self.failUnless(parse_version('1.1') not in r)
+        self.failUnless('1.2' in r)
+        self.failUnless('1.1' not in r)
+        self.failUnless(foo_dist not in r)
+        self.failUnless(twist11 not in r)
+        self.failUnless(twist12 in r)
+
+    def testAdvancedContains(self):
+        r, = parse_requirements("Foo>=1.2,<=1.3,==1.9,>2.0,!=2.5,<3.0,==4.5")
+        for v in ('1.2','1.2.2','1.3','1.9','2.0.1','2.3','2.6','3.0c1','4.5'):
+            self.failUnless(v in r, (v,r))
+        for v in ('1.2c1','1.3.1','1.5','1.9.1','2.0','2.5','3.0','4.0'):
+            self.failUnless(v not in r, (v,r))
+
+
+    def testOptionsAndHashing(self):
+        r1 = Requirement.parse("Twisted[foo,bar]>=1.2")
+        r2 = Requirement.parse("Twisted[bar,FOO]>=1.2")
+        r3 = Requirement.parse("Twisted[BAR,FOO]>=1.2.0")
+        self.assertEqual(r1,r2)
+        self.assertEqual(r1,r3)
+        self.assertEqual(r1.extras, ("foo","bar"))
+        self.assertEqual(r2.extras, ("bar","foo"))  # extras are normalized
+        self.assertEqual(hash(r1), hash(r2))
+        self.assertEqual(
+            hash(r1), hash(("twisted", ((">=",parse_version("1.2")),),
+                            ImmutableSet(["foo","bar"])))
+        )
+
+    def testVersionEquality(self):
+        r1 = Requirement.parse("setuptools==0.3a2")
+        r2 = Requirement.parse("setuptools!=0.3a4")
+        d = Distribution.from_filename
+
+        self.failIf(d("setuptools-0.3a4.egg") in r1)
+        self.failIf(d("setuptools-0.3a1.egg") in r1)
+        self.failIf(d("setuptools-0.3a4.egg") in r2)
+
+        self.failUnless(d("setuptools-0.3a2.egg") in r1)
+        self.failUnless(d("setuptools-0.3a2.egg") in r2)
+        self.failUnless(d("setuptools-0.3a3.egg") in r2)
+        self.failUnless(d("setuptools-0.3a5.egg") in r2)
+
+
+
+
+
+
+
+
+
+
+
+
+
+
+class ParseTests(TestCase):
+
+    def testEmptyParse(self):
+        self.assertEqual(list(parse_requirements('')), [])
+
+    def testYielding(self):
+        for inp,out in [
+            ([], []), ('x',['x']), ([[]],[]), (' x\n y', ['x','y']),
+            (['x\n\n','y'], ['x','y']),
+        ]:
+            self.assertEqual(list(pkg_resources.yield_lines(inp)),out)
+
+    def testSplitting(self):
+        self.assertEqual(
+            list(
+                pkg_resources.split_sections("""
+                    x
+                    [Y]
+                    z
+
+                    a
+                    [b ]
+                    # foo
+                    c
+                    [ d]
+                    [q]
+                    v
+                    """
+                )
+            ),
+            [(None,["x"]), ("Y",["z","a"]), ("b",["c"]), ("d",[]), ("q",["v"])]
+        )
+        self.assertRaises(ValueError,list,pkg_resources.split_sections("[foo"))
+
+    def testSafeName(self):
+        self.assertEqual(safe_name("adns-python"), "adns-python")
+        self.assertEqual(safe_name("WSGI Utils"),  "WSGI-Utils")
+        self.assertEqual(safe_name("WSGI  Utils"), "WSGI-Utils")
+        self.assertEqual(safe_name("Money$$$Maker"), "Money-Maker")
+        self.assertNotEqual(safe_name("peak.web"), "peak-web")
+
+    def testSafeVersion(self):
+        self.assertEqual(safe_version("1.2-1"), "1.2-1")
+        self.assertEqual(safe_version("1.2 alpha"),  "1.2.alpha")
+        self.assertEqual(safe_version("2.3.4 20050521"), "2.3.4.20050521")
+        self.assertEqual(safe_version("Money$$$Maker"), "Money-Maker")
+        self.assertEqual(safe_version("peak.web"), "peak.web")
+
+    def testSimpleRequirements(self):
+        self.assertEqual(
+            list(parse_requirements('Twis-Ted>=1.2-1')),
+            [Requirement('Twis-Ted',[('>=','1.2-1')], ())]
+        )
+        self.assertEqual(
+            list(parse_requirements('Twisted >=1.2, \ # more\n<2.0')),
+            [Requirement('Twisted',[('>=','1.2'),('<','2.0')], ())]
+        )
+        self.assertEqual(
+            Requirement.parse("FooBar==1.99a3"),
+            Requirement("FooBar", [('==','1.99a3')], ())
+        )
+        self.assertRaises(ValueError,Requirement.parse,">=2.3")
+        self.assertRaises(ValueError,Requirement.parse,"x\\")
+        self.assertRaises(ValueError,Requirement.parse,"x==2 q")
+        self.assertRaises(ValueError,Requirement.parse,"X==1\nY==2")
+        self.assertRaises(ValueError,Requirement.parse,"#")
+
+    def testVersionEquality(self):
+        def c(s1,s2):
+            p1, p2 = parse_version(s1),parse_version(s2)
+            self.assertEqual(p1,p2, (s1,s2,p1,p2))
+
+        c('1.2-rc1', '1.2rc1')
+        c('0.4', '0.4.0')
+        c('0.4.0.0', '0.4.0')
+        c('0.4.0-0', '0.4-0')
+        c('0pl1', '0.0pl1')
+        c('0pre1', '0.0c1')
+        c('0.0.0preview1', '0c1')
+        c('0.0c1', '0-rc1')
+        c('1.2a1', '1.2.a.1'); c('1.2...a', '1.2a')
+
+    def testVersionOrdering(self):
+        def c(s1,s2):
+            p1, p2 = parse_version(s1),parse_version(s2)
+            self.failUnless(p1<p2, (s1,s2,p1,p2))
+
+        c('2.1','2.1.1')
+        c('2a1','2b0')
+        c('2a1','2.1')
+        c('2.3a1', '2.3')
+        c('2.1-1', '2.1-2')
+        c('2.1-1', '2.1.1')
+        c('2.1', '2.1pl4')
+        c('2.1a0-20040501', '2.1')
+        c('1.1', '02.1')
+        c('A56','B27')
+        c('3.2', '3.2.pl0')
+        c('3.2-1', '3.2pl1')
+        c('3.2pl1', '3.2pl1-1')
+        c('0.4', '4.0')
+        c('0.0.4', '0.4.0')
+        c('0pl1', '0.4pl1')
+        c('2.1.0-rc1','2.1.0')
+
+        torture ="""
+        0.80.1-3 0.80.1-2 0.80.1-1 0.79.9999+0.80.0pre4-1
+        0.79.9999+0.80.0pre2-3 0.79.9999+0.80.0pre2-2
+        0.77.2-1 0.77.1-1 0.77.0-1
+        """.split()
+
+        for p,v1 in enumerate(torture):
+            for v2 in torture[p+1:]:
+                c(v2,v1)
diff --git a/Lib/sgmllib.py b/Lib/sgmllib.py
index 08e365b..3e85a91 100644
--- a/Lib/sgmllib.py
+++ b/Lib/sgmllib.py
@@ -269,9 +269,37 @@
             attrname, rest, attrvalue = match.group(1, 2, 3)
             if not rest:
                 attrvalue = attrname
-            elif attrvalue[:1] == '\'' == attrvalue[-1:] or \
-                 attrvalue[:1] == '"' == attrvalue[-1:]:
-                attrvalue = attrvalue[1:-1]
+            else:
+                if (attrvalue[:1] == "'" == attrvalue[-1:] or
+                    attrvalue[:1] == '"' == attrvalue[-1:]):
+                    # strip quotes
+                    attrvalue = attrvalue[1:-1]
+                l = 0
+                new_attrvalue = ''
+                while l < len(attrvalue):
+                    av_match = entityref.match(attrvalue, l)
+                    if (av_match and av_match.group(1) in self.entitydefs and
+                        attrvalue[av_match.end(1)] == ';'):
+                        # only substitute entityrefs ending in ';' since
+                        # otherwise we may break <a href='?p=x&q=y'>
+                        # which is very common
+                        new_attrvalue += self.entitydefs[av_match.group(1)]
+                        l = av_match.end(0)
+                        continue
+                    ch_match = charref.match(attrvalue, l)
+                    if ch_match:
+                        try:
+                            char = chr(int(ch_match.group(1)))
+                            new_attrvalue += char
+                            l = ch_match.end(0)
+                            continue
+                        except ValueError:
+                            # invalid character reference, don't substitute
+                            pass
+                    # all other cases
+                    new_attrvalue += attrvalue[l]
+                    l += 1
+                attrvalue = new_attrvalue
             attrs.append((attrname.lower(), attrvalue))
             k = match.end(0)
         if rawdata[j] == '>':
diff --git a/Lib/site.py b/Lib/site.py
index 5e7ff7b..47eda24 100644
--- a/Lib/site.py
+++ b/Lib/site.py
@@ -69,6 +69,8 @@
 def abs__file__():
     """Set all module' __file__ attribute to an absolute path"""
     for m in sys.modules.values():
+        if hasattr(m, '__loader__'):
+            continue   # don't mess with a PEP 302-supplied __file__
         try:
             m.__file__ = os.path.abspath(m.__file__)
         except AttributeError:
diff --git a/Lib/smtplib.py b/Lib/smtplib.py
index 71d25fd..07916cc 100755
--- a/Lib/smtplib.py
+++ b/Lib/smtplib.py
@@ -255,7 +255,11 @@
                 self.local_hostname = fqdn
             else:
                 # We can't find an fqdn hostname, so use a domain literal
-                addr = socket.gethostbyname(socket.gethostname())
+                addr = '127.0.0.1'
+                try:
+                    addr = socket.gethostbyname(socket.gethostname())
+                except socket.gaierror:
+                    pass
                 self.local_hostname = '[%s]' % addr
 
     def set_debuglevel(self, debuglevel):
diff --git a/Lib/socket.py b/Lib/socket.py
index ee2457f..32a92b4 100644
--- a/Lib/socket.py
+++ b/Lib/socket.py
@@ -121,14 +121,6 @@
     return name
 
 
-#
-# These classes are used by the socket() defined on Windows and BeOS
-# platforms to provide a best-effort implementation of the cleanup
-# semantics needed when sockets can't be dup()ed.
-#
-# These are not actually used on other platforms.
-#
-
 _socketmethods = (
     'bind', 'connect', 'connect_ex', 'fileno', 'listen',
     'getpeername', 'getsockname', 'getsockopt', 'setsockopt',
@@ -183,6 +175,10 @@
         and bufsize arguments are as for the built-in open() function."""
         return _fileobject(self._sock, mode, bufsize)
 
+    family = property(lambda self: self._sock.family, doc="the socket family")
+    type = property(lambda self: self._sock.type, doc="the socket type")
+    proto = property(lambda self: self._sock.proto, doc="the socket protocol")
+
     _s = ("def %s(self, *args): return self._sock.%s(*args)\n\n"
           "%s.__doc__ = _realsocket.%s.__doc__\n")
     for _m in _socketmethods:
diff --git a/Lib/sqlite3/__init__.py b/Lib/sqlite3/__init__.py
new file mode 100644
index 0000000..41ef2b7
--- /dev/null
+++ b/Lib/sqlite3/__init__.py
@@ -0,0 +1,24 @@
+#-*- coding: ISO-8859-1 -*-
+# pysqlite2/__init__.py: the pysqlite2 package.
+#
+# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+#
+# This file is part of pysqlite.
+#
+# This software is provided 'as-is', without any express or implied
+# warranty.  In no event will the authors be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+
+from dbapi2 import *
diff --git a/Lib/sqlite3/dbapi2.py b/Lib/sqlite3/dbapi2.py
new file mode 100644
index 0000000..e0c8a84
--- /dev/null
+++ b/Lib/sqlite3/dbapi2.py
@@ -0,0 +1,84 @@
+#-*- coding: ISO-8859-1 -*-
+# pysqlite2/dbapi2.py: the DB-API 2.0 interface
+#
+# Copyright (C) 2004-2005 Gerhard Häring <gh@ghaering.de>
+#
+# This file is part of pysqlite.
+#
+# This software is provided 'as-is', without any express or implied
+# warranty.  In no event will the authors be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+
+import datetime
+
+paramstyle = "qmark"
+
+threadsafety = 1
+
+apilevel = "2.0"
+
+from _sqlite3 import *
+
+import datetime, time
+
+Date = datetime.date
+
+Time = datetime.time
+
+Timestamp = datetime.datetime
+
+def DateFromTicks(ticks):
+    return apply(Date,time.localtime(ticks)[:3])
+
+def TimeFromTicks(ticks):
+    return apply(Time,time.localtime(ticks)[3:6])
+
+def TimestampFromTicks(ticks):
+    return apply(Timestamp,time.localtime(ticks)[:6])
+
+_major, _minor, _micro = version.split(".")
+version_info = (int(_major), int(_minor), _micro)
+_major, _minor, _micro = sqlite_version.split(".")
+sqlite_version_info = (int(_major), int(_minor), _micro)
+
+Binary = buffer
+
+def adapt_date(val):
+    return val.isoformat()
+
+def adapt_datetime(val):
+    return val.isoformat(" ")
+
+def convert_date(val):
+    return datetime.date(*map(int, val.split("-")))
+
+def convert_timestamp(val):
+    datepart, timepart = val.split(" ")
+    year, month, day = map(int, datepart.split("-"))
+    timepart_full = timepart.split(".")
+    hours, minutes, seconds = map(int, timepart_full[0].split(":"))
+    if len(timepart_full) == 2:
+        microseconds = int(float("0." + timepart_full[1]) * 1000000)
+    else:
+        microseconds = 0
+
+    val = datetime.datetime(year, month, day, hours, minutes, seconds, microseconds)
+    return val
+
+
+register_adapter(datetime.date, adapt_date)
+register_adapter(datetime.datetime, adapt_datetime)
+register_converter("date", convert_date)
+register_converter("timestamp", convert_timestamp)
diff --git a/Lib/sqlite3/test/__init__.py b/Lib/sqlite3/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Lib/sqlite3/test/__init__.py
diff --git a/Lib/sqlite3/test/dbapi.py b/Lib/sqlite3/test/dbapi.py
new file mode 100644
index 0000000..b08da9c
--- /dev/null
+++ b/Lib/sqlite3/test/dbapi.py
@@ -0,0 +1,732 @@
+#-*- coding: ISO-8859-1 -*-
+# pysqlite2/test/dbapi.py: tests for DB-API compliance
+#
+# Copyright (C) 2004-2005 Gerhard Häring <gh@ghaering.de>
+#
+# This file is part of pysqlite.
+#
+# This software is provided 'as-is', without any express or implied
+# warranty.  In no event will the authors be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+
+import unittest
+import threading
+import sqlite3 as sqlite
+
+class ModuleTests(unittest.TestCase):
+    def CheckAPILevel(self):
+        self.assertEqual(sqlite.apilevel, "2.0",
+                         "apilevel is %s, should be 2.0" % sqlite.apilevel)
+
+    def CheckThreadSafety(self):
+        self.assertEqual(sqlite.threadsafety, 1,
+                         "threadsafety is %d, should be 1" % sqlite.threadsafety)
+
+    def CheckParamStyle(self):
+        self.assertEqual(sqlite.paramstyle, "qmark",
+                         "paramstyle is '%s', should be 'qmark'" %
+                         sqlite.paramstyle)
+
+    def CheckWarning(self):
+        self.assert_(issubclass(sqlite.Warning, StandardError),
+                     "Warning is not a subclass of StandardError")
+
+    def CheckError(self):
+        self.failUnless(issubclass(sqlite.Error, StandardError),
+                        "Error is not a subclass of StandardError")
+
+    def CheckInterfaceError(self):
+        self.failUnless(issubclass(sqlite.InterfaceError, sqlite.Error),
+                        "InterfaceError is not a subclass of Error")
+
+    def CheckDatabaseError(self):
+        self.failUnless(issubclass(sqlite.DatabaseError, sqlite.Error),
+                        "DatabaseError is not a subclass of Error")
+
+    def CheckDataError(self):
+        self.failUnless(issubclass(sqlite.DataError, sqlite.DatabaseError),
+                        "DataError is not a subclass of DatabaseError")
+
+    def CheckOperationalError(self):
+        self.failUnless(issubclass(sqlite.OperationalError, sqlite.DatabaseError),
+                        "OperationalError is not a subclass of DatabaseError")
+
+    def CheckIntegrityError(self):
+        self.failUnless(issubclass(sqlite.IntegrityError, sqlite.DatabaseError),
+                        "IntegrityError is not a subclass of DatabaseError")
+
+    def CheckInternalError(self):
+        self.failUnless(issubclass(sqlite.InternalError, sqlite.DatabaseError),
+                        "InternalError is not a subclass of DatabaseError")
+
+    def CheckProgrammingError(self):
+        self.failUnless(issubclass(sqlite.ProgrammingError, sqlite.DatabaseError),
+                        "ProgrammingError is not a subclass of DatabaseError")
+
+    def CheckNotSupportedError(self):
+        self.failUnless(issubclass(sqlite.NotSupportedError,
+                                   sqlite.DatabaseError),
+                        "NotSupportedError is not a subclass of DatabaseError")
+
+class ConnectionTests(unittest.TestCase):
+    def setUp(self):
+        self.cx = sqlite.connect(":memory:")
+        cu = self.cx.cursor()
+        cu.execute("create table test(id integer primary key, name text)")
+        cu.execute("insert into test(name) values (?)", ("foo",))
+
+    def tearDown(self):
+        self.cx.close()
+
+    def CheckCommit(self):
+        self.cx.commit()
+
+    def CheckCommitAfterNoChanges(self):
+        """
+        A commit should also work when no changes were made to the database.
+        """
+        self.cx.commit()
+        self.cx.commit()
+
+    def CheckRollback(self):
+        self.cx.rollback()
+
+    def CheckRollbackAfterNoChanges(self):
+        """
+        A rollback should also work when no changes were made to the database.
+        """
+        self.cx.rollback()
+        self.cx.rollback()
+
+    def CheckCursor(self):
+        cu = self.cx.cursor()
+
+    def CheckFailedOpen(self):
+        YOU_CANNOT_OPEN_THIS = "/foo/bar/bla/23534/mydb.db"
+        try:
+            con = sqlite.connect(YOU_CANNOT_OPEN_THIS)
+        except sqlite.OperationalError:
+            return
+        self.fail("should have raised an OperationalError")
+
+    def CheckClose(self):
+        self.cx.close()
+
+    def CheckExceptions(self):
+        # Optional DB-API extension.
+        self.failUnlessEqual(self.cx.Warning, sqlite.Warning)
+        self.failUnlessEqual(self.cx.Error, sqlite.Error)
+        self.failUnlessEqual(self.cx.InterfaceError, sqlite.InterfaceError)
+        self.failUnlessEqual(self.cx.DatabaseError, sqlite.DatabaseError)
+        self.failUnlessEqual(self.cx.DataError, sqlite.DataError)
+        self.failUnlessEqual(self.cx.OperationalError, sqlite.OperationalError)
+        self.failUnlessEqual(self.cx.IntegrityError, sqlite.IntegrityError)
+        self.failUnlessEqual(self.cx.InternalError, sqlite.InternalError)
+        self.failUnlessEqual(self.cx.ProgrammingError, sqlite.ProgrammingError)
+        self.failUnlessEqual(self.cx.NotSupportedError, sqlite.NotSupportedError)
+
+class CursorTests(unittest.TestCase):
+    def setUp(self):
+        self.cx = sqlite.connect(":memory:")
+        self.cu = self.cx.cursor()
+        self.cu.execute("create table test(id integer primary key, name text, income number)")
+        self.cu.execute("insert into test(name) values (?)", ("foo",))
+
+    def tearDown(self):
+        self.cu.close()
+        self.cx.close()
+
+    def CheckExecuteNoArgs(self):
+        self.cu.execute("delete from test")
+
+    def CheckExecuteIllegalSql(self):
+        try:
+            self.cu.execute("select asdf")
+            self.fail("should have raised an OperationalError")
+        except sqlite.OperationalError:
+            return
+        except:
+            self.fail("raised wrong exception")
+
+    def CheckExecuteTooMuchSql(self):
+        try:
+            self.cu.execute("select 5+4; select 4+5")
+            self.fail("should have raised a Warning")
+        except sqlite.Warning:
+            return
+        except:
+            self.fail("raised wrong exception")
+
+    def CheckExecuteTooMuchSql2(self):
+        self.cu.execute("select 5+4; -- foo bar")
+
+    def CheckExecuteTooMuchSql3(self):
+        self.cu.execute("""
+            select 5+4;
+
+            /*
+            foo
+            */
+            """)
+
+    def CheckExecuteWrongSqlArg(self):
+        try:
+            self.cu.execute(42)
+            self.fail("should have raised a ValueError")
+        except ValueError:
+            return
+        except:
+            self.fail("raised wrong exception.")
+
+    def CheckExecuteArgInt(self):
+        self.cu.execute("insert into test(id) values (?)", (42,))
+
+    def CheckExecuteArgFloat(self):
+        self.cu.execute("insert into test(income) values (?)", (2500.32,))
+
+    def CheckExecuteArgString(self):
+        self.cu.execute("insert into test(name) values (?)", ("Hugo",))
+
+    def CheckExecuteWrongNoOfArgs1(self):
+        # too many parameters
+        try:
+            self.cu.execute("insert into test(id) values (?)", (17, "Egon"))
+            self.fail("should have raised ProgrammingError")
+        except sqlite.ProgrammingError:
+            pass
+
+    def CheckExecuteWrongNoOfArgs2(self):
+        # too little parameters
+        try:
+            self.cu.execute("insert into test(id) values (?)")
+            self.fail("should have raised ProgrammingError")
+        except sqlite.ProgrammingError:
+            pass
+
+    def CheckExecuteWrongNoOfArgs3(self):
+        # no parameters, parameters are needed
+        try:
+            self.cu.execute("insert into test(id) values (?)")
+            self.fail("should have raised ProgrammingError")
+        except sqlite.ProgrammingError:
+            pass
+
+    def CheckExecuteDictMapping(self):
+        self.cu.execute("insert into test(name) values ('foo')")
+        self.cu.execute("select name from test where name=:name", {"name": "foo"})
+        row = self.cu.fetchone()
+        self.failUnlessEqual(row[0], "foo")
+
+    def CheckExecuteDictMappingTooLittleArgs(self):
+        self.cu.execute("insert into test(name) values ('foo')")
+        try:
+            self.cu.execute("select name from test where name=:name and id=:id", {"name": "foo"})
+            self.fail("should have raised ProgrammingError")
+        except sqlite.ProgrammingError:
+            pass
+
+    def CheckExecuteDictMappingNoArgs(self):
+        self.cu.execute("insert into test(name) values ('foo')")
+        try:
+            self.cu.execute("select name from test where name=:name")
+            self.fail("should have raised ProgrammingError")
+        except sqlite.ProgrammingError:
+            pass
+
+    def CheckExecuteDictMappingUnnamed(self):
+        self.cu.execute("insert into test(name) values ('foo')")
+        try:
+            self.cu.execute("select name from test where name=?", {"name": "foo"})
+            self.fail("should have raised ProgrammingError")
+        except sqlite.ProgrammingError:
+            pass
+
+    def CheckClose(self):
+        self.cu.close()
+
+    def CheckRowcountExecute(self):
+        self.cu.execute("delete from test")
+        self.cu.execute("insert into test(name) values ('foo')")
+        self.cu.execute("insert into test(name) values ('foo')")
+        self.cu.execute("update test set name='bar'")
+        self.failUnlessEqual(self.cu.rowcount, 2)
+
+    def CheckRowcountExecutemany(self):
+        self.cu.execute("delete from test")
+        self.cu.executemany("insert into test(name) values (?)", [(1,), (2,), (3,)])
+        self.failUnlessEqual(self.cu.rowcount, 3)
+
+    def CheckTotalChanges(self):
+        self.cu.execute("insert into test(name) values ('foo')")
+        self.cu.execute("insert into test(name) values ('foo')")
+        if self.cx.total_changes < 2:
+            self.fail("total changes reported wrong value")
+
+    # Checks for executemany:
+    # Sequences are required by the DB-API, iterators
+    # enhancements in pysqlite.
+
+    def CheckExecuteManySequence(self):
+        self.cu.executemany("insert into test(income) values (?)", [(x,) for x in range(100, 110)])
+
+    def CheckExecuteManyIterator(self):
+        class MyIter:
+            def __init__(self):
+                self.value = 5
+
+            def next(self):
+                if self.value == 10:
+                    raise StopIteration
+                else:
+                    self.value += 1
+                    return (self.value,)
+
+        self.cu.executemany("insert into test(income) values (?)", MyIter())
+
+    def CheckExecuteManyGenerator(self):
+        def mygen():
+            for i in range(5):
+                yield (i,)
+
+        self.cu.executemany("insert into test(income) values (?)", mygen())
+
+    def CheckExecuteManyWrongSqlArg(self):
+        try:
+            self.cu.executemany(42, [(3,)])
+            self.fail("should have raised a ValueError")
+        except ValueError:
+            return
+        except:
+            self.fail("raised wrong exception.")
+
+    def CheckExecuteManySelect(self):
+        try:
+            self.cu.executemany("select ?", [(3,)])
+            self.fail("should have raised a ProgrammingError")
+        except sqlite.ProgrammingError:
+            return
+        except:
+            self.fail("raised wrong exception.")
+
+    def CheckExecuteManyNotIterable(self):
+        try:
+            self.cu.executemany("insert into test(income) values (?)", 42)
+            self.fail("should have raised a TypeError")
+        except TypeError:
+            return
+        except Exception, e:
+            print "raised", e.__class__
+            self.fail("raised wrong exception.")
+
+    def CheckFetchIter(self):
+        # Optional DB-API extension.
+        self.cu.execute("delete from test")
+        self.cu.execute("insert into test(id) values (?)", (5,))
+        self.cu.execute("insert into test(id) values (?)", (6,))
+        self.cu.execute("select id from test order by id")
+        lst = []
+        for row in self.cu:
+            lst.append(row[0])
+        self.failUnlessEqual(lst[0], 5)
+        self.failUnlessEqual(lst[1], 6)
+
+    def CheckFetchone(self):
+        self.cu.execute("select name from test")
+        row = self.cu.fetchone()
+        self.failUnlessEqual(row[0], "foo")
+        row = self.cu.fetchone()
+        self.failUnlessEqual(row, None)
+
+    def CheckFetchoneNoStatement(self):
+        cur = self.cx.cursor()
+        row = cur.fetchone()
+        self.failUnlessEqual(row, None)
+
+    def CheckArraySize(self):
+        # must default ot 1
+        self.failUnlessEqual(self.cu.arraysize, 1)
+
+        # now set to 2
+        self.cu.arraysize = 2
+
+        # now make the query return 3 rows
+        self.cu.execute("delete from test")
+        self.cu.execute("insert into test(name) values ('A')")
+        self.cu.execute("insert into test(name) values ('B')")
+        self.cu.execute("insert into test(name) values ('C')")
+        self.cu.execute("select name from test")
+        res = self.cu.fetchmany()
+
+        self.failUnlessEqual(len(res), 2)
+
+    def CheckFetchmany(self):
+        self.cu.execute("select name from test")
+        res = self.cu.fetchmany(100)
+        self.failUnlessEqual(len(res), 1)
+        res = self.cu.fetchmany(100)
+        self.failUnlessEqual(res, [])
+
+    def CheckFetchall(self):
+        self.cu.execute("select name from test")
+        res = self.cu.fetchall()
+        self.failUnlessEqual(len(res), 1)
+        res = self.cu.fetchall()
+        self.failUnlessEqual(res, [])
+
+    def CheckSetinputsizes(self):
+        self.cu.setinputsizes([3, 4, 5])
+
+    def CheckSetoutputsize(self):
+        self.cu.setoutputsize(5, 0)
+
+    def CheckSetoutputsizeNoColumn(self):
+        self.cu.setoutputsize(42)
+
+    def CheckCursorConnection(self):
+        # Optional DB-API extension.
+        self.failUnlessEqual(self.cu.connection, self.cx)
+
+    def CheckWrongCursorCallable(self):
+        try:
+            def f(): pass
+            cur = self.cx.cursor(f)
+            self.fail("should have raised a TypeError")
+        except TypeError:
+            return
+        self.fail("should have raised a ValueError")
+
+    def CheckCursorWrongClass(self):
+        class Foo: pass
+        foo = Foo()
+        try:
+            cur = sqlite.Cursor(foo)
+            self.fail("should have raised a ValueError")
+        except TypeError:
+            pass
+
+class ThreadTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+        self.cur = self.con.cursor()
+        self.cur.execute("create table test(id integer primary key, name text, bin binary, ratio number, ts timestamp)")
+
+    def tearDown(self):
+        self.cur.close()
+        self.con.close()
+
+    def CheckConCursor(self):
+        def run(con, errors):
+            try:
+                cur = con.cursor()
+                errors.append("did not raise ProgrammingError")
+                return
+            except sqlite.ProgrammingError:
+                return
+            except:
+                errors.append("raised wrong exception")
+
+        errors = []
+        t = threading.Thread(target=run, kwargs={"con": self.con, "errors": errors})
+        t.start()
+        t.join()
+        if len(errors) > 0:
+            self.fail("\n".join(errors))
+
+    def CheckConCommit(self):
+        def run(con, errors):
+            try:
+                con.commit()
+                errors.append("did not raise ProgrammingError")
+                return
+            except sqlite.ProgrammingError:
+                return
+            except:
+                errors.append("raised wrong exception")
+
+        errors = []
+        t = threading.Thread(target=run, kwargs={"con": self.con, "errors": errors})
+        t.start()
+        t.join()
+        if len(errors) > 0:
+            self.fail("\n".join(errors))
+
+    def CheckConRollback(self):
+        def run(con, errors):
+            try:
+                con.rollback()
+                errors.append("did not raise ProgrammingError")
+                return
+            except sqlite.ProgrammingError:
+                return
+            except:
+                errors.append("raised wrong exception")
+
+        errors = []
+        t = threading.Thread(target=run, kwargs={"con": self.con, "errors": errors})
+        t.start()
+        t.join()
+        if len(errors) > 0:
+            self.fail("\n".join(errors))
+
+    def CheckConClose(self):
+        def run(con, errors):
+            try:
+                con.close()
+                errors.append("did not raise ProgrammingError")
+                return
+            except sqlite.ProgrammingError:
+                return
+            except:
+                errors.append("raised wrong exception")
+
+        errors = []
+        t = threading.Thread(target=run, kwargs={"con": self.con, "errors": errors})
+        t.start()
+        t.join()
+        if len(errors) > 0:
+            self.fail("\n".join(errors))
+
+    def CheckCurImplicitBegin(self):
+        def run(cur, errors):
+            try:
+                cur.execute("insert into test(name) values ('a')")
+                errors.append("did not raise ProgrammingError")
+                return
+            except sqlite.ProgrammingError:
+                return
+            except:
+                errors.append("raised wrong exception")
+
+        errors = []
+        t = threading.Thread(target=run, kwargs={"cur": self.cur, "errors": errors})
+        t.start()
+        t.join()
+        if len(errors) > 0:
+            self.fail("\n".join(errors))
+
+    def CheckCurClose(self):
+        def run(cur, errors):
+            try:
+                cur.close()
+                errors.append("did not raise ProgrammingError")
+                return
+            except sqlite.ProgrammingError:
+                return
+            except:
+                errors.append("raised wrong exception")
+
+        errors = []
+        t = threading.Thread(target=run, kwargs={"cur": self.cur, "errors": errors})
+        t.start()
+        t.join()
+        if len(errors) > 0:
+            self.fail("\n".join(errors))
+
+    def CheckCurExecute(self):
+        def run(cur, errors):
+            try:
+                cur.execute("select name from test")
+                errors.append("did not raise ProgrammingError")
+                return
+            except sqlite.ProgrammingError:
+                return
+            except:
+                errors.append("raised wrong exception")
+
+        errors = []
+        self.cur.execute("insert into test(name) values ('a')")
+        t = threading.Thread(target=run, kwargs={"cur": self.cur, "errors": errors})
+        t.start()
+        t.join()
+        if len(errors) > 0:
+            self.fail("\n".join(errors))
+
+    def CheckCurIterNext(self):
+        def run(cur, errors):
+            try:
+                row = cur.fetchone()
+                errors.append("did not raise ProgrammingError")
+                return
+            except sqlite.ProgrammingError:
+                return
+            except:
+                errors.append("raised wrong exception")
+
+        errors = []
+        self.cur.execute("insert into test(name) values ('a')")
+        self.cur.execute("select name from test")
+        t = threading.Thread(target=run, kwargs={"cur": self.cur, "errors": errors})
+        t.start()
+        t.join()
+        if len(errors) > 0:
+            self.fail("\n".join(errors))
+
+class ConstructorTests(unittest.TestCase):
+    def CheckDate(self):
+        d = sqlite.Date(2004, 10, 28)
+
+    def CheckTime(self):
+        t = sqlite.Time(12, 39, 35)
+
+    def CheckTimestamp(self):
+        ts = sqlite.Timestamp(2004, 10, 28, 12, 39, 35)
+
+    def CheckDateFromTicks(self):
+        d = sqlite.DateFromTicks(42)
+
+    def CheckTimeFromTicks(self):
+        t = sqlite.TimeFromTicks(42)
+
+    def CheckTimestampFromTicks(self):
+        ts = sqlite.TimestampFromTicks(42)
+
+    def CheckBinary(self):
+        b = sqlite.Binary(chr(0) + "'")
+
+class ExtensionTests(unittest.TestCase):
+    def CheckScriptStringSql(self):
+        con = sqlite.connect(":memory:")
+        cur = con.cursor()
+        cur.executescript("""
+            -- bla bla
+            /* a stupid comment */
+            create table a(i);
+            insert into a(i) values (5);
+            """)
+        cur.execute("select i from a")
+        res = cur.fetchone()[0]
+        self.failUnlessEqual(res, 5)
+
+    def CheckScriptStringUnicode(self):
+        con = sqlite.connect(":memory:")
+        cur = con.cursor()
+        cur.executescript(u"""
+            create table a(i);
+            insert into a(i) values (5);
+            select i from a;
+            delete from a;
+            insert into a(i) values (6);
+            """)
+        cur.execute("select i from a")
+        res = cur.fetchone()[0]
+        self.failUnlessEqual(res, 6)
+
+    def CheckScriptErrorIncomplete(self):
+        con = sqlite.connect(":memory:")
+        cur = con.cursor()
+        raised = False
+        try:
+            cur.executescript("create table test(sadfsadfdsa")
+        except sqlite.ProgrammingError:
+            raised = True
+        self.failUnlessEqual(raised, True, "should have raised an exception")
+
+    def CheckScriptErrorNormal(self):
+        con = sqlite.connect(":memory:")
+        cur = con.cursor()
+        raised = False
+        try:
+            cur.executescript("create table test(sadfsadfdsa); select foo from hurz;")
+        except sqlite.OperationalError:
+            raised = True
+        self.failUnlessEqual(raised, True, "should have raised an exception")
+
+    def CheckConnectionExecute(self):
+        con = sqlite.connect(":memory:")
+        result = con.execute("select 5").fetchone()[0]
+        self.failUnlessEqual(result, 5, "Basic test of Connection.execute")
+
+    def CheckConnectionExecutemany(self):
+        con = sqlite.connect(":memory:")
+        con.execute("create table test(foo)")
+        con.executemany("insert into test(foo) values (?)", [(3,), (4,)])
+        result = con.execute("select foo from test order by foo").fetchall()
+        self.failUnlessEqual(result[0][0], 3, "Basic test of Connection.executemany")
+        self.failUnlessEqual(result[1][0], 4, "Basic test of Connection.executemany")
+
+    def CheckConnectionExecutescript(self):
+        con = sqlite.connect(":memory:")
+        con.executescript("create table test(foo); insert into test(foo) values (5);")
+        result = con.execute("select foo from test").fetchone()[0]
+        self.failUnlessEqual(result, 5, "Basic test of Connection.executescript")
+
+class ClosedTests(unittest.TestCase):
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def CheckClosedConCursor(self):
+        con = sqlite.connect(":memory:")
+        con.close()
+        try:
+            cur = con.cursor()
+            self.fail("Should have raised a ProgrammingError")
+        except sqlite.ProgrammingError:
+            pass
+        except:
+            self.fail("Should have raised a ProgrammingError")
+
+    def CheckClosedConCommit(self):
+        con = sqlite.connect(":memory:")
+        con.close()
+        try:
+            con.commit()
+            self.fail("Should have raised a ProgrammingError")
+        except sqlite.ProgrammingError:
+            pass
+        except:
+            self.fail("Should have raised a ProgrammingError")
+
+    def CheckClosedConRollback(self):
+        con = sqlite.connect(":memory:")
+        con.close()
+        try:
+            con.rollback()
+            self.fail("Should have raised a ProgrammingError")
+        except sqlite.ProgrammingError:
+            pass
+        except:
+            self.fail("Should have raised a ProgrammingError")
+
+    def CheckClosedCurExecute(self):
+        con = sqlite.connect(":memory:")
+        cur = con.cursor()
+        con.close()
+        try:
+            cur.execute("select 4")
+            self.fail("Should have raised a ProgrammingError")
+        except sqlite.ProgrammingError:
+            pass
+        except:
+            self.fail("Should have raised a ProgrammingError")
+
+def suite():
+    module_suite = unittest.makeSuite(ModuleTests, "Check")
+    connection_suite = unittest.makeSuite(ConnectionTests, "Check")
+    cursor_suite = unittest.makeSuite(CursorTests, "Check")
+    thread_suite = unittest.makeSuite(ThreadTests, "Check")
+    constructor_suite = unittest.makeSuite(ConstructorTests, "Check")
+    ext_suite = unittest.makeSuite(ExtensionTests, "Check")
+    closed_suite = unittest.makeSuite(ClosedTests, "Check")
+    return unittest.TestSuite((module_suite, connection_suite, cursor_suite, thread_suite, constructor_suite, ext_suite, closed_suite))
+
+def test():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    test()
diff --git a/Lib/sqlite3/test/factory.py b/Lib/sqlite3/test/factory.py
new file mode 100644
index 0000000..8778056
--- /dev/null
+++ b/Lib/sqlite3/test/factory.py
@@ -0,0 +1,164 @@
+#-*- coding: ISO-8859-1 -*-
+# pysqlite2/test/factory.py: tests for the various factories in pysqlite
+#
+# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+#
+# This file is part of pysqlite.
+#
+# This software is provided 'as-is', without any express or implied
+# warranty.  In no event will the authors be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+
+import unittest
+import sqlite3 as sqlite
+
+class MyConnection(sqlite.Connection):
+    def __init__(self, *args, **kwargs):
+        sqlite.Connection.__init__(self, *args, **kwargs)
+
+def dict_factory(cursor, row):
+    d = {}
+    for idx, col in enumerate(cursor.description):
+        d[col[0]] = row[idx]
+    return d
+
+class MyCursor(sqlite.Cursor):
+    def __init__(self, *args, **kwargs):
+        sqlite.Cursor.__init__(self, *args, **kwargs)
+        self.row_factory = dict_factory
+
+class ConnectionFactoryTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:", factory=MyConnection)
+
+    def tearDown(self):
+        self.con.close()
+
+    def CheckIsInstance(self):
+        self.failUnless(isinstance(self.con,
+                                   MyConnection),
+                        "connection is not instance of MyConnection")
+
+class CursorFactoryTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+
+    def tearDown(self):
+        self.con.close()
+
+    def CheckIsInstance(self):
+        cur = self.con.cursor(factory=MyCursor)
+        self.failUnless(isinstance(cur,
+                                   MyCursor),
+                        "cursor is not instance of MyCursor")
+
+class RowFactoryTestsBackwardsCompat(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+
+    def CheckIsProducedByFactory(self):
+        cur = self.con.cursor(factory=MyCursor)
+        cur.execute("select 4+5 as foo")
+        row = cur.fetchone()
+        self.failUnless(isinstance(row,
+                                   dict),
+                        "row is not instance of dict")
+        cur.close()
+
+    def tearDown(self):
+        self.con.close()
+
+class RowFactoryTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+
+    def CheckCustomFactory(self):
+        self.con.row_factory = lambda cur, row: list(row)
+        row = self.con.execute("select 1, 2").fetchone()
+        self.failUnless(isinstance(row,
+                                   list),
+                        "row is not instance of list")
+
+    def CheckSqliteRow(self):
+        self.con.row_factory = sqlite.Row
+        row = self.con.execute("select 1 as a, 2 as b").fetchone()
+        self.failUnless(isinstance(row,
+                                   sqlite.Row),
+                        "row is not instance of sqlite.Row")
+
+        col1, col2 = row["a"], row["b"]
+        self.failUnless(col1 == 1, "by name: wrong result for column 'a'")
+        self.failUnless(col2 == 2, "by name: wrong result for column 'a'")
+
+        col1, col2 = row["A"], row["B"]
+        self.failUnless(col1 == 1, "by name: wrong result for column 'A'")
+        self.failUnless(col2 == 2, "by name: wrong result for column 'B'")
+
+        col1, col2 = row[0], row[1]
+        self.failUnless(col1 == 1, "by index: wrong result for column 0")
+        self.failUnless(col2 == 2, "by index: wrong result for column 1")
+
+    def tearDown(self):
+        self.con.close()
+
+class TextFactoryTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+
+    def CheckUnicode(self):
+        austria = unicode("Österreich", "latin1")
+        row = self.con.execute("select ?", (austria,)).fetchone()
+        self.failUnless(type(row[0]) == unicode, "type of row[0] must be unicode")
+
+    def CheckString(self):
+        self.con.text_factory = str
+        austria = unicode("Österreich", "latin1")
+        row = self.con.execute("select ?", (austria,)).fetchone()
+        self.failUnless(type(row[0]) == str, "type of row[0] must be str")
+        self.failUnless(row[0] == austria.encode("utf-8"), "column must equal original data in UTF-8")
+
+    def CheckCustom(self):
+        self.con.text_factory = lambda x: unicode(x, "utf-8", "ignore")
+        austria = unicode("Österreich", "latin1")
+        row = self.con.execute("select ?", (austria.encode("latin1"),)).fetchone()
+        self.failUnless(type(row[0]) == unicode, "type of row[0] must be unicode")
+        self.failUnless(row[0].endswith(u"reich"), "column must contain original data")
+
+    def CheckOptimizedUnicode(self):
+        self.con.text_factory = sqlite.OptimizedUnicode
+        austria = unicode("Österreich", "latin1")
+        germany = unicode("Deutchland")
+        a_row = self.con.execute("select ?", (austria,)).fetchone()
+        d_row = self.con.execute("select ?", (germany,)).fetchone()
+        self.failUnless(type(a_row[0]) == unicode, "type of non-ASCII row must be unicode")
+        self.failUnless(type(d_row[0]) == str, "type of ASCII-only row must be str")
+
+    def tearDown(self):
+        self.con.close()
+
+def suite():
+    connection_suite = unittest.makeSuite(ConnectionFactoryTests, "Check")
+    cursor_suite = unittest.makeSuite(CursorFactoryTests, "Check")
+    row_suite_compat = unittest.makeSuite(RowFactoryTestsBackwardsCompat, "Check")
+    row_suite = unittest.makeSuite(RowFactoryTests, "Check")
+    text_suite = unittest.makeSuite(TextFactoryTests, "Check")
+    return unittest.TestSuite((connection_suite, cursor_suite, row_suite_compat, row_suite, text_suite))
+
+def test():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    test()
diff --git a/Lib/sqlite3/test/hooks.py b/Lib/sqlite3/test/hooks.py
new file mode 100644
index 0000000..21f7b88
--- /dev/null
+++ b/Lib/sqlite3/test/hooks.py
@@ -0,0 +1,115 @@
+#-*- coding: ISO-8859-1 -*-
+# pysqlite2/test/hooks.py: tests for various SQLite-specific hooks
+#
+# Copyright (C) 2006 Gerhard Häring <gh@ghaering.de>
+#
+# This file is part of pysqlite.
+#
+# This software is provided 'as-is', without any express or implied
+# warranty.  In no event will the authors be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+
+import os, unittest
+import pysqlite2.dbapi2 as sqlite
+
+class CollationTests(unittest.TestCase):
+    def setUp(self):
+        pass
+
+    def tearDown(self):
+        pass
+
+    def CheckCreateCollationNotCallable(self):
+        con = sqlite.connect(":memory:")
+        try:
+            con.create_collation("X", 42)
+            self.fail("should have raised a TypeError")
+        except TypeError, e:
+            self.failUnlessEqual(e.args[0], "parameter must be callable")
+
+    def CheckCreateCollationNotAscii(self):
+        con = sqlite.connect(":memory:")
+        try:
+            con.create_collation("collä", cmp)
+            self.fail("should have raised a ProgrammingError")
+        except sqlite.ProgrammingError, e:
+            pass
+
+    def CheckCollationIsUsed(self):
+        def mycoll(x, y):
+            # reverse order
+            return -cmp(x, y)
+
+        con = sqlite.connect(":memory:")
+        con.create_collation("mycoll", mycoll)
+        sql = """
+            select x from (
+            select 'a' as x
+            union
+            select 'b' as x
+            union
+            select 'c' as x
+            ) order by x collate mycoll
+            """
+        result = con.execute(sql).fetchall()
+        if result[0][0] != "c" or result[1][0] != "b" or result[2][0] != "a":
+            self.fail("the expected order was not returned")
+
+        con.create_collation("mycoll", None)
+        try:
+            result = con.execute(sql).fetchall()
+            self.fail("should have raised an OperationalError")
+        except sqlite.OperationalError, e:
+            self.failUnlessEqual(e.args[0], "no such collation sequence: mycoll")
+
+    def CheckCollationRegisterTwice(self):
+        """
+        Register two different collation functions under the same name.
+        Verify that the last one is actually used.
+        """
+        con = sqlite.connect(":memory:")
+        con.create_collation("mycoll", cmp)
+        con.create_collation("mycoll", lambda x, y: -cmp(x, y))
+        result = con.execute("""
+            select x from (select 'a' as x union select 'b' as x) order by x collate mycoll
+            """).fetchall()
+        if result[0][0] != 'b' or result[1][0] != 'a':
+            self.fail("wrong collation function is used")
+
+    def CheckDeregisterCollation(self):
+        """
+        Register a collation, then deregister it. Make sure an error is raised if we try
+        to use it.
+        """
+        con = sqlite.connect(":memory:")
+        con.create_collation("mycoll", cmp)
+        con.create_collation("mycoll", None)
+        try:
+            con.execute("select 'a' as x union select 'b' as x order by x collate mycoll")
+            self.fail("should have raised an OperationalError")
+        except sqlite.OperationalError, e:
+            if not e.args[0].startswith("no such collation sequence"):
+                self.fail("wrong OperationalError raised")
+
+def suite():
+    collation_suite = unittest.makeSuite(CollationTests, "Check")
+    return unittest.TestSuite((collation_suite,))
+
+def test():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    test()
diff --git a/Lib/sqlite3/test/regression.py b/Lib/sqlite3/test/regression.py
new file mode 100644
index 0000000..648ada5
--- /dev/null
+++ b/Lib/sqlite3/test/regression.py
@@ -0,0 +1,48 @@
+#-*- coding: ISO-8859-1 -*-
+# pysqlite2/test/regression.py: pysqlite regression tests
+#
+# Copyright (C) 2006 Gerhard Häring <gh@ghaering.de>
+#
+# This file is part of pysqlite.
+#
+# This software is provided 'as-is', without any express or implied
+# warranty.  In no event will the authors be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+
+import unittest
+import pysqlite2.dbapi2 as sqlite
+
+class RegressionTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+
+    def tearDown(self):
+        self.con.close()
+
+    def CheckPragmaUserVersion(self):
+        # This used to crash pysqlite because this pragma command returns NULL for the column name
+        cur = self.con.cursor()
+        cur.execute("pragma user_version")
+
+def suite():
+    regression_suite = unittest.makeSuite(RegressionTests, "Check")
+    return unittest.TestSuite((regression_suite,))
+
+def test():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    test()
diff --git a/Lib/sqlite3/test/transactions.py b/Lib/sqlite3/test/transactions.py
new file mode 100644
index 0000000..1f0b19a
--- /dev/null
+++ b/Lib/sqlite3/test/transactions.py
@@ -0,0 +1,156 @@
+#-*- coding: ISO-8859-1 -*-
+# pysqlite2/test/transactions.py: tests transactions
+#
+# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+#
+# This file is part of pysqlite.
+#
+# This software is provided 'as-is', without any express or implied
+# warranty.  In no event will the authors be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+
+import os, unittest
+import sqlite3 as sqlite
+
+def get_db_path():
+    return "sqlite_testdb"
+
+class TransactionTests(unittest.TestCase):
+    def setUp(self):
+        try:
+            os.remove(get_db_path())
+        except:
+            pass
+
+        self.con1 = sqlite.connect(get_db_path(), timeout=0.1)
+        self.cur1 = self.con1.cursor()
+
+        self.con2 = sqlite.connect(get_db_path(), timeout=0.1)
+        self.cur2 = self.con2.cursor()
+
+    def tearDown(self):
+        self.cur1.close()
+        self.con1.close()
+
+        self.cur2.close()
+        self.con2.close()
+
+        os.unlink(get_db_path())
+
+    def CheckDMLdoesAutoCommitBefore(self):
+        self.cur1.execute("create table test(i)")
+        self.cur1.execute("insert into test(i) values (5)")
+        self.cur1.execute("create table test2(j)")
+        self.cur2.execute("select i from test")
+        res = self.cur2.fetchall()
+        self.failUnlessEqual(len(res), 1)
+
+    def CheckInsertStartsTransaction(self):
+        self.cur1.execute("create table test(i)")
+        self.cur1.execute("insert into test(i) values (5)")
+        self.cur2.execute("select i from test")
+        res = self.cur2.fetchall()
+        self.failUnlessEqual(len(res), 0)
+
+    def CheckUpdateStartsTransaction(self):
+        self.cur1.execute("create table test(i)")
+        self.cur1.execute("insert into test(i) values (5)")
+        self.con1.commit()
+        self.cur1.execute("update test set i=6")
+        self.cur2.execute("select i from test")
+        res = self.cur2.fetchone()[0]
+        self.failUnlessEqual(res, 5)
+
+    def CheckDeleteStartsTransaction(self):
+        self.cur1.execute("create table test(i)")
+        self.cur1.execute("insert into test(i) values (5)")
+        self.con1.commit()
+        self.cur1.execute("delete from test")
+        self.cur2.execute("select i from test")
+        res = self.cur2.fetchall()
+        self.failUnlessEqual(len(res), 1)
+
+    def CheckReplaceStartsTransaction(self):
+        self.cur1.execute("create table test(i)")
+        self.cur1.execute("insert into test(i) values (5)")
+        self.con1.commit()
+        self.cur1.execute("replace into test(i) values (6)")
+        self.cur2.execute("select i from test")
+        res = self.cur2.fetchall()
+        self.failUnlessEqual(len(res), 1)
+        self.failUnlessEqual(res[0][0], 5)
+
+    def CheckToggleAutoCommit(self):
+        self.cur1.execute("create table test(i)")
+        self.cur1.execute("insert into test(i) values (5)")
+        self.con1.isolation_level = None
+        self.failUnlessEqual(self.con1.isolation_level, None)
+        self.cur2.execute("select i from test")
+        res = self.cur2.fetchall()
+        self.failUnlessEqual(len(res), 1)
+
+        self.con1.isolation_level = "DEFERRED"
+        self.failUnlessEqual(self.con1.isolation_level , "DEFERRED")
+        self.cur1.execute("insert into test(i) values (5)")
+        self.cur2.execute("select i from test")
+        res = self.cur2.fetchall()
+        self.failUnlessEqual(len(res), 1)
+
+    def CheckRaiseTimeout(self):
+        self.cur1.execute("create table test(i)")
+        self.cur1.execute("insert into test(i) values (5)")
+        try:
+            self.cur2.execute("insert into test(i) values (5)")
+            self.fail("should have raised an OperationalError")
+        except sqlite.OperationalError:
+            pass
+        except:
+            self.fail("should have raised an OperationalError")
+
+class SpecialCommandTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+        self.cur = self.con.cursor()
+
+    def CheckVacuum(self):
+        self.cur.execute("create table test(i)")
+        self.cur.execute("insert into test(i) values (5)")
+        self.cur.execute("vacuum")
+
+    def CheckDropTable(self):
+        self.cur.execute("create table test(i)")
+        self.cur.execute("insert into test(i) values (5)")
+        self.cur.execute("drop table test")
+
+    def CheckPragma(self):
+        self.cur.execute("create table test(i)")
+        self.cur.execute("insert into test(i) values (5)")
+        self.cur.execute("pragma count_changes=1")
+
+    def tearDown(self):
+        self.cur.close()
+        self.con.close()
+
+def suite():
+    default_suite = unittest.makeSuite(TransactionTests, "Check")
+    special_command_suite = unittest.makeSuite(SpecialCommandTests, "Check")
+    return unittest.TestSuite((default_suite, special_command_suite))
+
+def test():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    test()
diff --git a/Lib/sqlite3/test/types.py b/Lib/sqlite3/test/types.py
new file mode 100644
index 0000000..e49f7dd
--- /dev/null
+++ b/Lib/sqlite3/test/types.py
@@ -0,0 +1,339 @@
+#-*- coding: ISO-8859-1 -*-
+# pysqlite2/test/types.py: tests for type conversion and detection
+#
+# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+#
+# This file is part of pysqlite.
+#
+# This software is provided 'as-is', without any express or implied
+# warranty.  In no event will the authors be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+
+import datetime
+import unittest
+import sqlite3 as sqlite
+
+class SqliteTypeTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+        self.cur = self.con.cursor()
+        self.cur.execute("create table test(i integer, s varchar, f number, b blob)")
+
+    def tearDown(self):
+        self.cur.close()
+        self.con.close()
+
+    def CheckString(self):
+        self.cur.execute("insert into test(s) values (?)", (u"Österreich",))
+        self.cur.execute("select s from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], u"Österreich")
+
+    def CheckSmallInt(self):
+        self.cur.execute("insert into test(i) values (?)", (42,))
+        self.cur.execute("select i from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], 42)
+
+    def CheckLargeInt(self):
+        num = 2**40
+        self.cur.execute("insert into test(i) values (?)", (num,))
+        self.cur.execute("select i from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], num)
+
+    def CheckFloat(self):
+        val = 3.14
+        self.cur.execute("insert into test(f) values (?)", (val,))
+        self.cur.execute("select f from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], val)
+
+    def CheckBlob(self):
+        val = buffer("Guglhupf")
+        self.cur.execute("insert into test(b) values (?)", (val,))
+        self.cur.execute("select b from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], val)
+
+    def CheckUnicodeExecute(self):
+        self.cur.execute(u"select 'Österreich'")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], u"Österreich")
+
+class DeclTypesTests(unittest.TestCase):
+    class Foo:
+        def __init__(self, _val):
+            self.val = _val
+
+        def __cmp__(self, other):
+            if not isinstance(other, DeclTypesTests.Foo):
+                raise ValueError
+            if self.val == other.val:
+                return 0
+            else:
+                return 1
+
+        def __conform__(self, protocol):
+            if protocol is sqlite.PrepareProtocol:
+                return self.val
+            else:
+                return None
+
+        def __str__(self):
+            return "<%s>" % self.val
+
+    def setUp(self):
+        self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES)
+        self.cur = self.con.cursor()
+        self.cur.execute("create table test(i int, s str, f float, b bool, u unicode, foo foo, bin blob)")
+
+        # override float, make them always return the same number
+        sqlite.converters["float"] = lambda x: 47.2
+
+        # and implement two custom ones
+        sqlite.converters["bool"] = lambda x: bool(int(x))
+        sqlite.converters["foo"] = DeclTypesTests.Foo
+
+    def tearDown(self):
+        del sqlite.converters["float"]
+        del sqlite.converters["bool"]
+        del sqlite.converters["foo"]
+        self.cur.close()
+        self.con.close()
+
+    def CheckString(self):
+        # default
+        self.cur.execute("insert into test(s) values (?)", ("foo",))
+        self.cur.execute("select s from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], "foo")
+
+    def CheckSmallInt(self):
+        # default
+        self.cur.execute("insert into test(i) values (?)", (42,))
+        self.cur.execute("select i from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], 42)
+
+    def CheckLargeInt(self):
+        # default
+        num = 2**40
+        self.cur.execute("insert into test(i) values (?)", (num,))
+        self.cur.execute("select i from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], num)
+
+    def CheckFloat(self):
+        # custom
+        val = 3.14
+        self.cur.execute("insert into test(f) values (?)", (val,))
+        self.cur.execute("select f from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], 47.2)
+
+    def CheckBool(self):
+        # custom
+        self.cur.execute("insert into test(b) values (?)", (False,))
+        self.cur.execute("select b from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], False)
+
+        self.cur.execute("delete from test")
+        self.cur.execute("insert into test(b) values (?)", (True,))
+        self.cur.execute("select b from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], True)
+
+    def CheckUnicode(self):
+        # default
+        val = u"\xd6sterreich"
+        self.cur.execute("insert into test(u) values (?)", (val,))
+        self.cur.execute("select u from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], val)
+
+    def CheckFoo(self):
+        val = DeclTypesTests.Foo("bla")
+        self.cur.execute("insert into test(foo) values (?)", (val,))
+        self.cur.execute("select foo from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], val)
+
+    def CheckUnsupportedSeq(self):
+        class Bar: pass
+        val = Bar()
+        try:
+            self.cur.execute("insert into test(f) values (?)", (val,))
+            self.fail("should have raised an InterfaceError")
+        except sqlite.InterfaceError:
+            pass
+        except:
+            self.fail("should have raised an InterfaceError")
+
+    def CheckUnsupportedDict(self):
+        class Bar: pass
+        val = Bar()
+        try:
+            self.cur.execute("insert into test(f) values (:val)", {"val": val})
+            self.fail("should have raised an InterfaceError")
+        except sqlite.InterfaceError:
+            pass
+        except:
+            self.fail("should have raised an InterfaceError")
+
+    def CheckBlob(self):
+        # default
+        val = buffer("Guglhupf")
+        self.cur.execute("insert into test(bin) values (?)", (val,))
+        self.cur.execute("select bin from test")
+        row = self.cur.fetchone()
+        self.failUnlessEqual(row[0], val)
+
+class ColNamesTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_COLNAMES|sqlite.PARSE_DECLTYPES)
+        self.cur = self.con.cursor()
+        self.cur.execute("create table test(x foo)")
+
+        sqlite.converters["foo"] = lambda x: "[%s]" % x
+        sqlite.converters["bar"] = lambda x: "<%s>" % x
+        sqlite.converters["exc"] = lambda x: 5/0
+
+    def tearDown(self):
+        del sqlite.converters["foo"]
+        del sqlite.converters["bar"]
+        del sqlite.converters["exc"]
+        self.cur.close()
+        self.con.close()
+
+    def CheckDeclType(self):
+        self.cur.execute("insert into test(x) values (?)", ("xxx",))
+        self.cur.execute("select x from test")
+        val = self.cur.fetchone()[0]
+        self.failUnlessEqual(val, "[xxx]")
+
+    def CheckNone(self):
+        self.cur.execute("insert into test(x) values (?)", (None,))
+        self.cur.execute("select x from test")
+        val = self.cur.fetchone()[0]
+        self.failUnlessEqual(val, None)
+
+    def CheckExc(self):
+        # Exceptions in type converters result in returned Nones
+        self.cur.execute('select 5 as "x [exc]"')
+        val = self.cur.fetchone()[0]
+        self.failUnlessEqual(val, None)
+
+    def CheckColName(self):
+        self.cur.execute("insert into test(x) values (?)", ("xxx",))
+        self.cur.execute('select x as "x [bar]" from test')
+        val = self.cur.fetchone()[0]
+        self.failUnlessEqual(val, "<xxx>")
+
+        # Check if the stripping of colnames works. Everything after the first
+        # whitespace should be stripped.
+        self.failUnlessEqual(self.cur.description[0][0], "x")
+
+    def CheckCursorDescriptionNoRow(self):
+        """
+        cursor.description should at least provide the column name(s), even if
+        no row returned.
+        """
+        self.cur.execute("select * from test where 0 = 1")
+        self.assert_(self.cur.description[0][0] == "x")
+
+class ObjectAdaptationTests(unittest.TestCase):
+    def cast(obj):
+        return float(obj)
+    cast = staticmethod(cast)
+
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+        try:
+            del sqlite.adapters[int]
+        except:
+            pass
+        sqlite.register_adapter(int, ObjectAdaptationTests.cast)
+        self.cur = self.con.cursor()
+
+    def tearDown(self):
+        del sqlite.adapters[(int, sqlite.PrepareProtocol)]
+        self.cur.close()
+        self.con.close()
+
+    def CheckCasterIsUsed(self):
+        self.cur.execute("select ?", (4,))
+        val = self.cur.fetchone()[0]
+        self.failUnlessEqual(type(val), float)
+
+class DateTimeTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:", detect_types=sqlite.PARSE_DECLTYPES)
+        self.cur = self.con.cursor()
+        self.cur.execute("create table test(d date, ts timestamp)")
+
+    def tearDown(self):
+        self.cur.close()
+        self.con.close()
+
+    def CheckSqliteDate(self):
+        d = sqlite.Date(2004, 2, 14)
+        self.cur.execute("insert into test(d) values (?)", (d,))
+        self.cur.execute("select d from test")
+        d2 = self.cur.fetchone()[0]
+        self.failUnlessEqual(d, d2)
+
+    def CheckSqliteTimestamp(self):
+        ts = sqlite.Timestamp(2004, 2, 14, 7, 15, 0)
+        self.cur.execute("insert into test(ts) values (?)", (ts,))
+        self.cur.execute("select ts from test")
+        ts2 = self.cur.fetchone()[0]
+        self.failUnlessEqual(ts, ts2)
+
+    def CheckSqlTimestamp(self):
+        # The date functions are only available in SQLite version 3.1 or later
+        if sqlite.sqlite_version_info < (3, 1):
+            return
+
+        # SQLite's current_timestamp uses UTC time, while datetime.datetime.now() uses local time.
+        now = datetime.datetime.now()
+        self.cur.execute("insert into test(ts) values (current_timestamp)")
+        self.cur.execute("select ts from test")
+        ts = self.cur.fetchone()[0]
+        self.failUnlessEqual(type(ts), datetime.datetime)
+        self.failUnlessEqual(ts.year, now.year)
+
+    def CheckDateTimeSubSeconds(self):
+        ts = sqlite.Timestamp(2004, 2, 14, 7, 15, 0, 500000)
+        self.cur.execute("insert into test(ts) values (?)", (ts,))
+        self.cur.execute("select ts from test")
+        ts2 = self.cur.fetchone()[0]
+        self.failUnlessEqual(ts, ts2)
+
+def suite():
+    sqlite_type_suite = unittest.makeSuite(SqliteTypeTests, "Check")
+    decltypes_type_suite = unittest.makeSuite(DeclTypesTests, "Check")
+    colnames_type_suite = unittest.makeSuite(ColNamesTests, "Check")
+    adaptation_suite = unittest.makeSuite(ObjectAdaptationTests, "Check")
+    date_suite = unittest.makeSuite(DateTimeTests, "Check")
+    return unittest.TestSuite((sqlite_type_suite, decltypes_type_suite, colnames_type_suite, adaptation_suite, date_suite))
+
+def test():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    test()
diff --git a/Lib/sqlite3/test/userfunctions.py b/Lib/sqlite3/test/userfunctions.py
new file mode 100644
index 0000000..ff7db9c
--- /dev/null
+++ b/Lib/sqlite3/test/userfunctions.py
@@ -0,0 +1,330 @@
+#-*- coding: ISO-8859-1 -*-
+# pysqlite2/test/userfunctions.py: tests for user-defined functions and
+#                                  aggregates.
+#
+# Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+#
+# This file is part of pysqlite.
+#
+# This software is provided 'as-is', without any express or implied
+# warranty.  In no event will the authors be held liable for any damages
+# arising from the use of this software.
+#
+# Permission is granted to anyone to use this software for any purpose,
+# including commercial applications, and to alter it and redistribute it
+# freely, subject to the following restrictions:
+#
+# 1. The origin of this software must not be misrepresented; you must not
+#    claim that you wrote the original software. If you use this software
+#    in a product, an acknowledgment in the product documentation would be
+#    appreciated but is not required.
+# 2. Altered source versions must be plainly marked as such, and must not be
+#    misrepresented as being the original software.
+# 3. This notice may not be removed or altered from any source distribution.
+
+import unittest
+import sqlite3 as sqlite
+
+def func_returntext():
+    return "foo"
+def func_returnunicode():
+    return u"bar"
+def func_returnint():
+    return 42
+def func_returnfloat():
+    return 3.14
+def func_returnnull():
+    return None
+def func_returnblob():
+    return buffer("blob")
+def func_raiseexception():
+    5/0
+
+def func_isstring(v):
+    return type(v) is unicode
+def func_isint(v):
+    return type(v) is int
+def func_isfloat(v):
+    return type(v) is float
+def func_isnone(v):
+    return type(v) is type(None)
+def func_isblob(v):
+    return type(v) is buffer
+
+class AggrNoStep:
+    def __init__(self):
+        pass
+
+class AggrNoFinalize:
+    def __init__(self):
+        pass
+
+    def step(self, x):
+        pass
+
+class AggrExceptionInInit:
+    def __init__(self):
+        5/0
+
+    def step(self, x):
+        pass
+
+    def finalize(self):
+        pass
+
+class AggrExceptionInStep:
+    def __init__(self):
+        pass
+
+    def step(self, x):
+        5/0
+
+    def finalize(self):
+        return 42
+
+class AggrExceptionInFinalize:
+    def __init__(self):
+        pass
+
+    def step(self, x):
+        pass
+
+    def finalize(self):
+        5/0
+
+class AggrCheckType:
+    def __init__(self):
+        self.val = None
+
+    def step(self, whichType, val):
+        theType = {"str": unicode, "int": int, "float": float, "None": type(None), "blob": buffer}
+        self.val = int(theType[whichType] is type(val))
+
+    def finalize(self):
+        return self.val
+
+class AggrSum:
+    def __init__(self):
+        self.val = 0.0
+
+    def step(self, val):
+        self.val += val
+
+    def finalize(self):
+        return self.val
+
+class FunctionTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+
+        self.con.create_function("returntext", 0, func_returntext)
+        self.con.create_function("returnunicode", 0, func_returnunicode)
+        self.con.create_function("returnint", 0, func_returnint)
+        self.con.create_function("returnfloat", 0, func_returnfloat)
+        self.con.create_function("returnnull", 0, func_returnnull)
+        self.con.create_function("returnblob", 0, func_returnblob)
+        self.con.create_function("raiseexception", 0, func_raiseexception)
+
+        self.con.create_function("isstring", 1, func_isstring)
+        self.con.create_function("isint", 1, func_isint)
+        self.con.create_function("isfloat", 1, func_isfloat)
+        self.con.create_function("isnone", 1, func_isnone)
+        self.con.create_function("isblob", 1, func_isblob)
+
+    def tearDown(self):
+        self.con.close()
+
+    def CheckFuncRefCount(self):
+        def getfunc():
+            def f():
+                return val
+            return f
+        self.con.create_function("reftest", 0, getfunc())
+        cur = self.con.cursor()
+        cur.execute("select reftest()")
+
+    def CheckFuncReturnText(self):
+        cur = self.con.cursor()
+        cur.execute("select returntext()")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(type(val), unicode)
+        self.failUnlessEqual(val, "foo")
+
+    def CheckFuncReturnUnicode(self):
+        cur = self.con.cursor()
+        cur.execute("select returnunicode()")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(type(val), unicode)
+        self.failUnlessEqual(val, u"bar")
+
+    def CheckFuncReturnInt(self):
+        cur = self.con.cursor()
+        cur.execute("select returnint()")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(type(val), int)
+        self.failUnlessEqual(val, 42)
+
+    def CheckFuncReturnFloat(self):
+        cur = self.con.cursor()
+        cur.execute("select returnfloat()")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(type(val), float)
+        if val < 3.139 or val > 3.141:
+            self.fail("wrong value")
+
+    def CheckFuncReturnNull(self):
+        cur = self.con.cursor()
+        cur.execute("select returnnull()")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(type(val), type(None))
+        self.failUnlessEqual(val, None)
+
+    def CheckFuncReturnBlob(self):
+        cur = self.con.cursor()
+        cur.execute("select returnblob()")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(type(val), buffer)
+        self.failUnlessEqual(val, buffer("blob"))
+
+    def CheckFuncException(self):
+        cur = self.con.cursor()
+        cur.execute("select raiseexception()")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, None)
+
+    def CheckParamString(self):
+        cur = self.con.cursor()
+        cur.execute("select isstring(?)", ("foo",))
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 1)
+
+    def CheckParamInt(self):
+        cur = self.con.cursor()
+        cur.execute("select isint(?)", (42,))
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 1)
+
+    def CheckParamFloat(self):
+        cur = self.con.cursor()
+        cur.execute("select isfloat(?)", (3.14,))
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 1)
+
+    def CheckParamNone(self):
+        cur = self.con.cursor()
+        cur.execute("select isnone(?)", (None,))
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 1)
+
+    def CheckParamBlob(self):
+        cur = self.con.cursor()
+        cur.execute("select isblob(?)", (buffer("blob"),))
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 1)
+
+class AggregateTests(unittest.TestCase):
+    def setUp(self):
+        self.con = sqlite.connect(":memory:")
+        cur = self.con.cursor()
+        cur.execute("""
+            create table test(
+                t text,
+                i integer,
+                f float,
+                n,
+                b blob
+                )
+            """)
+        cur.execute("insert into test(t, i, f, n, b) values (?, ?, ?, ?, ?)",
+            ("foo", 5, 3.14, None, buffer("blob"),))
+
+        self.con.create_aggregate("nostep", 1, AggrNoStep)
+        self.con.create_aggregate("nofinalize", 1, AggrNoFinalize)
+        self.con.create_aggregate("excInit", 1, AggrExceptionInInit)
+        self.con.create_aggregate("excStep", 1, AggrExceptionInStep)
+        self.con.create_aggregate("excFinalize", 1, AggrExceptionInFinalize)
+        self.con.create_aggregate("checkType", 2, AggrCheckType)
+        self.con.create_aggregate("mysum", 1, AggrSum)
+
+    def tearDown(self):
+        #self.cur.close()
+        #self.con.close()
+        pass
+
+    def CheckAggrNoStep(self):
+        cur = self.con.cursor()
+        cur.execute("select nostep(t) from test")
+
+    def CheckAggrNoFinalize(self):
+        cur = self.con.cursor()
+        cur.execute("select nofinalize(t) from test")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, None)
+
+    def CheckAggrExceptionInInit(self):
+        cur = self.con.cursor()
+        cur.execute("select excInit(t) from test")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, None)
+
+    def CheckAggrExceptionInStep(self):
+        cur = self.con.cursor()
+        cur.execute("select excStep(t) from test")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 42)
+
+    def CheckAggrExceptionInFinalize(self):
+        cur = self.con.cursor()
+        cur.execute("select excFinalize(t) from test")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, None)
+
+    def CheckAggrCheckParamStr(self):
+        cur = self.con.cursor()
+        cur.execute("select checkType('str', ?)", ("foo",))
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 1)
+
+    def CheckAggrCheckParamInt(self):
+        cur = self.con.cursor()
+        cur.execute("select checkType('int', ?)", (42,))
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 1)
+
+    def CheckAggrCheckParamFloat(self):
+        cur = self.con.cursor()
+        cur.execute("select checkType('float', ?)", (3.14,))
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 1)
+
+    def CheckAggrCheckParamNone(self):
+        cur = self.con.cursor()
+        cur.execute("select checkType('None', ?)", (None,))
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 1)
+
+    def CheckAggrCheckParamBlob(self):
+        cur = self.con.cursor()
+        cur.execute("select checkType('blob', ?)", (buffer("blob"),))
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 1)
+
+    def CheckAggrCheckAggrSum(self):
+        cur = self.con.cursor()
+        cur.execute("delete from test")
+        cur.executemany("insert into test(i) values (?)", [(10,), (20,), (30,)])
+        cur.execute("select mysum(i) from test")
+        val = cur.fetchone()[0]
+        self.failUnlessEqual(val, 60)
+
+def suite():
+    function_suite = unittest.makeSuite(FunctionTests, "Check")
+    aggregate_suite = unittest.makeSuite(AggregateTests, "Check")
+    return unittest.TestSuite((function_suite, aggregate_suite))
+
+def test():
+    runner = unittest.TextTestRunner()
+    runner.run(suite())
+
+if __name__ == "__main__":
+    test()
diff --git a/Lib/sre.py b/Lib/sre.py
new file mode 100644
index 0000000..390094a
--- /dev/null
+++ b/Lib/sre.py
@@ -0,0 +1,10 @@
+"""This file is only retained for backwards compatibility.
+It will be removed in the future.  sre was moved to re in version 2.5.
+"""
+
+import warnings
+warnings.warn("The sre module is deprecated, please import re.",
+              DeprecationWarning, 2)
+
+from re import *
+from re import __all__
diff --git a/Lib/subprocess.py b/Lib/subprocess.py
index 6827244..3cd0df5 100644
--- a/Lib/subprocess.py
+++ b/Lib/subprocess.py
@@ -414,7 +414,13 @@
 
 def _cleanup():
     for inst in _active[:]:
-        inst.poll()
+        if inst.poll(_deadstate=sys.maxint) >= 0:
+            try:
+                _active.remove(inst)
+            except ValueError:
+                # This can happen if two threads create a new Popen instance.
+                # It's harmless that it was already removed, so ignore.
+                pass
 
 PIPE = -1
 STDOUT = -2
@@ -527,6 +533,7 @@
         """Create new Popen instance."""
         _cleanup()
 
+        self._child_created = False
         if not isinstance(bufsize, (int, long)):
             raise TypeError("bufsize must be an integer")
 
@@ -592,14 +599,24 @@
             else:
                 self.stderr = os.fdopen(errread, 'rb', bufsize)
 
-        _active.append(self)
-
 
     def _translate_newlines(self, data):
         data = data.replace("\r\n", "\n")
         data = data.replace("\r", "\n")
         return data
 
+
+    def __del__(self):
+        if not self._child_created:
+            # We didn't get to successfully create a child process.
+            return
+        # In case the child hasn't been waited on, check if it's done.
+        self.poll(_deadstate=sys.maxint)
+        if self.returncode is None:
+            # Child is still running, keep us alive until we can wait on it.
+            _active.append(self)
+
+
     def communicate(self, input=None):
         """Interact with process: Send data to stdin.  Read data from
         stdout and stderr, until end-of-file is reached.  Wait for
@@ -777,6 +794,7 @@
                 raise WindowsError(*e.args)
 
             # Retain the process handle, but close the thread handle
+            self._child_created = True
             self._handle = hp
             self.pid = pid
             ht.Close()
@@ -795,13 +813,12 @@
                 errwrite.Close()
 
 
-        def poll(self):
+        def poll(self, _deadstate=None):
             """Check if child process has terminated.  Returns returncode
             attribute."""
             if self.returncode is None:
                 if WaitForSingleObject(self._handle, 0) == WAIT_OBJECT_0:
                     self.returncode = GetExitCodeProcess(self._handle)
-                    _active.remove(self)
             return self.returncode
 
 
@@ -811,7 +828,6 @@
             if self.returncode is None:
                 obj = WaitForSingleObject(self._handle, INFINITE)
                 self.returncode = GetExitCodeProcess(self._handle)
-                _active.remove(self)
             return self.returncode
 
 
@@ -958,6 +974,7 @@
             self._set_cloexec_flag(errpipe_write)
 
             self.pid = os.fork()
+            self._child_created = True
             if self.pid == 0:
                 # Child
                 try:
@@ -1042,10 +1059,8 @@
                 # Should never happen
                 raise RuntimeError("Unknown child exit status!")
 
-            _active.remove(self)
 
-
-        def poll(self):
+        def poll(self, _deadstate=None):
             """Check if child process has terminated.  Returns returncode
             attribute."""
             if self.returncode is None:
@@ -1054,7 +1069,8 @@
                     if pid == self.pid:
                         self._handle_exitstatus(sts)
                 except os.error:
-                    pass
+                    if _deadstate is not None:
+                        self.returncode = _deadstate
             return self.returncode
 
 
diff --git a/Lib/telnetlib.py b/Lib/telnetlib.py
index 8a2513b..3523037 100644
--- a/Lib/telnetlib.py
+++ b/Lib/telnetlib.py
@@ -438,7 +438,7 @@
                     else:
                         self.iacseq += c
                 elif len(self.iacseq) == 1:
-                    'IAC: IAC CMD [OPTION only for WILL/WONT/DO/DONT]'
+                    # 'IAC: IAC CMD [OPTION only for WILL/WONT/DO/DONT]'
                     if c in (DO, DONT, WILL, WONT):
                         self.iacseq += c
                         continue
diff --git a/Lib/test/check_soundcard.vbs b/Lib/test/check_soundcard.vbs
new file mode 100644
index 0000000..8c21852
--- /dev/null
+++ b/Lib/test/check_soundcard.vbs
@@ -0,0 +1,13 @@
+rem Check for a working sound-card - exit with 0 if OK, 1 otherwise.
+set wmi = GetObject("winmgmts:")
+set scs = wmi.InstancesOf("win32_sounddevice")
+for each sc in scs
+   set status = sc.Properties_("Status")
+   wscript.Echo(sc.Properties_("Name") + "/" + status)
+   if status = "OK" then
+       wscript.Quit 0 rem normal exit
+   end if
+next
+rem No sound card found - exit with status code of 1
+wscript.Quit 1
+
diff --git a/Lib/test/crashers/README b/Lib/test/crashers/README
index 9369282..070c3f1 100644
--- a/Lib/test/crashers/README
+++ b/Lib/test/crashers/README
@@ -13,3 +13,8 @@
 Put as much info into a docstring or comments to help determine
 the cause of the failure.  Particularly note if the cause is
 system or environment dependent and what the variables are.
+
+Once the crash is fixed, the test case should be moved into an appropriate
+test (even if it was originally from the test suite).  This ensures the
+regression doesn't happen again.  And if it does, it should be easier
+to track down.
diff --git a/Lib/test/crashers/dictresize_attack.py b/Lib/test/crashers/dictresize_attack.py
new file mode 100644
index 0000000..1895791
--- /dev/null
+++ b/Lib/test/crashers/dictresize_attack.py
@@ -0,0 +1,32 @@
+# http://www.python.org/sf/1456209
+
+# A dictresize() attack.  If oldtable == mp->ma_smalltable then pure
+# Python code can mangle with mp->ma_smalltable while it is being walked
+# over.
+
+class X(object):
+
+    def __hash__(self):
+        return 5
+
+    def __eq__(self, other):
+        if resizing:
+            d.clear()
+        return False
+
+
+d = {}
+
+resizing = False
+
+d[X()] = 1
+d[X()] = 2
+d[X()] = 3
+d[X()] = 4
+d[X()] = 5
+
+# now trigger a resize
+resizing = True
+d[9] = 6
+
+# ^^^ I get Segmentation fault or Illegal instruction here.
diff --git a/Lib/test/crashers/nasty_eq_vs_dict.py b/Lib/test/crashers/nasty_eq_vs_dict.py
new file mode 100644
index 0000000..3f3083d
--- /dev/null
+++ b/Lib/test/crashers/nasty_eq_vs_dict.py
@@ -0,0 +1,47 @@
+# from http://mail.python.org/pipermail/python-dev/2001-June/015239.html
+
+# if you keep changing a dictionary while looking up a key, you can
+# provoke an infinite recursion in C
+
+# At the time neither Tim nor Michael could be bothered to think of a
+# way to fix it.
+
+class Yuck:
+    def __init__(self):
+        self.i = 0
+
+    def make_dangerous(self):
+        self.i = 1
+
+    def __hash__(self):
+        # direct to slot 4 in table of size 8; slot 12 when size 16
+        return 4 + 8
+
+    def __eq__(self, other):
+        if self.i == 0:
+            # leave dict alone
+            pass
+        elif self.i == 1:
+            # fiddle to 16 slots
+            self.__fill_dict(6)
+            self.i = 2
+        else:
+            # fiddle to 8 slots
+            self.__fill_dict(4)
+            self.i = 1
+
+        return 1
+
+    def __fill_dict(self, n):
+        self.i = 0
+        dict.clear()
+        for i in range(n):
+            dict[i] = i
+        dict[self] = "OK!"
+
+y = Yuck()
+dict = {y: "OK!"}
+
+z = Yuck()
+y.make_dangerous()
+print dict[z]
diff --git a/Lib/test/empty.vbs b/Lib/test/empty.vbs
new file mode 100644
index 0000000..f35f076
--- /dev/null
+++ b/Lib/test/empty.vbs
@@ -0,0 +1 @@
+'Empty VBS file, does nothing.  Helper for Lib\test\test_startfile.py.
\ No newline at end of file
diff --git a/Lib/test/fork_wait.py b/Lib/test/fork_wait.py
new file mode 100644
index 0000000..5600bdb
--- /dev/null
+++ b/Lib/test/fork_wait.py
@@ -0,0 +1,71 @@
+"""This test case provides support for checking forking and wait behavior.
+
+To test different wait behavior, overrise the wait_impl method.
+
+We want fork1() semantics -- only the forking thread survives in the
+child after a fork().
+
+On some systems (e.g. Solaris without posix threads) we find that all
+active threads survive in the child after a fork(); this is an error.
+
+While BeOS doesn't officially support fork and native threading in
+the same application, the present example should work just fine.  DC
+"""
+
+import os, sys, time, thread, unittest
+from test.test_support import TestSkipped
+
+LONGSLEEP = 2
+SHORTSLEEP = 0.5
+NUM_THREADS = 4
+
+class ForkWait(unittest.TestCase):
+
+    def setUp(self):
+        self.alive = {}
+        self.stop = 0
+
+    def f(self, id):
+        while not self.stop:
+            self.alive[id] = os.getpid()
+            try:
+                time.sleep(SHORTSLEEP)
+            except IOError:
+                pass
+
+    def wait_impl(self, cpid):
+        spid, status = os.waitpid(cpid, 0)
+        self.assertEquals(spid, cpid)
+        self.assertEquals(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
+
+    def test_wait(self):
+        for i in range(NUM_THREADS):
+            thread.start_new(self.f, (i,))
+
+        time.sleep(LONGSLEEP)
+
+        a = self.alive.keys()
+        a.sort()
+        self.assertEquals(a, range(NUM_THREADS))
+
+        prefork_lives = self.alive.copy()
+
+        if sys.platform in ['unixware7']:
+            cpid = os.fork1()
+        else:
+            cpid = os.fork()
+
+        if cpid == 0:
+            # Child
+            time.sleep(LONGSLEEP)
+            n = 0
+            for key in self.alive:
+                if self.alive[key] != prefork_lives[key]:
+                    n += 1
+            os._exit(n)
+        else:
+            # Parent
+            self.wait_impl(cpid)
+            # Tell threads to die
+            self.stop = 1
+            time.sleep(2*SHORTSLEEP) # Wait for threads to die
diff --git a/Lib/test/leakers/README.txt b/Lib/test/leakers/README.txt
index 69ee35a..beeee0e 100644
--- a/Lib/test/leakers/README.txt
+++ b/Lib/test/leakers/README.txt
@@ -5,6 +5,15 @@
 doesn't increase, the bug has been fixed and the file should be removed
 from the repository.
 
+Note:  be careful to check for cyclic garbage.  Sometimes it may be helpful
+to define the leak function like:
+
+def leak():
+    def inner_leak():
+        # this is the function that leaks, but also creates cycles
+    inner_leak()
+    gc.collect() ; gc.collect() ; gc.collect()
+
 Here's an example interpreter session for test_gestalt which still leaks:
 
 >>> from test.leakers.test_gestalt import leak
@@ -17,3 +26,7 @@
 [28940 refs]
 >>> 
 
+Once the leak is fixed, the test case should be moved into an appropriate
+test (even if it was originally from the test suite).  This ensures the
+regression doesn't happen again.  And if it does, it should be easier
+to track down.
diff --git a/Lib/test/leakers/test_ctypes.py b/Lib/test/leakers/test_ctypes.py
new file mode 100644
index 0000000..0f9a2cd
--- /dev/null
+++ b/Lib/test/leakers/test_ctypes.py
@@ -0,0 +1,16 @@
+
+# Taken from Lib/ctypes/test/test_keeprefs.py, PointerToStructure.test().
+# When this leak is fixed, remember to remove from Misc/build.sh LEAKY_TESTS.
+
+from ctypes import Structure, c_int, POINTER
+import gc
+
+def leak_inner():
+    class POINT(Structure):
+        _fields_ = [("x", c_int)]
+    class RECT(Structure):
+        _fields_ = [("a", POINTER(POINT))]
+
+def leak():
+    leak_inner()
+    gc.collect()
diff --git a/Lib/test/leakers/test_selftype.py b/Lib/test/leakers/test_selftype.py
new file mode 100644
index 0000000..4207c32
--- /dev/null
+++ b/Lib/test/leakers/test_selftype.py
@@ -0,0 +1,13 @@
+# Reference cycles involving only the ob_type field are rather uncommon
+# but possible.  Inspired by SF bug 1469629.
+
+import gc
+
+def leak():
+    class T(type):
+        pass
+    class U(type):
+        __metaclass__ = T
+    U.__class__ = U
+    del U
+    gc.collect(); gc.collect(); gc.collect()
diff --git a/Lib/test/leakers/test_tee.py b/Lib/test/leakers/test_tee.py
deleted file mode 100644
index 4ce24ca..0000000
--- a/Lib/test/leakers/test_tee.py
+++ /dev/null
@@ -1,19 +0,0 @@
-
-# Test case taken from test_itertools
-# See http://mail.python.org/pipermail/python-dev/2005-November/058339.html
-
-from itertools import tee
-
-def leak():
-    def fib():
-        def yield_identity_forever(g):
-            while 1:
-                yield g
-        def _fib():
-            for i in yield_identity_forever(head):
-                yield i
-        head, tail, result = tee(_fib(), 3)
-        return result
-
-    x = fib()
-    x.next()
diff --git a/Lib/test/output/test_augassign b/Lib/test/output/test_augassign
deleted file mode 100644
index b66b7e5..0000000
--- a/Lib/test/output/test_augassign
+++ /dev/null
@@ -1,54 +0,0 @@
-test_augassign
-6.0
-6
-[6.0]
-6
-6.0
-6
-[1, 2, 3, 4, 1, 2, 3, 4]
-[1, 2, 1, 2, 3]
-True
-True
-True
-11
-True
-12
-True
-True
-13
-__add__ called
-__radd__ called
-__iadd__ called
-__sub__ called
-__rsub__ called
-__isub__ called
-__mul__ called
-__rmul__ called
-__imul__ called
-__truediv__ called
-__rtruediv__ called
-__itruediv__ called
-__floordiv__ called
-__rfloordiv__ called
-__ifloordiv__ called
-__mod__ called
-__rmod__ called
-__imod__ called
-__pow__ called
-__rpow__ called
-__ipow__ called
-__or__ called
-__ror__ called
-__ior__ called
-__and__ called
-__rand__ called
-__iand__ called
-__xor__ called
-__rxor__ called
-__ixor__ called
-__rshift__ called
-__rrshift__ called
-__irshift__ called
-__lshift__ called
-__rlshift__ called
-__ilshift__ called
diff --git a/Lib/test/output/test_coercion b/Lib/test/output/test_coercion
deleted file mode 100644
index ad35b60..0000000
--- a/Lib/test/output/test_coercion
+++ /dev/null
@@ -1,1054 +0,0 @@
-test_coercion
-2 + 2 = 4
-2 += 2 => 4
-2 - 2 = 0
-2 -= 2 => 0
-2 * 2 = 4
-2 *= 2 => 4
-2 / 2 = 1
-2 /= 2 => 1
-2 ** 2 = 4
-2 **= 2 => 4
-2 % 2 = 0
-2 %= 2 => 0
-2 + 4.0 = 6.0
-2 += 4.0 => 6.0
-2 - 4.0 = -2.0
-2 -= 4.0 => -2.0
-2 * 4.0 = 8.0
-2 *= 4.0 => 8.0
-2 / 4.0 = 0.5
-2 /= 4.0 => 0.5
-2 ** 4.0 = 16.0
-2 **= 4.0 => 16.0
-2 % 4.0 = 2.0
-2 %= 4.0 => 2.0
-2 + 2 = 4
-2 += 2 => 4
-2 - 2 = 0
-2 -= 2 => 0
-2 * 2 = 4
-2 *= 2 => 4
-2 / 2 = 1
-2 /= 2 => 1
-2 ** 2 = 4
-2 **= 2 => 4
-2 % 2 = 0
-2 %= 2 => 0
-2 + (2+0j) = (4.0 + 0.0j)
-2 += (2+0j) => (4.0 + 0.0j)
-2 - (2+0j) = (0.0 + 0.0j)
-2 -= (2+0j) => (0.0 + 0.0j)
-2 * (2+0j) = (4.0 + 0.0j)
-2 *= (2+0j) => (4.0 + 0.0j)
-2 / (2+0j) = (1.0 + 0.0j)
-2 /= (2+0j) => (1.0 + 0.0j)
-2 ** (2+0j) = (4.0 + 0.0j)
-2 **= (2+0j) => (4.0 + 0.0j)
-2 % (2+0j) = (0.0 + 0.0j)
-2 %= (2+0j) => (0.0 + 0.0j)
-2 + [1] ... exceptions.TypeError
-2 += [1] ... exceptions.TypeError
-2 - [1] ... exceptions.TypeError
-2 -= [1] ... exceptions.TypeError
-2 * [1] = [1, 1]
-2 *= [1] => [1, 1]
-2 / [1] ... exceptions.TypeError
-2 /= [1] ... exceptions.TypeError
-2 ** [1] ... exceptions.TypeError
-2 **= [1] ... exceptions.TypeError
-2 % [1] ... exceptions.TypeError
-2 %= [1] ... exceptions.TypeError
-2 + (2,) ... exceptions.TypeError
-2 += (2,) ... exceptions.TypeError
-2 - (2,) ... exceptions.TypeError
-2 -= (2,) ... exceptions.TypeError
-2 * (2,) = (2, 2)
-2 *= (2,) => (2, 2)
-2 / (2,) ... exceptions.TypeError
-2 /= (2,) ... exceptions.TypeError
-2 ** (2,) ... exceptions.TypeError
-2 **= (2,) ... exceptions.TypeError
-2 % (2,) ... exceptions.TypeError
-2 %= (2,) ... exceptions.TypeError
-2 + None ... exceptions.TypeError
-2 += None ... exceptions.TypeError
-2 - None ... exceptions.TypeError
-2 -= None ... exceptions.TypeError
-2 * None ... exceptions.TypeError
-2 *= None ... exceptions.TypeError
-2 / None ... exceptions.TypeError
-2 /= None ... exceptions.TypeError
-2 ** None ... exceptions.TypeError
-2 **= None ... exceptions.TypeError
-2 % None ... exceptions.TypeError
-2 %= None ... exceptions.TypeError
-2 + <MethodNumber 2> = 4
-2 += <MethodNumber 2> => 4
-2 - <MethodNumber 2> = 0
-2 -= <MethodNumber 2> => 0
-2 * <MethodNumber 2> = 4
-2 *= <MethodNumber 2> => 4
-2 / <MethodNumber 2> = 1
-2 /= <MethodNumber 2> => 1
-2 ** <MethodNumber 2> = 4
-2 **= <MethodNumber 2> => 4
-2 % <MethodNumber 2> = 0
-2 %= <MethodNumber 2> => 0
-2 + <CoerceNumber 2> = 4
-2 += <CoerceNumber 2> => 4
-2 - <CoerceNumber 2> = 0
-2 -= <CoerceNumber 2> => 0
-2 * <CoerceNumber 2> = 4
-2 *= <CoerceNumber 2> => 4
-2 / <CoerceNumber 2> = 1
-2 /= <CoerceNumber 2> => 1
-2 ** <CoerceNumber 2> = 4
-2 **= <CoerceNumber 2> => 4
-2 % <CoerceNumber 2> = 0
-2 %= <CoerceNumber 2> => 0
-4.0 + 2 = 6.0
-4.0 += 2 => 6.0
-4.0 - 2 = 2.0
-4.0 -= 2 => 2.0
-4.0 * 2 = 8.0
-4.0 *= 2 => 8.0
-4.0 / 2 = 2.0
-4.0 /= 2 => 2.0
-4.0 ** 2 = 16.0
-4.0 **= 2 => 16.0
-4.0 % 2 = 0.0
-4.0 %= 2 => 0.0
-4.0 + 4.0 = 8.0
-4.0 += 4.0 => 8.0
-4.0 - 4.0 = 0.0
-4.0 -= 4.0 => 0.0
-4.0 * 4.0 = 16.0
-4.0 *= 4.0 => 16.0
-4.0 / 4.0 = 1.0
-4.0 /= 4.0 => 1.0
-4.0 ** 4.0 = 256.0
-4.0 **= 4.0 => 256.0
-4.0 % 4.0 = 0.0
-4.0 %= 4.0 => 0.0
-4.0 + 2 = 6.0
-4.0 += 2 => 6.0
-4.0 - 2 = 2.0
-4.0 -= 2 => 2.0
-4.0 * 2 = 8.0
-4.0 *= 2 => 8.0
-4.0 / 2 = 2.0
-4.0 /= 2 => 2.0
-4.0 ** 2 = 16.0
-4.0 **= 2 => 16.0
-4.0 % 2 = 0.0
-4.0 %= 2 => 0.0
-4.0 + (2+0j) = (6.0 + 0.0j)
-4.0 += (2+0j) => (6.0 + 0.0j)
-4.0 - (2+0j) = (2.0 + 0.0j)
-4.0 -= (2+0j) => (2.0 + 0.0j)
-4.0 * (2+0j) = (8.0 + 0.0j)
-4.0 *= (2+0j) => (8.0 + 0.0j)
-4.0 / (2+0j) = (2.0 + 0.0j)
-4.0 /= (2+0j) => (2.0 + 0.0j)
-4.0 ** (2+0j) = (16.0 + 0.0j)
-4.0 **= (2+0j) => (16.0 + 0.0j)
-4.0 % (2+0j) = (0.0 + 0.0j)
-4.0 %= (2+0j) => (0.0 + 0.0j)
-4.0 + [1] ... exceptions.TypeError
-4.0 += [1] ... exceptions.TypeError
-4.0 - [1] ... exceptions.TypeError
-4.0 -= [1] ... exceptions.TypeError
-4.0 * [1] ... exceptions.TypeError
-4.0 *= [1] ... exceptions.TypeError
-4.0 / [1] ... exceptions.TypeError
-4.0 /= [1] ... exceptions.TypeError
-4.0 ** [1] ... exceptions.TypeError
-4.0 **= [1] ... exceptions.TypeError
-4.0 % [1] ... exceptions.TypeError
-4.0 %= [1] ... exceptions.TypeError
-4.0 + (2,) ... exceptions.TypeError
-4.0 += (2,) ... exceptions.TypeError
-4.0 - (2,) ... exceptions.TypeError
-4.0 -= (2,) ... exceptions.TypeError
-4.0 * (2,) ... exceptions.TypeError
-4.0 *= (2,) ... exceptions.TypeError
-4.0 / (2,) ... exceptions.TypeError
-4.0 /= (2,) ... exceptions.TypeError
-4.0 ** (2,) ... exceptions.TypeError
-4.0 **= (2,) ... exceptions.TypeError
-4.0 % (2,) ... exceptions.TypeError
-4.0 %= (2,) ... exceptions.TypeError
-4.0 + None ... exceptions.TypeError
-4.0 += None ... exceptions.TypeError
-4.0 - None ... exceptions.TypeError
-4.0 -= None ... exceptions.TypeError
-4.0 * None ... exceptions.TypeError
-4.0 *= None ... exceptions.TypeError
-4.0 / None ... exceptions.TypeError
-4.0 /= None ... exceptions.TypeError
-4.0 ** None ... exceptions.TypeError
-4.0 **= None ... exceptions.TypeError
-4.0 % None ... exceptions.TypeError
-4.0 %= None ... exceptions.TypeError
-4.0 + <MethodNumber 2> = 6.0
-4.0 += <MethodNumber 2> => 6.0
-4.0 - <MethodNumber 2> = 2.0
-4.0 -= <MethodNumber 2> => 2.0
-4.0 * <MethodNumber 2> = 8.0
-4.0 *= <MethodNumber 2> => 8.0
-4.0 / <MethodNumber 2> = 2.0
-4.0 /= <MethodNumber 2> => 2.0
-4.0 ** <MethodNumber 2> = 16.0
-4.0 **= <MethodNumber 2> => 16.0
-4.0 % <MethodNumber 2> = 0.0
-4.0 %= <MethodNumber 2> => 0.0
-4.0 + <CoerceNumber 2> = 6.0
-4.0 += <CoerceNumber 2> => 6.0
-4.0 - <CoerceNumber 2> = 2.0
-4.0 -= <CoerceNumber 2> => 2.0
-4.0 * <CoerceNumber 2> = 8.0
-4.0 *= <CoerceNumber 2> => 8.0
-4.0 / <CoerceNumber 2> = 2.0
-4.0 /= <CoerceNumber 2> => 2.0
-4.0 ** <CoerceNumber 2> = 16.0
-4.0 **= <CoerceNumber 2> => 16.0
-4.0 % <CoerceNumber 2> = 0.0
-4.0 %= <CoerceNumber 2> => 0.0
-2 + 2 = 4
-2 += 2 => 4
-2 - 2 = 0
-2 -= 2 => 0
-2 * 2 = 4
-2 *= 2 => 4
-2 / 2 = 1
-2 /= 2 => 1
-2 ** 2 = 4
-2 **= 2 => 4
-2 % 2 = 0
-2 %= 2 => 0
-2 + 4.0 = 6.0
-2 += 4.0 => 6.0
-2 - 4.0 = -2.0
-2 -= 4.0 => -2.0
-2 * 4.0 = 8.0
-2 *= 4.0 => 8.0
-2 / 4.0 = 0.5
-2 /= 4.0 => 0.5
-2 ** 4.0 = 16.0
-2 **= 4.0 => 16.0
-2 % 4.0 = 2.0
-2 %= 4.0 => 2.0
-2 + 2 = 4
-2 += 2 => 4
-2 - 2 = 0
-2 -= 2 => 0
-2 * 2 = 4
-2 *= 2 => 4
-2 / 2 = 1
-2 /= 2 => 1
-2 ** 2 = 4
-2 **= 2 => 4
-2 % 2 = 0
-2 %= 2 => 0
-2 + (2+0j) = (4.0 + 0.0j)
-2 += (2+0j) => (4.0 + 0.0j)
-2 - (2+0j) = (0.0 + 0.0j)
-2 -= (2+0j) => (0.0 + 0.0j)
-2 * (2+0j) = (4.0 + 0.0j)
-2 *= (2+0j) => (4.0 + 0.0j)
-2 / (2+0j) = (1.0 + 0.0j)
-2 /= (2+0j) => (1.0 + 0.0j)
-2 ** (2+0j) = (4.0 + 0.0j)
-2 **= (2+0j) => (4.0 + 0.0j)
-2 % (2+0j) = (0.0 + 0.0j)
-2 %= (2+0j) => (0.0 + 0.0j)
-2 + [1] ... exceptions.TypeError
-2 += [1] ... exceptions.TypeError
-2 - [1] ... exceptions.TypeError
-2 -= [1] ... exceptions.TypeError
-2 * [1] = [1, 1]
-2 *= [1] => [1, 1]
-2 / [1] ... exceptions.TypeError
-2 /= [1] ... exceptions.TypeError
-2 ** [1] ... exceptions.TypeError
-2 **= [1] ... exceptions.TypeError
-2 % [1] ... exceptions.TypeError
-2 %= [1] ... exceptions.TypeError
-2 + (2,) ... exceptions.TypeError
-2 += (2,) ... exceptions.TypeError
-2 - (2,) ... exceptions.TypeError
-2 -= (2,) ... exceptions.TypeError
-2 * (2,) = (2, 2)
-2 *= (2,) => (2, 2)
-2 / (2,) ... exceptions.TypeError
-2 /= (2,) ... exceptions.TypeError
-2 ** (2,) ... exceptions.TypeError
-2 **= (2,) ... exceptions.TypeError
-2 % (2,) ... exceptions.TypeError
-2 %= (2,) ... exceptions.TypeError
-2 + None ... exceptions.TypeError
-2 += None ... exceptions.TypeError
-2 - None ... exceptions.TypeError
-2 -= None ... exceptions.TypeError
-2 * None ... exceptions.TypeError
-2 *= None ... exceptions.TypeError
-2 / None ... exceptions.TypeError
-2 /= None ... exceptions.TypeError
-2 ** None ... exceptions.TypeError
-2 **= None ... exceptions.TypeError
-2 % None ... exceptions.TypeError
-2 %= None ... exceptions.TypeError
-2 + <MethodNumber 2> = 4
-2 += <MethodNumber 2> => 4
-2 - <MethodNumber 2> = 0
-2 -= <MethodNumber 2> => 0
-2 * <MethodNumber 2> = 4
-2 *= <MethodNumber 2> => 4
-2 / <MethodNumber 2> = 1
-2 /= <MethodNumber 2> => 1
-2 ** <MethodNumber 2> = 4
-2 **= <MethodNumber 2> => 4
-2 % <MethodNumber 2> = 0
-2 %= <MethodNumber 2> => 0
-2 + <CoerceNumber 2> = 4
-2 += <CoerceNumber 2> => 4
-2 - <CoerceNumber 2> = 0
-2 -= <CoerceNumber 2> => 0
-2 * <CoerceNumber 2> = 4
-2 *= <CoerceNumber 2> => 4
-2 / <CoerceNumber 2> = 1
-2 /= <CoerceNumber 2> => 1
-2 ** <CoerceNumber 2> = 4
-2 **= <CoerceNumber 2> => 4
-2 % <CoerceNumber 2> = 0
-2 %= <CoerceNumber 2> => 0
-(2+0j) + 2 = (4.0 + 0.0j)
-(2+0j) += 2 => (4.0 + 0.0j)
-(2+0j) - 2 = (0.0 + 0.0j)
-(2+0j) -= 2 => (0.0 + 0.0j)
-(2+0j) * 2 = (4.0 + 0.0j)
-(2+0j) *= 2 => (4.0 + 0.0j)
-(2+0j) / 2 = (1.0 + 0.0j)
-(2+0j) /= 2 => (1.0 + 0.0j)
-(2+0j) ** 2 = (4.0 + 0.0j)
-(2+0j) **= 2 => (4.0 + 0.0j)
-(2+0j) % 2 = (0.0 + 0.0j)
-(2+0j) %= 2 => (0.0 + 0.0j)
-(2+0j) + 4.0 = (6.0 + 0.0j)
-(2+0j) += 4.0 => (6.0 + 0.0j)
-(2+0j) - 4.0 = (-2.0 + 0.0j)
-(2+0j) -= 4.0 => (-2.0 + 0.0j)
-(2+0j) * 4.0 = (8.0 + 0.0j)
-(2+0j) *= 4.0 => (8.0 + 0.0j)
-(2+0j) / 4.0 = (0.5 + 0.0j)
-(2+0j) /= 4.0 => (0.5 + 0.0j)
-(2+0j) ** 4.0 = (16.0 + 0.0j)
-(2+0j) **= 4.0 => (16.0 + 0.0j)
-(2+0j) % 4.0 = (2.0 + 0.0j)
-(2+0j) %= 4.0 => (2.0 + 0.0j)
-(2+0j) + 2 = (4.0 + 0.0j)
-(2+0j) += 2 => (4.0 + 0.0j)
-(2+0j) - 2 = (0.0 + 0.0j)
-(2+0j) -= 2 => (0.0 + 0.0j)
-(2+0j) * 2 = (4.0 + 0.0j)
-(2+0j) *= 2 => (4.0 + 0.0j)
-(2+0j) / 2 = (1.0 + 0.0j)
-(2+0j) /= 2 => (1.0 + 0.0j)
-(2+0j) ** 2 = (4.0 + 0.0j)
-(2+0j) **= 2 => (4.0 + 0.0j)
-(2+0j) % 2 = (0.0 + 0.0j)
-(2+0j) %= 2 => (0.0 + 0.0j)
-(2+0j) + (2+0j) = (4.0 + 0.0j)
-(2+0j) += (2+0j) => (4.0 + 0.0j)
-(2+0j) - (2+0j) = (0.0 + 0.0j)
-(2+0j) -= (2+0j) => (0.0 + 0.0j)
-(2+0j) * (2+0j) = (4.0 + 0.0j)
-(2+0j) *= (2+0j) => (4.0 + 0.0j)
-(2+0j) / (2+0j) = (1.0 + 0.0j)
-(2+0j) /= (2+0j) => (1.0 + 0.0j)
-(2+0j) ** (2+0j) = (4.0 + 0.0j)
-(2+0j) **= (2+0j) => (4.0 + 0.0j)
-(2+0j) % (2+0j) = (0.0 + 0.0j)
-(2+0j) %= (2+0j) => (0.0 + 0.0j)
-(2+0j) + [1] ... exceptions.TypeError
-(2+0j) += [1] ... exceptions.TypeError
-(2+0j) - [1] ... exceptions.TypeError
-(2+0j) -= [1] ... exceptions.TypeError
-(2+0j) * [1] ... exceptions.TypeError
-(2+0j) *= [1] ... exceptions.TypeError
-(2+0j) / [1] ... exceptions.TypeError
-(2+0j) /= [1] ... exceptions.TypeError
-(2+0j) ** [1] ... exceptions.TypeError
-(2+0j) **= [1] ... exceptions.TypeError
-(2+0j) % [1] ... exceptions.TypeError
-(2+0j) %= [1] ... exceptions.TypeError
-(2+0j) + (2,) ... exceptions.TypeError
-(2+0j) += (2,) ... exceptions.TypeError
-(2+0j) - (2,) ... exceptions.TypeError
-(2+0j) -= (2,) ... exceptions.TypeError
-(2+0j) * (2,) ... exceptions.TypeError
-(2+0j) *= (2,) ... exceptions.TypeError
-(2+0j) / (2,) ... exceptions.TypeError
-(2+0j) /= (2,) ... exceptions.TypeError
-(2+0j) ** (2,) ... exceptions.TypeError
-(2+0j) **= (2,) ... exceptions.TypeError
-(2+0j) % (2,) ... exceptions.TypeError
-(2+0j) %= (2,) ... exceptions.TypeError
-(2+0j) + None ... exceptions.TypeError
-(2+0j) += None ... exceptions.TypeError
-(2+0j) - None ... exceptions.TypeError
-(2+0j) -= None ... exceptions.TypeError
-(2+0j) * None ... exceptions.TypeError
-(2+0j) *= None ... exceptions.TypeError
-(2+0j) / None ... exceptions.TypeError
-(2+0j) /= None ... exceptions.TypeError
-(2+0j) ** None ... exceptions.TypeError
-(2+0j) **= None ... exceptions.TypeError
-(2+0j) % None ... exceptions.TypeError
-(2+0j) %= None ... exceptions.TypeError
-(2+0j) + <MethodNumber 2> = (4.0 + 0.0j)
-(2+0j) += <MethodNumber 2> => (4.0 + 0.0j)
-(2+0j) - <MethodNumber 2> = (0.0 + 0.0j)
-(2+0j) -= <MethodNumber 2> => (0.0 + 0.0j)
-(2+0j) * <MethodNumber 2> = (4.0 + 0.0j)
-(2+0j) *= <MethodNumber 2> => (4.0 + 0.0j)
-(2+0j) / <MethodNumber 2> = (1.0 + 0.0j)
-(2+0j) /= <MethodNumber 2> => (1.0 + 0.0j)
-(2+0j) ** <MethodNumber 2> = (4.0 + 0.0j)
-(2+0j) **= <MethodNumber 2> => (4.0 + 0.0j)
-(2+0j) % <MethodNumber 2> = (0.0 + 0.0j)
-(2+0j) %= <MethodNumber 2> => (0.0 + 0.0j)
-(2+0j) + <CoerceNumber 2> = (4.0 + 0.0j)
-(2+0j) += <CoerceNumber 2> => (4.0 + 0.0j)
-(2+0j) - <CoerceNumber 2> = (0.0 + 0.0j)
-(2+0j) -= <CoerceNumber 2> => (0.0 + 0.0j)
-(2+0j) * <CoerceNumber 2> = (4.0 + 0.0j)
-(2+0j) *= <CoerceNumber 2> => (4.0 + 0.0j)
-(2+0j) / <CoerceNumber 2> = (1.0 + 0.0j)
-(2+0j) /= <CoerceNumber 2> => (1.0 + 0.0j)
-(2+0j) ** <CoerceNumber 2> = (4.0 + 0.0j)
-(2+0j) **= <CoerceNumber 2> => (4.0 + 0.0j)
-(2+0j) % <CoerceNumber 2> = (0.0 + 0.0j)
-(2+0j) %= <CoerceNumber 2> => (0.0 + 0.0j)
-[1] + 2 ... exceptions.TypeError
-[1] += 2 ... exceptions.TypeError
-[1] - 2 ... exceptions.TypeError
-[1] -= 2 ... exceptions.TypeError
-[1] * 2 = [1, 1]
-[1] *= 2 => [1, 1]
-[1] / 2 ... exceptions.TypeError
-[1] /= 2 ... exceptions.TypeError
-[1] ** 2 ... exceptions.TypeError
-[1] **= 2 ... exceptions.TypeError
-[1] % 2 ... exceptions.TypeError
-[1] %= 2 ... exceptions.TypeError
-[1] + 4.0 ... exceptions.TypeError
-[1] += 4.0 ... exceptions.TypeError
-[1] - 4.0 ... exceptions.TypeError
-[1] -= 4.0 ... exceptions.TypeError
-[1] * 4.0 ... exceptions.TypeError
-[1] *= 4.0 ... exceptions.TypeError
-[1] / 4.0 ... exceptions.TypeError
-[1] /= 4.0 ... exceptions.TypeError
-[1] ** 4.0 ... exceptions.TypeError
-[1] **= 4.0 ... exceptions.TypeError
-[1] % 4.0 ... exceptions.TypeError
-[1] %= 4.0 ... exceptions.TypeError
-[1] + 2 ... exceptions.TypeError
-[1] += 2 ... exceptions.TypeError
-[1] - 2 ... exceptions.TypeError
-[1] -= 2 ... exceptions.TypeError
-[1] * 2 = [1, 1]
-[1] *= 2 => [1, 1]
-[1] / 2 ... exceptions.TypeError
-[1] /= 2 ... exceptions.TypeError
-[1] ** 2 ... exceptions.TypeError
-[1] **= 2 ... exceptions.TypeError
-[1] % 2 ... exceptions.TypeError
-[1] %= 2 ... exceptions.TypeError
-[1] + (2+0j) ... exceptions.TypeError
-[1] += (2+0j) ... exceptions.TypeError
-[1] - (2+0j) ... exceptions.TypeError
-[1] -= (2+0j) ... exceptions.TypeError
-[1] * (2+0j) ... exceptions.TypeError
-[1] *= (2+0j) ... exceptions.TypeError
-[1] / (2+0j) ... exceptions.TypeError
-[1] /= (2+0j) ... exceptions.TypeError
-[1] ** (2+0j) ... exceptions.TypeError
-[1] **= (2+0j) ... exceptions.TypeError
-[1] % (2+0j) ... exceptions.TypeError
-[1] %= (2+0j) ... exceptions.TypeError
-[1] + [1] = [1, 1]
-[1] += [1] => [1, 1]
-[1] - [1] ... exceptions.TypeError
-[1] -= [1] ... exceptions.TypeError
-[1] * [1] ... exceptions.TypeError
-[1] *= [1] ... exceptions.TypeError
-[1] / [1] ... exceptions.TypeError
-[1] /= [1] ... exceptions.TypeError
-[1] ** [1] ... exceptions.TypeError
-[1] **= [1] ... exceptions.TypeError
-[1] % [1] ... exceptions.TypeError
-[1] %= [1] ... exceptions.TypeError
-[1] + (2,) ... exceptions.TypeError
-[1] += (2,) => [1, 2]
-[1] - (2,) ... exceptions.TypeError
-[1] -= (2,) ... exceptions.TypeError
-[1] * (2,) ... exceptions.TypeError
-[1] *= (2,) ... exceptions.TypeError
-[1] / (2,) ... exceptions.TypeError
-[1] /= (2,) ... exceptions.TypeError
-[1] ** (2,) ... exceptions.TypeError
-[1] **= (2,) ... exceptions.TypeError
-[1] % (2,) ... exceptions.TypeError
-[1] %= (2,) ... exceptions.TypeError
-[1] + None ... exceptions.TypeError
-[1] += None ... exceptions.TypeError
-[1] - None ... exceptions.TypeError
-[1] -= None ... exceptions.TypeError
-[1] * None ... exceptions.TypeError
-[1] *= None ... exceptions.TypeError
-[1] / None ... exceptions.TypeError
-[1] /= None ... exceptions.TypeError
-[1] ** None ... exceptions.TypeError
-[1] **= None ... exceptions.TypeError
-[1] % None ... exceptions.TypeError
-[1] %= None ... exceptions.TypeError
-[1] + <MethodNumber 2> ... exceptions.TypeError
-[1] += <MethodNumber 2> ... exceptions.TypeError
-[1] - <MethodNumber 2> ... exceptions.TypeError
-[1] -= <MethodNumber 2> ... exceptions.TypeError
-[1] * <MethodNumber 2> = [1, 1]
-[1] *= <MethodNumber 2> => [1, 1]
-[1] / <MethodNumber 2> ... exceptions.TypeError
-[1] /= <MethodNumber 2> ... exceptions.TypeError
-[1] ** <MethodNumber 2> ... exceptions.TypeError
-[1] **= <MethodNumber 2> ... exceptions.TypeError
-[1] % <MethodNumber 2> ... exceptions.TypeError
-[1] %= <MethodNumber 2> ... exceptions.TypeError
-[1] + <CoerceNumber 2> ... exceptions.TypeError
-[1] += <CoerceNumber 2> ... exceptions.TypeError
-[1] - <CoerceNumber 2> ... exceptions.TypeError
-[1] -= <CoerceNumber 2> ... exceptions.TypeError
-[1] * <CoerceNumber 2> = [1, 1]
-[1] *= <CoerceNumber 2> => [1, 1]
-[1] / <CoerceNumber 2> ... exceptions.TypeError
-[1] /= <CoerceNumber 2> ... exceptions.TypeError
-[1] ** <CoerceNumber 2> ... exceptions.TypeError
-[1] **= <CoerceNumber 2> ... exceptions.TypeError
-[1] % <CoerceNumber 2> ... exceptions.TypeError
-[1] %= <CoerceNumber 2> ... exceptions.TypeError
-(2,) + 2 ... exceptions.TypeError
-(2,) += 2 ... exceptions.TypeError
-(2,) - 2 ... exceptions.TypeError
-(2,) -= 2 ... exceptions.TypeError
-(2,) * 2 = (2, 2)
-(2,) *= 2 => (2, 2)
-(2,) / 2 ... exceptions.TypeError
-(2,) /= 2 ... exceptions.TypeError
-(2,) ** 2 ... exceptions.TypeError
-(2,) **= 2 ... exceptions.TypeError
-(2,) % 2 ... exceptions.TypeError
-(2,) %= 2 ... exceptions.TypeError
-(2,) + 4.0 ... exceptions.TypeError
-(2,) += 4.0 ... exceptions.TypeError
-(2,) - 4.0 ... exceptions.TypeError
-(2,) -= 4.0 ... exceptions.TypeError
-(2,) * 4.0 ... exceptions.TypeError
-(2,) *= 4.0 ... exceptions.TypeError
-(2,) / 4.0 ... exceptions.TypeError
-(2,) /= 4.0 ... exceptions.TypeError
-(2,) ** 4.0 ... exceptions.TypeError
-(2,) **= 4.0 ... exceptions.TypeError
-(2,) % 4.0 ... exceptions.TypeError
-(2,) %= 4.0 ... exceptions.TypeError
-(2,) + 2 ... exceptions.TypeError
-(2,) += 2 ... exceptions.TypeError
-(2,) - 2 ... exceptions.TypeError
-(2,) -= 2 ... exceptions.TypeError
-(2,) * 2 = (2, 2)
-(2,) *= 2 => (2, 2)
-(2,) / 2 ... exceptions.TypeError
-(2,) /= 2 ... exceptions.TypeError
-(2,) ** 2 ... exceptions.TypeError
-(2,) **= 2 ... exceptions.TypeError
-(2,) % 2 ... exceptions.TypeError
-(2,) %= 2 ... exceptions.TypeError
-(2,) + (2+0j) ... exceptions.TypeError
-(2,) += (2+0j) ... exceptions.TypeError
-(2,) - (2+0j) ... exceptions.TypeError
-(2,) -= (2+0j) ... exceptions.TypeError
-(2,) * (2+0j) ... exceptions.TypeError
-(2,) *= (2+0j) ... exceptions.TypeError
-(2,) / (2+0j) ... exceptions.TypeError
-(2,) /= (2+0j) ... exceptions.TypeError
-(2,) ** (2+0j) ... exceptions.TypeError
-(2,) **= (2+0j) ... exceptions.TypeError
-(2,) % (2+0j) ... exceptions.TypeError
-(2,) %= (2+0j) ... exceptions.TypeError
-(2,) + [1] ... exceptions.TypeError
-(2,) += [1] ... exceptions.TypeError
-(2,) - [1] ... exceptions.TypeError
-(2,) -= [1] ... exceptions.TypeError
-(2,) * [1] ... exceptions.TypeError
-(2,) *= [1] ... exceptions.TypeError
-(2,) / [1] ... exceptions.TypeError
-(2,) /= [1] ... exceptions.TypeError
-(2,) ** [1] ... exceptions.TypeError
-(2,) **= [1] ... exceptions.TypeError
-(2,) % [1] ... exceptions.TypeError
-(2,) %= [1] ... exceptions.TypeError
-(2,) + (2,) = (2, 2)
-(2,) += (2,) => (2, 2)
-(2,) - (2,) ... exceptions.TypeError
-(2,) -= (2,) ... exceptions.TypeError
-(2,) * (2,) ... exceptions.TypeError
-(2,) *= (2,) ... exceptions.TypeError
-(2,) / (2,) ... exceptions.TypeError
-(2,) /= (2,) ... exceptions.TypeError
-(2,) ** (2,) ... exceptions.TypeError
-(2,) **= (2,) ... exceptions.TypeError
-(2,) % (2,) ... exceptions.TypeError
-(2,) %= (2,) ... exceptions.TypeError
-(2,) + None ... exceptions.TypeError
-(2,) += None ... exceptions.TypeError
-(2,) - None ... exceptions.TypeError
-(2,) -= None ... exceptions.TypeError
-(2,) * None ... exceptions.TypeError
-(2,) *= None ... exceptions.TypeError
-(2,) / None ... exceptions.TypeError
-(2,) /= None ... exceptions.TypeError
-(2,) ** None ... exceptions.TypeError
-(2,) **= None ... exceptions.TypeError
-(2,) % None ... exceptions.TypeError
-(2,) %= None ... exceptions.TypeError
-(2,) + <MethodNumber 2> ... exceptions.TypeError
-(2,) += <MethodNumber 2> ... exceptions.TypeError
-(2,) - <MethodNumber 2> ... exceptions.TypeError
-(2,) -= <MethodNumber 2> ... exceptions.TypeError
-(2,) * <MethodNumber 2> = (2, 2)
-(2,) *= <MethodNumber 2> => (2, 2)
-(2,) / <MethodNumber 2> ... exceptions.TypeError
-(2,) /= <MethodNumber 2> ... exceptions.TypeError
-(2,) ** <MethodNumber 2> ... exceptions.TypeError
-(2,) **= <MethodNumber 2> ... exceptions.TypeError
-(2,) % <MethodNumber 2> ... exceptions.TypeError
-(2,) %= <MethodNumber 2> ... exceptions.TypeError
-(2,) + <CoerceNumber 2> ... exceptions.TypeError
-(2,) += <CoerceNumber 2> ... exceptions.TypeError
-(2,) - <CoerceNumber 2> ... exceptions.TypeError
-(2,) -= <CoerceNumber 2> ... exceptions.TypeError
-(2,) * <CoerceNumber 2> = (2, 2)
-(2,) *= <CoerceNumber 2> => (2, 2)
-(2,) / <CoerceNumber 2> ... exceptions.TypeError
-(2,) /= <CoerceNumber 2> ... exceptions.TypeError
-(2,) ** <CoerceNumber 2> ... exceptions.TypeError
-(2,) **= <CoerceNumber 2> ... exceptions.TypeError
-(2,) % <CoerceNumber 2> ... exceptions.TypeError
-(2,) %= <CoerceNumber 2> ... exceptions.TypeError
-None + 2 ... exceptions.TypeError
-None += 2 ... exceptions.TypeError
-None - 2 ... exceptions.TypeError
-None -= 2 ... exceptions.TypeError
-None * 2 ... exceptions.TypeError
-None *= 2 ... exceptions.TypeError
-None / 2 ... exceptions.TypeError
-None /= 2 ... exceptions.TypeError
-None ** 2 ... exceptions.TypeError
-None **= 2 ... exceptions.TypeError
-None % 2 ... exceptions.TypeError
-None %= 2 ... exceptions.TypeError
-None + 4.0 ... exceptions.TypeError
-None += 4.0 ... exceptions.TypeError
-None - 4.0 ... exceptions.TypeError
-None -= 4.0 ... exceptions.TypeError
-None * 4.0 ... exceptions.TypeError
-None *= 4.0 ... exceptions.TypeError
-None / 4.0 ... exceptions.TypeError
-None /= 4.0 ... exceptions.TypeError
-None ** 4.0 ... exceptions.TypeError
-None **= 4.0 ... exceptions.TypeError
-None % 4.0 ... exceptions.TypeError
-None %= 4.0 ... exceptions.TypeError
-None + 2 ... exceptions.TypeError
-None += 2 ... exceptions.TypeError
-None - 2 ... exceptions.TypeError
-None -= 2 ... exceptions.TypeError
-None * 2 ... exceptions.TypeError
-None *= 2 ... exceptions.TypeError
-None / 2 ... exceptions.TypeError
-None /= 2 ... exceptions.TypeError
-None ** 2 ... exceptions.TypeError
-None **= 2 ... exceptions.TypeError
-None % 2 ... exceptions.TypeError
-None %= 2 ... exceptions.TypeError
-None + (2+0j) ... exceptions.TypeError
-None += (2+0j) ... exceptions.TypeError
-None - (2+0j) ... exceptions.TypeError
-None -= (2+0j) ... exceptions.TypeError
-None * (2+0j) ... exceptions.TypeError
-None *= (2+0j) ... exceptions.TypeError
-None / (2+0j) ... exceptions.TypeError
-None /= (2+0j) ... exceptions.TypeError
-None ** (2+0j) ... exceptions.TypeError
-None **= (2+0j) ... exceptions.TypeError
-None % (2+0j) ... exceptions.TypeError
-None %= (2+0j) ... exceptions.TypeError
-None + [1] ... exceptions.TypeError
-None += [1] ... exceptions.TypeError
-None - [1] ... exceptions.TypeError
-None -= [1] ... exceptions.TypeError
-None * [1] ... exceptions.TypeError
-None *= [1] ... exceptions.TypeError
-None / [1] ... exceptions.TypeError
-None /= [1] ... exceptions.TypeError
-None ** [1] ... exceptions.TypeError
-None **= [1] ... exceptions.TypeError
-None % [1] ... exceptions.TypeError
-None %= [1] ... exceptions.TypeError
-None + (2,) ... exceptions.TypeError
-None += (2,) ... exceptions.TypeError
-None - (2,) ... exceptions.TypeError
-None -= (2,) ... exceptions.TypeError
-None * (2,) ... exceptions.TypeError
-None *= (2,) ... exceptions.TypeError
-None / (2,) ... exceptions.TypeError
-None /= (2,) ... exceptions.TypeError
-None ** (2,) ... exceptions.TypeError
-None **= (2,) ... exceptions.TypeError
-None % (2,) ... exceptions.TypeError
-None %= (2,) ... exceptions.TypeError
-None + None ... exceptions.TypeError
-None += None ... exceptions.TypeError
-None - None ... exceptions.TypeError
-None -= None ... exceptions.TypeError
-None * None ... exceptions.TypeError
-None *= None ... exceptions.TypeError
-None / None ... exceptions.TypeError
-None /= None ... exceptions.TypeError
-None ** None ... exceptions.TypeError
-None **= None ... exceptions.TypeError
-None % None ... exceptions.TypeError
-None %= None ... exceptions.TypeError
-None + <MethodNumber 2> ... exceptions.TypeError
-None += <MethodNumber 2> ... exceptions.TypeError
-None - <MethodNumber 2> ... exceptions.TypeError
-None -= <MethodNumber 2> ... exceptions.TypeError
-None * <MethodNumber 2> ... exceptions.TypeError
-None *= <MethodNumber 2> ... exceptions.TypeError
-None / <MethodNumber 2> ... exceptions.TypeError
-None /= <MethodNumber 2> ... exceptions.TypeError
-None ** <MethodNumber 2> ... exceptions.TypeError
-None **= <MethodNumber 2> ... exceptions.TypeError
-None % <MethodNumber 2> ... exceptions.TypeError
-None %= <MethodNumber 2> ... exceptions.TypeError
-None + <CoerceNumber 2> ... exceptions.TypeError
-None += <CoerceNumber 2> ... exceptions.TypeError
-None - <CoerceNumber 2> ... exceptions.TypeError
-None -= <CoerceNumber 2> ... exceptions.TypeError
-None * <CoerceNumber 2> ... exceptions.TypeError
-None *= <CoerceNumber 2> ... exceptions.TypeError
-None / <CoerceNumber 2> ... exceptions.TypeError
-None /= <CoerceNumber 2> ... exceptions.TypeError
-None ** <CoerceNumber 2> ... exceptions.TypeError
-None **= <CoerceNumber 2> ... exceptions.TypeError
-None % <CoerceNumber 2> ... exceptions.TypeError
-None %= <CoerceNumber 2> ... exceptions.TypeError
-<MethodNumber 2> + 2 = 4
-<MethodNumber 2> += 2 => 4
-<MethodNumber 2> - 2 = 0
-<MethodNumber 2> -= 2 => 0
-<MethodNumber 2> * 2 = 4
-<MethodNumber 2> *= 2 => 4
-<MethodNumber 2> / 2 = 1
-<MethodNumber 2> /= 2 => 1
-<MethodNumber 2> ** 2 = 4
-<MethodNumber 2> **= 2 => 4
-<MethodNumber 2> % 2 = 0
-<MethodNumber 2> %= 2 => 0
-<MethodNumber 2> + 4.0 = 6.0
-<MethodNumber 2> += 4.0 => 6.0
-<MethodNumber 2> - 4.0 = -2.0
-<MethodNumber 2> -= 4.0 => -2.0
-<MethodNumber 2> * 4.0 = 8.0
-<MethodNumber 2> *= 4.0 => 8.0
-<MethodNumber 2> / 4.0 = 0.5
-<MethodNumber 2> /= 4.0 => 0.5
-<MethodNumber 2> ** 4.0 = 16.0
-<MethodNumber 2> **= 4.0 => 16.0
-<MethodNumber 2> % 4.0 = 2.0
-<MethodNumber 2> %= 4.0 => 2.0
-<MethodNumber 2> + 2 = 4
-<MethodNumber 2> += 2 => 4
-<MethodNumber 2> - 2 = 0
-<MethodNumber 2> -= 2 => 0
-<MethodNumber 2> * 2 = 4
-<MethodNumber 2> *= 2 => 4
-<MethodNumber 2> / 2 = 1
-<MethodNumber 2> /= 2 => 1
-<MethodNumber 2> ** 2 = 4
-<MethodNumber 2> **= 2 => 4
-<MethodNumber 2> % 2 = 0
-<MethodNumber 2> %= 2 => 0
-<MethodNumber 2> + (2+0j) = (4.0 + 0.0j)
-<MethodNumber 2> += (2+0j) => (4.0 + 0.0j)
-<MethodNumber 2> - (2+0j) = (0.0 + 0.0j)
-<MethodNumber 2> -= (2+0j) => (0.0 + 0.0j)
-<MethodNumber 2> * (2+0j) = (4.0 + 0.0j)
-<MethodNumber 2> *= (2+0j) => (4.0 + 0.0j)
-<MethodNumber 2> / (2+0j) = (1.0 + 0.0j)
-<MethodNumber 2> /= (2+0j) => (1.0 + 0.0j)
-<MethodNumber 2> ** (2+0j) = (4.0 + 0.0j)
-<MethodNumber 2> **= (2+0j) => (4.0 + 0.0j)
-<MethodNumber 2> % (2+0j) = (0.0 + 0.0j)
-<MethodNumber 2> %= (2+0j) => (0.0 + 0.0j)
-<MethodNumber 2> + [1] ... exceptions.TypeError
-<MethodNumber 2> += [1] ... exceptions.TypeError
-<MethodNumber 2> - [1] ... exceptions.TypeError
-<MethodNumber 2> -= [1] ... exceptions.TypeError
-<MethodNumber 2> * [1] = [1, 1]
-<MethodNumber 2> *= [1] => [1, 1]
-<MethodNumber 2> / [1] ... exceptions.TypeError
-<MethodNumber 2> /= [1] ... exceptions.TypeError
-<MethodNumber 2> ** [1] ... exceptions.TypeError
-<MethodNumber 2> **= [1] ... exceptions.TypeError
-<MethodNumber 2> % [1] ... exceptions.TypeError
-<MethodNumber 2> %= [1] ... exceptions.TypeError
-<MethodNumber 2> + (2,) ... exceptions.TypeError
-<MethodNumber 2> += (2,) ... exceptions.TypeError
-<MethodNumber 2> - (2,) ... exceptions.TypeError
-<MethodNumber 2> -= (2,) ... exceptions.TypeError
-<MethodNumber 2> * (2,) = (2, 2)
-<MethodNumber 2> *= (2,) => (2, 2)
-<MethodNumber 2> / (2,) ... exceptions.TypeError
-<MethodNumber 2> /= (2,) ... exceptions.TypeError
-<MethodNumber 2> ** (2,) ... exceptions.TypeError
-<MethodNumber 2> **= (2,) ... exceptions.TypeError
-<MethodNumber 2> % (2,) ... exceptions.TypeError
-<MethodNumber 2> %= (2,) ... exceptions.TypeError
-<MethodNumber 2> + None ... exceptions.TypeError
-<MethodNumber 2> += None ... exceptions.TypeError
-<MethodNumber 2> - None ... exceptions.TypeError
-<MethodNumber 2> -= None ... exceptions.TypeError
-<MethodNumber 2> * None ... exceptions.TypeError
-<MethodNumber 2> *= None ... exceptions.TypeError
-<MethodNumber 2> / None ... exceptions.TypeError
-<MethodNumber 2> /= None ... exceptions.TypeError
-<MethodNumber 2> ** None ... exceptions.TypeError
-<MethodNumber 2> **= None ... exceptions.TypeError
-<MethodNumber 2> % None ... exceptions.TypeError
-<MethodNumber 2> %= None ... exceptions.TypeError
-<MethodNumber 2> + <MethodNumber 2> = 4
-<MethodNumber 2> += <MethodNumber 2> => 4
-<MethodNumber 2> - <MethodNumber 2> = 0
-<MethodNumber 2> -= <MethodNumber 2> => 0
-<MethodNumber 2> * <MethodNumber 2> = 4
-<MethodNumber 2> *= <MethodNumber 2> => 4
-<MethodNumber 2> / <MethodNumber 2> = 1
-<MethodNumber 2> /= <MethodNumber 2> => 1
-<MethodNumber 2> ** <MethodNumber 2> = 4
-<MethodNumber 2> **= <MethodNumber 2> => 4
-<MethodNumber 2> % <MethodNumber 2> = 0
-<MethodNumber 2> %= <MethodNumber 2> => 0
-<MethodNumber 2> + <CoerceNumber 2> = 4
-<MethodNumber 2> += <CoerceNumber 2> => 4
-<MethodNumber 2> - <CoerceNumber 2> = 0
-<MethodNumber 2> -= <CoerceNumber 2> => 0
-<MethodNumber 2> * <CoerceNumber 2> = 4
-<MethodNumber 2> *= <CoerceNumber 2> => 4
-<MethodNumber 2> / <CoerceNumber 2> = 1
-<MethodNumber 2> /= <CoerceNumber 2> => 1
-<MethodNumber 2> ** <CoerceNumber 2> = 4
-<MethodNumber 2> **= <CoerceNumber 2> => 4
-<MethodNumber 2> % <CoerceNumber 2> = 0
-<MethodNumber 2> %= <CoerceNumber 2> => 0
-<CoerceNumber 2> + 2 = 4
-<CoerceNumber 2> += 2 => 4
-<CoerceNumber 2> - 2 = 0
-<CoerceNumber 2> -= 2 => 0
-<CoerceNumber 2> * 2 = 4
-<CoerceNumber 2> *= 2 => 4
-<CoerceNumber 2> / 2 = 1
-<CoerceNumber 2> /= 2 => 1
-<CoerceNumber 2> ** 2 = 4
-<CoerceNumber 2> **= 2 => 4
-<CoerceNumber 2> % 2 = 0
-<CoerceNumber 2> %= 2 => 0
-<CoerceNumber 2> + 4.0 = 6.0
-<CoerceNumber 2> += 4.0 => 6.0
-<CoerceNumber 2> - 4.0 = -2.0
-<CoerceNumber 2> -= 4.0 => -2.0
-<CoerceNumber 2> * 4.0 = 8.0
-<CoerceNumber 2> *= 4.0 => 8.0
-<CoerceNumber 2> / 4.0 = 0.5
-<CoerceNumber 2> /= 4.0 => 0.5
-<CoerceNumber 2> ** 4.0 = 16.0
-<CoerceNumber 2> **= 4.0 => 16.0
-<CoerceNumber 2> % 4.0 = 2.0
-<CoerceNumber 2> %= 4.0 => 2.0
-<CoerceNumber 2> + 2 = 4
-<CoerceNumber 2> += 2 => 4
-<CoerceNumber 2> - 2 = 0
-<CoerceNumber 2> -= 2 => 0
-<CoerceNumber 2> * 2 = 4
-<CoerceNumber 2> *= 2 => 4
-<CoerceNumber 2> / 2 = 1
-<CoerceNumber 2> /= 2 => 1
-<CoerceNumber 2> ** 2 = 4
-<CoerceNumber 2> **= 2 => 4
-<CoerceNumber 2> % 2 = 0
-<CoerceNumber 2> %= 2 => 0
-<CoerceNumber 2> + (2+0j) = (4.0 + 0.0j)
-<CoerceNumber 2> += (2+0j) => (4.0 + 0.0j)
-<CoerceNumber 2> - (2+0j) = (0.0 + 0.0j)
-<CoerceNumber 2> -= (2+0j) => (0.0 + 0.0j)
-<CoerceNumber 2> * (2+0j) = (4.0 + 0.0j)
-<CoerceNumber 2> *= (2+0j) => (4.0 + 0.0j)
-<CoerceNumber 2> / (2+0j) = (1.0 + 0.0j)
-<CoerceNumber 2> /= (2+0j) => (1.0 + 0.0j)
-<CoerceNumber 2> ** (2+0j) = (4.0 + 0.0j)
-<CoerceNumber 2> **= (2+0j) => (4.0 + 0.0j)
-<CoerceNumber 2> % (2+0j) = (0.0 + 0.0j)
-<CoerceNumber 2> %= (2+0j) => (0.0 + 0.0j)
-<CoerceNumber 2> + [1] ... exceptions.TypeError
-<CoerceNumber 2> += [1] ... exceptions.TypeError
-<CoerceNumber 2> - [1] ... exceptions.TypeError
-<CoerceNumber 2> -= [1] ... exceptions.TypeError
-<CoerceNumber 2> * [1] = [1, 1]
-<CoerceNumber 2> *= [1] => [1, 1]
-<CoerceNumber 2> / [1] ... exceptions.TypeError
-<CoerceNumber 2> /= [1] ... exceptions.TypeError
-<CoerceNumber 2> ** [1] ... exceptions.TypeError
-<CoerceNumber 2> **= [1] ... exceptions.TypeError
-<CoerceNumber 2> % [1] ... exceptions.TypeError
-<CoerceNumber 2> %= [1] ... exceptions.TypeError
-<CoerceNumber 2> + (2,) ... exceptions.TypeError
-<CoerceNumber 2> += (2,) ... exceptions.TypeError
-<CoerceNumber 2> - (2,) ... exceptions.TypeError
-<CoerceNumber 2> -= (2,) ... exceptions.TypeError
-<CoerceNumber 2> * (2,) = (2, 2)
-<CoerceNumber 2> *= (2,) => (2, 2)
-<CoerceNumber 2> / (2,) ... exceptions.TypeError
-<CoerceNumber 2> /= (2,) ... exceptions.TypeError
-<CoerceNumber 2> ** (2,) ... exceptions.TypeError
-<CoerceNumber 2> **= (2,) ... exceptions.TypeError
-<CoerceNumber 2> % (2,) ... exceptions.TypeError
-<CoerceNumber 2> %= (2,) ... exceptions.TypeError
-<CoerceNumber 2> + None ... exceptions.TypeError
-<CoerceNumber 2> += None ... exceptions.TypeError
-<CoerceNumber 2> - None ... exceptions.TypeError
-<CoerceNumber 2> -= None ... exceptions.TypeError
-<CoerceNumber 2> * None ... exceptions.TypeError
-<CoerceNumber 2> *= None ... exceptions.TypeError
-<CoerceNumber 2> / None ... exceptions.TypeError
-<CoerceNumber 2> /= None ... exceptions.TypeError
-<CoerceNumber 2> ** None ... exceptions.TypeError
-<CoerceNumber 2> **= None ... exceptions.TypeError
-<CoerceNumber 2> % None ... exceptions.TypeError
-<CoerceNumber 2> %= None ... exceptions.TypeError
-<CoerceNumber 2> + <MethodNumber 2> = 4
-<CoerceNumber 2> += <MethodNumber 2> => 4
-<CoerceNumber 2> - <MethodNumber 2> = 0
-<CoerceNumber 2> -= <MethodNumber 2> => 0
-<CoerceNumber 2> * <MethodNumber 2> = 4
-<CoerceNumber 2> *= <MethodNumber 2> => 4
-<CoerceNumber 2> / <MethodNumber 2> = 1
-<CoerceNumber 2> /= <MethodNumber 2> => 1
-<CoerceNumber 2> ** <MethodNumber 2> = 4
-<CoerceNumber 2> **= <MethodNumber 2> => 4
-<CoerceNumber 2> % <MethodNumber 2> = 0
-<CoerceNumber 2> %= <MethodNumber 2> => 0
-<CoerceNumber 2> + <CoerceNumber 2> = 4
-<CoerceNumber 2> += <CoerceNumber 2> => 4
-<CoerceNumber 2> - <CoerceNumber 2> = 0
-<CoerceNumber 2> -= <CoerceNumber 2> => 0
-<CoerceNumber 2> * <CoerceNumber 2> = 4
-<CoerceNumber 2> *= <CoerceNumber 2> => 4
-<CoerceNumber 2> / <CoerceNumber 2> = 1
-<CoerceNumber 2> /= <CoerceNumber 2> => 1
-<CoerceNumber 2> ** <CoerceNumber 2> = 4
-<CoerceNumber 2> **= <CoerceNumber 2> => 4
-<CoerceNumber 2> % <CoerceNumber 2> = 0
-<CoerceNumber 2> %= <CoerceNumber 2> => 0
-divmod(2, 2) = (1, 0)
-divmod(2, 4.0) = (0.0, 2.0)
-divmod(2, 2) = (1L, 0L)
-divmod(2, (2+0j)) = ((1+0j), 0j)
-divmod(2, [1]) ... exceptions.TypeError
-divmod(2, (2,)) ... exceptions.TypeError
-divmod(2, None) ... exceptions.TypeError
-divmod(2, <MethodNumber 2>) ... exceptions.TypeError
-divmod(2, <CoerceNumber 2>) = (1, 0)
-divmod(4.0, 2) = (2.0, 0.0)
-divmod(4.0, 4.0) = (1.0, 0.0)
-divmod(4.0, 2) = (2.0, 0.0)
-divmod(4.0, (2+0j)) = ((2+0j), 0j)
-divmod(4.0, [1]) ... exceptions.TypeError
-divmod(4.0, (2,)) ... exceptions.TypeError
-divmod(4.0, None) ... exceptions.TypeError
-divmod(4.0, <MethodNumber 2>) ... exceptions.TypeError
-divmod(4.0, <CoerceNumber 2>) = (2.0, 0.0)
-divmod(2, 2) = (1L, 0L)
-divmod(2, 4.0) = (0.0, 2.0)
-divmod(2, 2) = (1L, 0L)
-divmod(2, (2+0j)) = ((1+0j), 0j)
-divmod(2, [1]) ... exceptions.TypeError
-divmod(2, (2,)) ... exceptions.TypeError
-divmod(2, None) ... exceptions.TypeError
-divmod(2, <MethodNumber 2>) ... exceptions.TypeError
-divmod(2, <CoerceNumber 2>) = (1L, 0L)
-divmod((2+0j), 2) = ((1+0j), 0j)
-divmod((2+0j), 4.0) = (0j, (2+0j))
-divmod((2+0j), 2) = ((1+0j), 0j)
-divmod((2+0j), (2+0j)) = ((1+0j), 0j)
-divmod((2+0j), [1]) ... exceptions.TypeError
-divmod((2+0j), (2,)) ... exceptions.TypeError
-divmod((2+0j), None) ... exceptions.TypeError
-divmod((2+0j), <MethodNumber 2>) ... exceptions.TypeError
-divmod((2+0j), <CoerceNumber 2>) = ((1+0j), 0j)
-divmod([1], 2) ... exceptions.TypeError
-divmod([1], 4.0) ... exceptions.TypeError
-divmod([1], 2) ... exceptions.TypeError
-divmod([1], (2+0j)) ... exceptions.TypeError
-divmod([1], [1]) ... exceptions.TypeError
-divmod([1], (2,)) ... exceptions.TypeError
-divmod([1], None) ... exceptions.TypeError
-divmod([1], <MethodNumber 2>) ... exceptions.TypeError
-divmod([1], <CoerceNumber 2>) ... exceptions.TypeError
-divmod((2,), 2) ... exceptions.TypeError
-divmod((2,), 4.0) ... exceptions.TypeError
-divmod((2,), 2) ... exceptions.TypeError
-divmod((2,), (2+0j)) ... exceptions.TypeError
-divmod((2,), [1]) ... exceptions.TypeError
-divmod((2,), (2,)) ... exceptions.TypeError
-divmod((2,), None) ... exceptions.TypeError
-divmod((2,), <MethodNumber 2>) ... exceptions.TypeError
-divmod((2,), <CoerceNumber 2>) ... exceptions.TypeError
-divmod(None, 2) ... exceptions.TypeError
-divmod(None, 4.0) ... exceptions.TypeError
-divmod(None, 2) ... exceptions.TypeError
-divmod(None, (2+0j)) ... exceptions.TypeError
-divmod(None, [1]) ... exceptions.TypeError
-divmod(None, (2,)) ... exceptions.TypeError
-divmod(None, None) ... exceptions.TypeError
-divmod(None, <MethodNumber 2>) ... exceptions.TypeError
-divmod(None, <CoerceNumber 2>) ... exceptions.TypeError
-divmod(<MethodNumber 2>, 2) ... exceptions.TypeError
-divmod(<MethodNumber 2>, 4.0) ... exceptions.TypeError
-divmod(<MethodNumber 2>, 2) ... exceptions.TypeError
-divmod(<MethodNumber 2>, (2+0j)) ... exceptions.TypeError
-divmod(<MethodNumber 2>, [1]) ... exceptions.TypeError
-divmod(<MethodNumber 2>, (2,)) ... exceptions.TypeError
-divmod(<MethodNumber 2>, None) ... exceptions.TypeError
-divmod(<MethodNumber 2>, <MethodNumber 2>) ... exceptions.TypeError
-divmod(<MethodNumber 2>, <CoerceNumber 2>) ... exceptions.TypeError
-divmod(<CoerceNumber 2>, 2) = (1, 0)
-divmod(<CoerceNumber 2>, 4.0) = (0.0, 2.0)
-divmod(<CoerceNumber 2>, 2) = (1L, 0L)
-divmod(<CoerceNumber 2>, (2+0j)) = ((1+0j), 0j)
-divmod(<CoerceNumber 2>, [1]) ... exceptions.TypeError
-divmod(<CoerceNumber 2>, (2,)) ... exceptions.TypeError
-divmod(<CoerceNumber 2>, None) ... exceptions.TypeError
-divmod(<CoerceNumber 2>, <MethodNumber 2>) ... exceptions.TypeError
-divmod(<CoerceNumber 2>, <CoerceNumber 2>) = (1, 0)
diff --git a/Lib/test/output/test_compare b/Lib/test/output/test_compare
deleted file mode 100644
index 210bd97..0000000
--- a/Lib/test/output/test_compare
+++ /dev/null
@@ -1,101 +0,0 @@
-test_compare
-2 == 2
-2 == 2.0
-2 == 2
-2 == (2+0j)
-2 != [1]
-2 != (3,)
-2 != None
-2 != <Empty>
-2 == <Coerce 2>
-2 == <Cmp 2.0>
-2.0 == 2
-2.0 == 2.0
-2.0 == 2
-2.0 == (2+0j)
-2.0 != [1]
-2.0 != (3,)
-2.0 != None
-2.0 != <Empty>
-2.0 == <Coerce 2>
-2.0 == <Cmp 2.0>
-2 == 2
-2 == 2.0
-2 == 2
-2 == (2+0j)
-2 != [1]
-2 != (3,)
-2 != None
-2 != <Empty>
-2 == <Coerce 2>
-2 == <Cmp 2.0>
-(2+0j) == 2
-(2+0j) == 2.0
-(2+0j) == 2
-(2+0j) == (2+0j)
-(2+0j) != [1]
-(2+0j) != (3,)
-(2+0j) != None
-(2+0j) != <Empty>
-(2+0j) == <Coerce 2>
-(2+0j) == <Cmp 2.0>
-[1] != 2
-[1] != 2.0
-[1] != 2
-[1] != (2+0j)
-[1] == [1]
-[1] != (3,)
-[1] != None
-[1] != <Empty>
-[1] != <Coerce 2>
-[1] != <Cmp 2.0>
-(3,) != 2
-(3,) != 2.0
-(3,) != 2
-(3,) != (2+0j)
-(3,) != [1]
-(3,) == (3,)
-(3,) != None
-(3,) != <Empty>
-(3,) != <Coerce 2>
-(3,) != <Cmp 2.0>
-None != 2
-None != 2.0
-None != 2
-None != (2+0j)
-None != [1]
-None != (3,)
-None == None
-None != <Empty>
-None != <Coerce 2>
-None != <Cmp 2.0>
-<Empty> != 2
-<Empty> != 2.0
-<Empty> != 2
-<Empty> != (2+0j)
-<Empty> != [1]
-<Empty> != (3,)
-<Empty> != None
-<Empty> == <Empty>
-<Empty> != <Coerce 2>
-<Empty> != <Cmp 2.0>
-<Coerce 2> == 2
-<Coerce 2> == 2.0
-<Coerce 2> == 2
-<Coerce 2> == (2+0j)
-<Coerce 2> != [1]
-<Coerce 2> != (3,)
-<Coerce 2> != None
-<Coerce 2> != <Empty>
-<Coerce 2> == <Coerce 2>
-<Coerce 2> == <Cmp 2.0>
-<Cmp 2.0> == 2
-<Cmp 2.0> == 2.0
-<Cmp 2.0> == 2
-<Cmp 2.0> == (2+0j)
-<Cmp 2.0> != [1]
-<Cmp 2.0> != (3,)
-<Cmp 2.0> != None
-<Cmp 2.0> != <Empty>
-<Cmp 2.0> == <Coerce 2>
-<Cmp 2.0> == <Cmp 2.0>
diff --git a/Lib/test/regrtest.py b/Lib/test/regrtest.py
index 85f57a6..566e54b 100755
--- a/Lib/test/regrtest.py
+++ b/Lib/test/regrtest.py
@@ -520,7 +520,7 @@
                 import gc
                 def cleanup():
                     import _strptime, linecache, warnings, dircache
-                    import urlparse, urllib, urllib2
+                    import urlparse, urllib, urllib2, mimetypes, doctest
                     from distutils.dir_util import _path_created
                     _path_created.clear()
                     warnings.filters[:] = fs
@@ -536,6 +536,8 @@
                     sys.path_importer_cache.update(pic)
                     dircache.reset()
                     linecache.clearcache()
+                    mimetypes._default_mime_types()
+                    doctest.master = None
                 if indirect_test:
                     def run_the_test():
                         indirect_test()
@@ -547,6 +549,7 @@
                 print >> sys.stderr, "beginning", repcount, "repetitions"
                 print >> sys.stderr, \
                       ("1234567890"*(repcount//10 + 1))[:repcount]
+                cleanup()
                 for i in range(repcount):
                     rc = sys.gettotalrefcount()
                     run_the_test()
@@ -744,6 +747,8 @@
         test_sunaudiodev
         test_threadsignals
         test_timing
+        test_wait3
+        test_wait4
         """,
     'linux2':
         """
@@ -761,6 +766,8 @@
         test_nis
         test_ntpath
         test_ossaudiodev
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         """,
    'mac':
@@ -800,6 +807,8 @@
         test_pwd
         test_resource
         test_signal
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         test_sundry
         test_tarfile
@@ -824,6 +833,8 @@
         test_openpty
         test_pyexpat
         test_sax
+        test_startfile
+        test_sqlite
         test_sunaudiodev
         test_sundry
         """,
@@ -846,6 +857,8 @@
         test_openpty
         test_pyexpat
         test_sax
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         test_sundry
         """,
@@ -873,6 +886,8 @@
         test_pyexpat
         test_queue
         test_sax
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         test_sundry
         test_thread
@@ -913,6 +928,8 @@
         test_pty
         test_pwd
         test_strop
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         test_sundry
         test_thread
@@ -930,7 +947,6 @@
         test_cd
         test_cl
         test_curses
-        test_dl
         test_gdbm
         test_gl
         test_imgfile
@@ -942,6 +958,8 @@
         test_ntpath
         test_ossaudiodev
         test_poll
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         """,
     'sunos5':
@@ -960,6 +978,8 @@
         test_imgfile
         test_linuxaudiodev
         test_openpty
+        test_sqlite
+        test_startfile
         test_zipfile
         test_zlib
         """,
@@ -986,6 +1006,8 @@
         test_openpty
         test_pyexpat
         test_sax
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         test_zipfile
         test_zlib
@@ -1011,6 +1033,8 @@
         test_poll
         test_popen2
         test_resource
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         """,
     'cygwin':
@@ -1032,6 +1056,7 @@
         test_nis
         test_ossaudiodev
         test_socketserver
+        test_sqlite
         test_sunaudiodev
         """,
     'os2emx':
@@ -1058,6 +1083,8 @@
         test_pty
         test_resource
         test_signal
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         """,
     'freebsd4':
@@ -1084,6 +1111,8 @@
         test_scriptpackages
         test_socket_ssl
         test_socketserver
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         test_tcl
         test_timeout
@@ -1113,6 +1142,8 @@
         test_macostools
         test_nis
         test_ossaudiodev
+        test_sqlite
+        test_startfile
         test_sunaudiodev
         test_tcl
         test_winreg
@@ -1120,6 +1151,38 @@
         test_zipimport
         test_zlib
         """,
+    'openbsd3':
+        """
+        test_aepack
+        test_al
+        test_applesingle
+        test_bsddb
+        test_bsddb3
+        test_cd
+        test_cl
+        test_ctypes
+        test_dl
+        test_gdbm
+        test_gl
+        test_imgfile
+        test_linuxaudiodev
+        test_locale
+        test_macfs
+        test_macostools
+        test_nis
+        test_normalization
+        test_ossaudiodev
+        test_pep277
+        test_plistlib
+        test_scriptpackages
+        test_tcl
+        test_sqlite
+        test_startfile
+        test_sunaudiodev
+        test_unicode_file
+        test_winreg
+        test_winsound
+        """,
 }
 _expectations['freebsd5'] = _expectations['freebsd4']
 _expectations['freebsd6'] = _expectations['freebsd4']
diff --git a/Lib/test/test___all__.py b/Lib/test/test___all__.py
index 0b2e7da..c45e139 100644
--- a/Lib/test/test___all__.py
+++ b/Lib/test/test___all__.py
@@ -5,8 +5,6 @@
 import sys
 import warnings
 
-warnings.filterwarnings("ignore", ".* regsub .*", DeprecationWarning,
-                        r'^regsub$')
 warnings.filterwarnings("ignore",
                         "the gopherlib module is deprecated",
                         DeprecationWarning,
@@ -128,8 +126,6 @@
         self.check_all("quopri")
         self.check_all("random")
         self.check_all("re")
-        self.check_all("reconvert")
-        self.check_all("regsub")
         self.check_all("repr")
         self.check_all("rexec")
         self.check_all("rfc822")
diff --git a/Lib/test/test_applesingle.py b/Lib/test/test_applesingle.py
index 2a2d60a..d533f1a 100644
--- a/Lib/test/test_applesingle.py
+++ b/Lib/test/test_applesingle.py
@@ -15,8 +15,8 @@
 dataforkdata = 'hello\r\0world\n'
 resourceforkdata = 'goodbye\ncruel\0world\r'
 
-applesingledata = struct.pack("ll16sh", AS_MAGIC, AS_VERSION, "foo", 2) + \
-    struct.pack("llllll", 1, 50, len(dataforkdata),
+applesingledata = struct.pack(">ll16sh", AS_MAGIC, AS_VERSION, "foo", 2) + \
+    struct.pack(">llllll", 1, 50, len(dataforkdata),
         2, 50+len(dataforkdata), len(resourceforkdata)) + \
     dataforkdata + \
     resourceforkdata
diff --git a/Lib/test/test_array.py b/Lib/test/test_array.py
index 87d395d..62361fc 100755
--- a/Lib/test/test_array.py
+++ b/Lib/test/test_array.py
@@ -61,7 +61,7 @@
         bi = a.buffer_info()
         self.assert_(isinstance(bi, tuple))
         self.assertEqual(len(bi), 2)
-        self.assert_(isinstance(bi[0], int))
+        self.assert_(isinstance(bi[0], (int, long)))
         self.assert_(isinstance(bi[1], int))
         self.assertEqual(bi[1], len(a))
 
diff --git a/Lib/test/test_ast.py b/Lib/test/test_ast.py
index b42caa3..c64ad28 100644
--- a/Lib/test/test_ast.py
+++ b/Lib/test/test_ast.py
@@ -119,7 +119,8 @@
 # excepthandler, arguments, keywords, alias
 
 if __name__=='__main__' and sys.argv[1:] == ['-g']:
-    for statements, kind in ((exec_tests, "exec"), (single_tests, "single"), (eval_tests, "eval")):
+    for statements, kind in ((exec_tests, "exec"), (single_tests, "single"),
+                             (eval_tests, "eval")):
         print kind+"_results = ["
         for s in statements:
             print repr(to_tuple(compile(s, "?", kind, 0x400)))+","
@@ -131,7 +132,7 @@
 
     if not isinstance(ast_node, _ast.AST) or ast_node._fields == None:
         return
-    if isinstance(ast_node, (_ast.expr, _ast.stmt)):
+    if isinstance(ast_node, (_ast.expr, _ast.stmt, _ast.excepthandler)):
         node_pos = (ast_node.lineno, ast_node.col_offset)
         assert node_pos >= parent_pos, (node_pos, parent_pos)
         parent_pos = (ast_node.lineno, ast_node.col_offset)
@@ -145,8 +146,8 @@
 
 def run_tests():
     for input, output, kind in ((exec_tests, exec_results, "exec"),
-                                        (single_tests, single_results, "single"),
-                                        (eval_tests, eval_results, "eval")):
+                                (single_tests, single_results, "single"),
+                                (eval_tests, eval_results, "eval")):
         for i, o in itertools.izip(input, output):
             ast_tree = compile(i, "?", kind, 0x400)
             assert to_tuple(ast_tree) == o
@@ -165,7 +166,7 @@
 ('Module', [('While', (1, 0), ('Name', (1, 6), 'v', ('Load',)), [('Pass', (1, 8))], [])]),
 ('Module', [('If', (1, 0), ('Name', (1, 3), 'v', ('Load',)), [('Pass', (1, 5))], [])]),
 ('Module', [('Raise', (1, 0), ('Name', (1, 6), 'Exception', ('Load',)), ('Str', (1, 17), 'string'), None)]),
-('Module', [('TryExcept', (1, 0), [('Pass', (2, 2))], [('excepthandler', ('Name', (3, 7), 'Exception', ('Load',)), None, [('Pass', (4, 2))])], [])]),
+('Module', [('TryExcept', (1, 0), [('Pass', (2, 2))], [('excepthandler', (3, 0), ('Name', (3, 7), 'Exception', ('Load',)), None, [('Pass', (4, 2))], 3, 0)], [])]),
 ('Module', [('TryFinally', (1, 0), [('Pass', (2, 2))], [('Pass', (4, 2))])]),
 ('Module', [('Assert', (1, 0), ('Name', (1, 7), 'v', ('Load',)), None)]),
 ('Module', [('Import', (1, 0), [('alias', 'sys', None)])]),
diff --git a/Lib/test/test_audioop.py b/Lib/test/test_audioop.py
index 440adab..f585733 100644
--- a/Lib/test/test_audioop.py
+++ b/Lib/test/test_audioop.py
@@ -136,12 +136,30 @@
         return 0
     return 1
 
+def testlin2alaw(data):
+    if verbose:
+        print 'lin2alaw'
+    if audioop.lin2alaw(data[0], 1) != '\xd5\xc5\xf5' or \
+              audioop.lin2alaw(data[1], 2) != '\xd5\xd5\xd5' or \
+              audioop.lin2alaw(data[2], 4) != '\xd5\xd5\xd5':
+        return 0
+    return 1
+
+def testalaw2lin(data):
+    if verbose:
+        print 'alaw2lin'
+    # Cursory
+    d = audioop.lin2alaw(data[0], 1)
+    if audioop.alaw2lin(d, 1) != data[0]:
+        return 0
+    return 1
+
 def testlin2ulaw(data):
     if verbose:
         print 'lin2ulaw'
-    if audioop.lin2ulaw(data[0], 1) != '\377\347\333' or \
-              audioop.lin2ulaw(data[1], 2) != '\377\377\377' or \
-              audioop.lin2ulaw(data[2], 4) != '\377\377\377':
+    if audioop.lin2ulaw(data[0], 1) != '\xff\xe7\xdb' or \
+              audioop.lin2ulaw(data[1], 2) != '\xff\xff\xff' or \
+              audioop.lin2ulaw(data[2], 4) != '\xff\xff\xff':
         return 0
     return 1
 
diff --git a/Lib/test/test_augassign.py b/Lib/test/test_augassign.py
index 22cca44..0309d6e 100644
--- a/Lib/test/test_augassign.py
+++ b/Lib/test/test_augassign.py
@@ -1,258 +1,312 @@
 # Augmented assignment test.
 
-x = 2
-x += 1
-x *= 2
-x **= 2
-x -= 8
-x %= 12
-x >>= 1
-x &= 2
-x |= 5
-x ^= 1
-x <<= 2
-x /= 2
-x //= 2
-
-print x
-print int(x)
-
-x = [2]
-x[0] += 1
-x[0] *= 2
-x[0] **= 2
-x[0] -= 8
-x[0] %= 12
-x[0] >>= 1
-x[0] &= 2
-x[0] |= 5
-x[0] ^= 1
-x[0] <<= 2
-x[0] /= 2
-x[0] //= 2
-
-print x
-print int(x[0])
-
-x = {0: 2}
-x[0] += 1
-x[0] *= 2
-x[0] **= 2
-x[0] -= 8
-x[0] %= 12
-x[0] >>= 1
-x[0] &= 2
-x[0] |= 5
-x[0] ^= 1
-x[0] <<= 2
-x[0] /= 2
-x[0] //= 2
-
-print x[0]
-print int(x[0])
-
-x = [1,2]
-x += [3,4]
-x *= 2
-
-print x
-
-x = [1, 2, 3]
-y = x
-x[1:2] *= 2
-y[1:2] += [1]
-
-print x
-print x is y
-
-class aug_test:
-    def __init__(self, value):
-        self.val = value
-    def __radd__(self, val):
-        return self.val + val
-    def __add__(self, val):
-        return aug_test(self.val + val)
+from test.test_support import run_unittest
+import unittest
 
 
-class aug_test2(aug_test):
-    def __iadd__(self, val):
-        self.val = self.val + val
-        return self
+class AugAssignTest(unittest.TestCase):
+    def testBasic(self):
+        x = 2
+        x += 1
+        x *= 2
+        x **= 2
+        x -= 8
+        x //= 5
+        x %= 3
+        x &= 2
+        x |= 5
+        x ^= 1
+        x /= 2
+        self.assertEquals(x, 3.0)
 
-class aug_test3(aug_test):
-    def __iadd__(self, val):
-        return aug_test3(self.val + val)
+    def testInList(self):
+        x = [2]
+        x[0] += 1
+        x[0] *= 2
+        x[0] **= 2
+        x[0] -= 8
+        x[0] //= 5
+        x[0] %= 3
+        x[0] &= 2
+        x[0] |= 5
+        x[0] ^= 1
+        x[0] /= 2
+        self.assertEquals(x[0], 3.0)
 
-x = aug_test(1)
-y = x
-x += 10
+    def testInDict(self):
+        x = {0: 2}
+        x[0] += 1
+        x[0] *= 2
+        x[0] **= 2
+        x[0] -= 8
+        x[0] //= 5
+        x[0] %= 3
+        x[0] &= 2
+        x[0] |= 5
+        x[0] ^= 1
+        x[0] /= 2
+        self.assertEquals(x[0], 3.0)
 
-print isinstance(x, aug_test)
-print y is not x
-print x.val
+    def testSequences(self):
+        x = [1,2]
+        x += [3,4]
+        x *= 2
 
-x = aug_test2(2)
-y = x
-x += 10
+        self.assertEquals(x, [1, 2, 3, 4, 1, 2, 3, 4])
 
-print y is x
-print x.val
+        x = [1, 2, 3]
+        y = x
+        x[1:2] *= 2
+        y[1:2] += [1]
 
-x = aug_test3(3)
-y = x
-x += 10
+        self.assertEquals(x, [1, 2, 1, 2, 3])
+        self.assert_(x is y)
 
-print isinstance(x, aug_test3)
-print y is not x
-print x.val
+    def testCustomMethods1(self):
 
-class testall:
+        class aug_test:
+            def __init__(self, value):
+                self.val = value
+            def __radd__(self, val):
+                return self.val + val
+            def __add__(self, val):
+                return aug_test(self.val + val)
 
-    def __add__(self, val):
-        print "__add__ called"
-    def __radd__(self, val):
-        print "__radd__ called"
-    def __iadd__(self, val):
-        print "__iadd__ called"
-        return self
+        class aug_test2(aug_test):
+            def __iadd__(self, val):
+                self.val = self.val + val
+                return self
 
-    def __sub__(self, val):
-        print "__sub__ called"
-    def __rsub__(self, val):
-        print "__rsub__ called"
-    def __isub__(self, val):
-        print "__isub__ called"
-        return self
+        class aug_test3(aug_test):
+            def __iadd__(self, val):
+                return aug_test3(self.val + val)
 
-    def __mul__(self, val):
-        print "__mul__ called"
-    def __rmul__(self, val):
-        print "__rmul__ called"
-    def __imul__(self, val):
-        print "__imul__ called"
-        return self
+        x = aug_test(1)
+        y = x
+        x += 10
 
-    def __floordiv__(self, val):
-        print "__floordiv__ called"
-        return self
-    def __ifloordiv__(self, val):
-        print "__ifloordiv__ called"
-        return self
-    def __rfloordiv__(self, val):
-        print "__rfloordiv__ called"
-        return self
+        self.assert_(isinstance(x, aug_test))
+        self.assert_(y is not x)
+        self.assertEquals(x.val, 11)
 
-    def __truediv__(self, val):
-        print "__truediv__ called"
-        return self
-    def __itruediv__(self, val):
-        print "__itruediv__ called"
-        return self
-    def __rtruediv__(self, val):
-        print "__rtruediv__ called"
-        return self
+        x = aug_test2(2)
+        y = x
+        x += 10
 
-    def __mod__(self, val):
-        print "__mod__ called"
-    def __rmod__(self, val):
-        print "__rmod__ called"
-    def __imod__(self, val):
-        print "__imod__ called"
-        return self
+        self.assert_(y is x)
+        self.assertEquals(x.val, 12)
 
-    def __pow__(self, val):
-        print "__pow__ called"
-    def __rpow__(self, val):
-        print "__rpow__ called"
-    def __ipow__(self, val):
-        print "__ipow__ called"
-        return self
+        x = aug_test3(3)
+        y = x
+        x += 10
 
-    def __or__(self, val):
-        print "__or__ called"
-    def __ror__(self, val):
-        print "__ror__ called"
-    def __ior__(self, val):
-        print "__ior__ called"
-        return self
+        self.assert_(isinstance(x, aug_test3))
+        self.assert_(y is not x)
+        self.assertEquals(x.val, 13)
 
-    def __and__(self, val):
-        print "__and__ called"
-    def __rand__(self, val):
-        print "__rand__ called"
-    def __iand__(self, val):
-        print "__iand__ called"
-        return self
 
-    def __xor__(self, val):
-        print "__xor__ called"
-    def __rxor__(self, val):
-        print "__rxor__ called"
-    def __ixor__(self, val):
-        print "__ixor__ called"
-        return self
+    def testCustomMethods2(test_self):
+        output = []
 
-    def __rshift__(self, val):
-        print "__rshift__ called"
-    def __rrshift__(self, val):
-        print "__rrshift__ called"
-    def __irshift__(self, val):
-        print "__irshift__ called"
-        return self
+        class testall:
+            def __add__(self, val):
+                output.append("__add__ called")
+            def __radd__(self, val):
+                output.append("__radd__ called")
+            def __iadd__(self, val):
+                output.append("__iadd__ called")
+                return self
 
-    def __lshift__(self, val):
-        print "__lshift__ called"
-    def __rlshift__(self, val):
-        print "__rlshift__ called"
-    def __ilshift__(self, val):
-        print "__ilshift__ called"
-        return self
+            def __sub__(self, val):
+                output.append("__sub__ called")
+            def __rsub__(self, val):
+                output.append("__rsub__ called")
+            def __isub__(self, val):
+                output.append("__isub__ called")
+                return self
 
-x = testall()
-x + 1
-1 + x
-x += 1
+            def __mul__(self, val):
+                output.append("__mul__ called")
+            def __rmul__(self, val):
+                output.append("__rmul__ called")
+            def __imul__(self, val):
+                output.append("__imul__ called")
+                return self
 
-x - 1
-1 - x
-x -= 1
+            def __div__(self, val):
+                output.append("__div__ called")
+            def __rdiv__(self, val):
+                output.append("__rdiv__ called")
+            def __idiv__(self, val):
+                output.append("__idiv__ called")
+                return self
 
-x * 1
-1 * x
-x *= 1
+            def __floordiv__(self, val):
+                output.append("__floordiv__ called")
+                return self
+            def __ifloordiv__(self, val):
+                output.append("__ifloordiv__ called")
+                return self
+            def __rfloordiv__(self, val):
+                output.append("__rfloordiv__ called")
+                return self
 
-x / 1
-1 / x
-x /= 1
+            def __truediv__(self, val):
+                output.append("__truediv__ called")
+                return self
+            def __rtruediv__(self, val):
+                output.append("__rtruediv__ called")
+                return self
+            def __itruediv__(self, val):
+                output.append("__itruediv__ called")
+                return self
 
-x // 1
-1 // x
-x //= 1
+            def __mod__(self, val):
+                output.append("__mod__ called")
+            def __rmod__(self, val):
+                output.append("__rmod__ called")
+            def __imod__(self, val):
+                output.append("__imod__ called")
+                return self
 
-x % 1
-1 % x
-x %= 1
+            def __pow__(self, val):
+                output.append("__pow__ called")
+            def __rpow__(self, val):
+                output.append("__rpow__ called")
+            def __ipow__(self, val):
+                output.append("__ipow__ called")
+                return self
 
-x ** 1
-1 ** x
-x **= 1
+            def __or__(self, val):
+                output.append("__or__ called")
+            def __ror__(self, val):
+                output.append("__ror__ called")
+            def __ior__(self, val):
+                output.append("__ior__ called")
+                return self
 
-x | 1
-1 | x
-x |= 1
+            def __and__(self, val):
+                output.append("__and__ called")
+            def __rand__(self, val):
+                output.append("__rand__ called")
+            def __iand__(self, val):
+                output.append("__iand__ called")
+                return self
 
-x & 1
-1 & x
-x &= 1
+            def __xor__(self, val):
+                output.append("__xor__ called")
+            def __rxor__(self, val):
+                output.append("__rxor__ called")
+            def __ixor__(self, val):
+                output.append("__ixor__ called")
+                return self
 
-x ^ 1
-1 ^ x
-x ^= 1
+            def __rshift__(self, val):
+                output.append("__rshift__ called")
+            def __rrshift__(self, val):
+                output.append("__rrshift__ called")
+            def __irshift__(self, val):
+                output.append("__irshift__ called")
+                return self
 
-x >> 1
-1 >> x
-x >>= 1
+            def __lshift__(self, val):
+                output.append("__lshift__ called")
+            def __rlshift__(self, val):
+                output.append("__rlshift__ called")
+            def __ilshift__(self, val):
+                output.append("__ilshift__ called")
+                return self
 
-x << 1
-1 << x
-x <<= 1
+        x = testall()
+        x + 1
+        1 + x
+        x += 1
+
+        x - 1
+        1 - x
+        x -= 1
+
+        x * 1
+        1 * x
+        x *= 1
+
+        x / 1
+        1 / x
+        x /= 1
+
+        x // 1
+        1 // x
+        x //= 1
+
+        x % 1
+        1 % x
+        x %= 1
+
+        x ** 1
+        1 ** x
+        x **= 1
+
+        x | 1
+        1 | x
+        x |= 1
+
+        x & 1
+        1 & x
+        x &= 1
+
+        x ^ 1
+        1 ^ x
+        x ^= 1
+
+        x >> 1
+        1 >> x
+        x >>= 1
+
+        x << 1
+        1 << x
+        x <<= 1
+
+        test_self.assertEquals(output, '''\
+__add__ called
+__radd__ called
+__iadd__ called
+__sub__ called
+__rsub__ called
+__isub__ called
+__mul__ called
+__rmul__ called
+__imul__ called
+__truediv__ called
+__rtruediv__ called
+__itruediv__ called
+__floordiv__ called
+__rfloordiv__ called
+__ifloordiv__ called
+__mod__ called
+__rmod__ called
+__imod__ called
+__pow__ called
+__rpow__ called
+__ipow__ called
+__or__ called
+__ror__ called
+__ior__ called
+__and__ called
+__rand__ called
+__iand__ called
+__xor__ called
+__rxor__ called
+__ixor__ called
+__rshift__ called
+__rrshift__ called
+__irshift__ called
+__lshift__ called
+__rlshift__ called
+__ilshift__ called
+'''.splitlines())
+
+def test_main():
+    run_unittest(AugAssignTest)
+
+if __name__ == '__main__':
+    test_main()
diff --git a/Lib/test/test_bsddb.py b/Lib/test/test_bsddb.py
index 1ec4801..513e541 100755
--- a/Lib/test/test_bsddb.py
+++ b/Lib/test/test_bsddb.py
@@ -11,9 +11,10 @@
 from sets import Set
 
 class TestBSDDB(unittest.TestCase):
+    openflag = 'c'
 
     def setUp(self):
-        self.f = self.openmethod[0](self.fname, 'c')
+        self.f = self.openmethod[0](self.fname, self.openflag, cachesize=32768)
         self.d = dict(q='Guido', w='van', e='Rossum', r='invented', t='Python', y='')
         for k, v in self.d.iteritems():
             self.f[k] = v
@@ -267,6 +268,11 @@
     fname = None
     openmethod = [bsddb.btopen]
 
+class TestBTree_InMemory_Truncate(TestBSDDB):
+    fname = None
+    openflag = 'n'
+    openmethod = [bsddb.btopen]
+
 class TestHashTable(TestBSDDB):
     fname = test_support.TESTFN
     openmethod = [bsddb.hashopen]
@@ -285,6 +291,7 @@
         TestHashTable,
         TestBTree_InMemory,
         TestHashTable_InMemory,
+        TestBTree_InMemory_Truncate,
     )
 
 if __name__ == "__main__":
diff --git a/Lib/test/test_builtin.py b/Lib/test/test_builtin.py
index 6f11fdd..ef4f407 100644
--- a/Lib/test/test_builtin.py
+++ b/Lib/test/test_builtin.py
@@ -108,6 +108,7 @@
         __import__('string')
         self.assertRaises(ImportError, __import__, 'spamspam')
         self.assertRaises(TypeError, __import__, 1, 2, 3, 4)
+        self.assertRaises(ValueError, __import__, '')
 
     def test_abs(self):
         # int
@@ -1317,6 +1318,9 @@
 
         self.assertEqual(round(-8.0, -1), -10.0)
 
+        # test new kwargs
+        self.assertEqual(round(number=-8.0, ndigits=-1), -10.0)
+
         self.assertRaises(TypeError, round)
 
     def test_setattr(self):
diff --git a/Lib/test/test_calendar.py b/Lib/test/test_calendar.py
index 34d365b..e414324 100644
--- a/Lib/test/test_calendar.py
+++ b/Lib/test/test_calendar.py
@@ -4,6 +4,202 @@
 from test import test_support
 
 
+result_2004_text = """
+                                  2004
+
+      January                   February                   March
+Mo Tu We Th Fr Sa Su      Mo Tu We Th Fr Sa Su      Mo Tu We Th Fr Sa Su
+          1  2  3  4                         1       1  2  3  4  5  6  7
+ 5  6  7  8  9 10 11       2  3  4  5  6  7  8       8  9 10 11 12 13 14
+12 13 14 15 16 17 18       9 10 11 12 13 14 15      15 16 17 18 19 20 21
+19 20 21 22 23 24 25      16 17 18 19 20 21 22      22 23 24 25 26 27 28
+26 27 28 29 30 31         23 24 25 26 27 28 29      29 30 31
+
+       April                      May                       June
+Mo Tu We Th Fr Sa Su      Mo Tu We Th Fr Sa Su      Mo Tu We Th Fr Sa Su
+          1  2  3  4                      1  2          1  2  3  4  5  6
+ 5  6  7  8  9 10 11       3  4  5  6  7  8  9       7  8  9 10 11 12 13
+12 13 14 15 16 17 18      10 11 12 13 14 15 16      14 15 16 17 18 19 20
+19 20 21 22 23 24 25      17 18 19 20 21 22 23      21 22 23 24 25 26 27
+26 27 28 29 30            24 25 26 27 28 29 30      28 29 30
+                          31
+
+        July                     August                  September
+Mo Tu We Th Fr Sa Su      Mo Tu We Th Fr Sa Su      Mo Tu We Th Fr Sa Su
+          1  2  3  4                         1             1  2  3  4  5
+ 5  6  7  8  9 10 11       2  3  4  5  6  7  8       6  7  8  9 10 11 12
+12 13 14 15 16 17 18       9 10 11 12 13 14 15      13 14 15 16 17 18 19
+19 20 21 22 23 24 25      16 17 18 19 20 21 22      20 21 22 23 24 25 26
+26 27 28 29 30 31         23 24 25 26 27 28 29      27 28 29 30
+                          30 31
+
+      October                   November                  December
+Mo Tu We Th Fr Sa Su      Mo Tu We Th Fr Sa Su      Mo Tu We Th Fr Sa Su
+             1  2  3       1  2  3  4  5  6  7             1  2  3  4  5
+ 4  5  6  7  8  9 10       8  9 10 11 12 13 14       6  7  8  9 10 11 12
+11 12 13 14 15 16 17      15 16 17 18 19 20 21      13 14 15 16 17 18 19
+18 19 20 21 22 23 24      22 23 24 25 26 27 28      20 21 22 23 24 25 26
+25 26 27 28 29 30 31      29 30                     27 28 29 30 31
+"""
+
+result_2004_html = """
+<?xml version="1.0" encoding="ascii"?>
+<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<html>
+<head>
+<meta http-equiv="Content-Type" content="text/html; charset=ascii" />
+<link rel="stylesheet" type="text/css" href="calendar.css" />
+<title>Calendar for 2004</title
+</head>
+<body>
+<table border="0" cellpadding="0" cellspacing="0" class="year">
+<tr><th colspan="3" class="year">2004</th></tr><tr><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">January</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="thu">1</td><td class="fri">2</td><td class="sat">3</td><td class="sun">4</td></tr>
+<tr><td class="mon">5</td><td class="tue">6</td><td class="wed">7</td><td class="thu">8</td><td class="fri">9</td><td class="sat">10</td><td class="sun">11</td></tr>
+<tr><td class="mon">12</td><td class="tue">13</td><td class="wed">14</td><td class="thu">15</td><td class="fri">16</td><td class="sat">17</td><td class="sun">18</td></tr>
+<tr><td class="mon">19</td><td class="tue">20</td><td class="wed">21</td><td class="thu">22</td><td class="fri">23</td><td class="sat">24</td><td class="sun">25</td></tr>
+<tr><td class="mon">26</td><td class="tue">27</td><td class="wed">28</td><td class="thu">29</td><td class="fri">30</td><td class="sat">31</td><td class="noday">&nbsp;</td></tr>
+</table>
+</td><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">February</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="sun">1</td></tr>
+<tr><td class="mon">2</td><td class="tue">3</td><td class="wed">4</td><td class="thu">5</td><td class="fri">6</td><td class="sat">7</td><td class="sun">8</td></tr>
+<tr><td class="mon">9</td><td class="tue">10</td><td class="wed">11</td><td class="thu">12</td><td class="fri">13</td><td class="sat">14</td><td class="sun">15</td></tr>
+<tr><td class="mon">16</td><td class="tue">17</td><td class="wed">18</td><td class="thu">19</td><td class="fri">20</td><td class="sat">21</td><td class="sun">22</td></tr>
+<tr><td class="mon">23</td><td class="tue">24</td><td class="wed">25</td><td class="thu">26</td><td class="fri">27</td><td class="sat">28</td><td class="sun">29</td></tr>
+</table>
+</td><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">March</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="mon">1</td><td class="tue">2</td><td class="wed">3</td><td class="thu">4</td><td class="fri">5</td><td class="sat">6</td><td class="sun">7</td></tr>
+<tr><td class="mon">8</td><td class="tue">9</td><td class="wed">10</td><td class="thu">11</td><td class="fri">12</td><td class="sat">13</td><td class="sun">14</td></tr>
+<tr><td class="mon">15</td><td class="tue">16</td><td class="wed">17</td><td class="thu">18</td><td class="fri">19</td><td class="sat">20</td><td class="sun">21</td></tr>
+<tr><td class="mon">22</td><td class="tue">23</td><td class="wed">24</td><td class="thu">25</td><td class="fri">26</td><td class="sat">27</td><td class="sun">28</td></tr>
+<tr><td class="mon">29</td><td class="tue">30</td><td class="wed">31</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td></tr>
+</table>
+</td></tr><tr><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">April</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="thu">1</td><td class="fri">2</td><td class="sat">3</td><td class="sun">4</td></tr>
+<tr><td class="mon">5</td><td class="tue">6</td><td class="wed">7</td><td class="thu">8</td><td class="fri">9</td><td class="sat">10</td><td class="sun">11</td></tr>
+<tr><td class="mon">12</td><td class="tue">13</td><td class="wed">14</td><td class="thu">15</td><td class="fri">16</td><td class="sat">17</td><td class="sun">18</td></tr>
+<tr><td class="mon">19</td><td class="tue">20</td><td class="wed">21</td><td class="thu">22</td><td class="fri">23</td><td class="sat">24</td><td class="sun">25</td></tr>
+<tr><td class="mon">26</td><td class="tue">27</td><td class="wed">28</td><td class="thu">29</td><td class="fri">30</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td></tr>
+</table>
+</td><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">May</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="sat">1</td><td class="sun">2</td></tr>
+<tr><td class="mon">3</td><td class="tue">4</td><td class="wed">5</td><td class="thu">6</td><td class="fri">7</td><td class="sat">8</td><td class="sun">9</td></tr>
+<tr><td class="mon">10</td><td class="tue">11</td><td class="wed">12</td><td class="thu">13</td><td class="fri">14</td><td class="sat">15</td><td class="sun">16</td></tr>
+<tr><td class="mon">17</td><td class="tue">18</td><td class="wed">19</td><td class="thu">20</td><td class="fri">21</td><td class="sat">22</td><td class="sun">23</td></tr>
+<tr><td class="mon">24</td><td class="tue">25</td><td class="wed">26</td><td class="thu">27</td><td class="fri">28</td><td class="sat">29</td><td class="sun">30</td></tr>
+<tr><td class="mon">31</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td></tr>
+</table>
+</td><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">June</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="noday">&nbsp;</td><td class="tue">1</td><td class="wed">2</td><td class="thu">3</td><td class="fri">4</td><td class="sat">5</td><td class="sun">6</td></tr>
+<tr><td class="mon">7</td><td class="tue">8</td><td class="wed">9</td><td class="thu">10</td><td class="fri">11</td><td class="sat">12</td><td class="sun">13</td></tr>
+<tr><td class="mon">14</td><td class="tue">15</td><td class="wed">16</td><td class="thu">17</td><td class="fri">18</td><td class="sat">19</td><td class="sun">20</td></tr>
+<tr><td class="mon">21</td><td class="tue">22</td><td class="wed">23</td><td class="thu">24</td><td class="fri">25</td><td class="sat">26</td><td class="sun">27</td></tr>
+<tr><td class="mon">28</td><td class="tue">29</td><td class="wed">30</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td></tr>
+</table>
+</td></tr><tr><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">July</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="thu">1</td><td class="fri">2</td><td class="sat">3</td><td class="sun">4</td></tr>
+<tr><td class="mon">5</td><td class="tue">6</td><td class="wed">7</td><td class="thu">8</td><td class="fri">9</td><td class="sat">10</td><td class="sun">11</td></tr>
+<tr><td class="mon">12</td><td class="tue">13</td><td class="wed">14</td><td class="thu">15</td><td class="fri">16</td><td class="sat">17</td><td class="sun">18</td></tr>
+<tr><td class="mon">19</td><td class="tue">20</td><td class="wed">21</td><td class="thu">22</td><td class="fri">23</td><td class="sat">24</td><td class="sun">25</td></tr>
+<tr><td class="mon">26</td><td class="tue">27</td><td class="wed">28</td><td class="thu">29</td><td class="fri">30</td><td class="sat">31</td><td class="noday">&nbsp;</td></tr>
+</table>
+</td><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">August</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="sun">1</td></tr>
+<tr><td class="mon">2</td><td class="tue">3</td><td class="wed">4</td><td class="thu">5</td><td class="fri">6</td><td class="sat">7</td><td class="sun">8</td></tr>
+<tr><td class="mon">9</td><td class="tue">10</td><td class="wed">11</td><td class="thu">12</td><td class="fri">13</td><td class="sat">14</td><td class="sun">15</td></tr>
+<tr><td class="mon">16</td><td class="tue">17</td><td class="wed">18</td><td class="thu">19</td><td class="fri">20</td><td class="sat">21</td><td class="sun">22</td></tr>
+<tr><td class="mon">23</td><td class="tue">24</td><td class="wed">25</td><td class="thu">26</td><td class="fri">27</td><td class="sat">28</td><td class="sun">29</td></tr>
+<tr><td class="mon">30</td><td class="tue">31</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td></tr>
+</table>
+</td><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">September</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="wed">1</td><td class="thu">2</td><td class="fri">3</td><td class="sat">4</td><td class="sun">5</td></tr>
+<tr><td class="mon">6</td><td class="tue">7</td><td class="wed">8</td><td class="thu">9</td><td class="fri">10</td><td class="sat">11</td><td class="sun">12</td></tr>
+<tr><td class="mon">13</td><td class="tue">14</td><td class="wed">15</td><td class="thu">16</td><td class="fri">17</td><td class="sat">18</td><td class="sun">19</td></tr>
+<tr><td class="mon">20</td><td class="tue">21</td><td class="wed">22</td><td class="thu">23</td><td class="fri">24</td><td class="sat">25</td><td class="sun">26</td></tr>
+<tr><td class="mon">27</td><td class="tue">28</td><td class="wed">29</td><td class="thu">30</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td></tr>
+</table>
+</td></tr><tr><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">October</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="fri">1</td><td class="sat">2</td><td class="sun">3</td></tr>
+<tr><td class="mon">4</td><td class="tue">5</td><td class="wed">6</td><td class="thu">7</td><td class="fri">8</td><td class="sat">9</td><td class="sun">10</td></tr>
+<tr><td class="mon">11</td><td class="tue">12</td><td class="wed">13</td><td class="thu">14</td><td class="fri">15</td><td class="sat">16</td><td class="sun">17</td></tr>
+<tr><td class="mon">18</td><td class="tue">19</td><td class="wed">20</td><td class="thu">21</td><td class="fri">22</td><td class="sat">23</td><td class="sun">24</td></tr>
+<tr><td class="mon">25</td><td class="tue">26</td><td class="wed">27</td><td class="thu">28</td><td class="fri">29</td><td class="sat">30</td><td class="sun">31</td></tr>
+</table>
+</td><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">November</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="mon">1</td><td class="tue">2</td><td class="wed">3</td><td class="thu">4</td><td class="fri">5</td><td class="sat">6</td><td class="sun">7</td></tr>
+<tr><td class="mon">8</td><td class="tue">9</td><td class="wed">10</td><td class="thu">11</td><td class="fri">12</td><td class="sat">13</td><td class="sun">14</td></tr>
+<tr><td class="mon">15</td><td class="tue">16</td><td class="wed">17</td><td class="thu">18</td><td class="fri">19</td><td class="sat">20</td><td class="sun">21</td></tr>
+<tr><td class="mon">22</td><td class="tue">23</td><td class="wed">24</td><td class="thu">25</td><td class="fri">26</td><td class="sat">27</td><td class="sun">28</td></tr>
+<tr><td class="mon">29</td><td class="tue">30</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td></tr>
+</table>
+</td><td><table border="0" cellpadding="0" cellspacing="0" class="month">
+<tr><th colspan="7" class="month">December</th></tr>
+<tr><th class="mon">Mon</th><th class="tue">Tue</th><th class="wed">Wed</th><th class="thu">Thu</th><th class="fri">Fri</th><th class="sat">Sat</th><th class="sun">Sun</th></tr>
+<tr><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td><td class="wed">1</td><td class="thu">2</td><td class="fri">3</td><td class="sat">4</td><td class="sun">5</td></tr>
+<tr><td class="mon">6</td><td class="tue">7</td><td class="wed">8</td><td class="thu">9</td><td class="fri">10</td><td class="sat">11</td><td class="sun">12</td></tr>
+<tr><td class="mon">13</td><td class="tue">14</td><td class="wed">15</td><td class="thu">16</td><td class="fri">17</td><td class="sat">18</td><td class="sun">19</td></tr>
+<tr><td class="mon">20</td><td class="tue">21</td><td class="wed">22</td><td class="thu">23</td><td class="fri">24</td><td class="sat">25</td><td class="sun">26</td></tr>
+<tr><td class="mon">27</td><td class="tue">28</td><td class="wed">29</td><td class="thu">30</td><td class="fri">31</td><td class="noday">&nbsp;</td><td class="noday">&nbsp;</td></tr>
+</table>
+</td></tr></table></body>
+</html>
+"""
+
+
+class OutputTestCase(unittest.TestCase):
+    def normalize_calendar(self, s):
+        # Filters out locale dependant strings
+        def neitherspacenordigit(c):
+            return not c.isspace() and not c.isdigit()
+
+        lines = []
+        for line in s.splitlines(False):
+            # Drop texts, as they are locale dependent
+            if line and not filter(neitherspacenordigit, line):
+                lines.append(line)
+        return lines
+
+    def test_output(self):
+        self.assertEqual(
+            self.normalize_calendar(calendar.calendar(2004)),
+            self.normalize_calendar(result_2004_text)
+        )
+
+    def test_output_textcalendar(self):
+        self.assertEqual(
+            calendar.TextCalendar().formatyear(2004).strip(),
+            result_2004_text.strip()
+        )
+
+    def test_output_htmlcalendar(self):
+        self.assertEqual(
+            calendar.HTMLCalendar().formatyearpage(2004).strip(),
+            result_2004_html.strip()
+        )
+
+
 class CalendarTestCase(unittest.TestCase):
     def test_isleap(self):
         # Make sure that the return is right for a few years, and
@@ -72,57 +268,57 @@
     firstweekday = calendar.MONDAY
 
     def test_february(self):
-        # A 28-day february starting of monday (7+7+7+7 days)
+        # A 28-day february starting on monday (7+7+7+7 days)
         self.check_weeks(1999, 2, (7, 7, 7, 7))
 
-        # A 28-day february starting of tuesday (6+7+7+7+1 days)
+        # A 28-day february starting on tuesday (6+7+7+7+1 days)
         self.check_weeks(2005, 2, (6, 7, 7, 7, 1))
 
-        # A 28-day february starting of sunday (1+7+7+7+6 days)
+        # A 28-day february starting on sunday (1+7+7+7+6 days)
         self.check_weeks(1987, 2, (1, 7, 7, 7, 6))
 
-        # A 29-day february starting of monday (7+7+7+7+1 days)
+        # A 29-day february starting on monday (7+7+7+7+1 days)
         self.check_weeks(1988, 2, (7, 7, 7, 7, 1))
 
-        # A 29-day february starting of tuesday (6+7+7+7+2 days)
+        # A 29-day february starting on tuesday (6+7+7+7+2 days)
         self.check_weeks(1972, 2, (6, 7, 7, 7, 2))
 
-        # A 29-day february starting of sunday (1+7+7+7+7 days)
+        # A 29-day february starting on sunday (1+7+7+7+7 days)
         self.check_weeks(2004, 2, (1, 7, 7, 7, 7))
 
     def test_april(self):
-        # A 30-day april starting of monday (7+7+7+7+2 days)
+        # A 30-day april starting on monday (7+7+7+7+2 days)
         self.check_weeks(1935, 4, (7, 7, 7, 7, 2))
 
-        # A 30-day april starting of tuesday (6+7+7+7+3 days)
+        # A 30-day april starting on tuesday (6+7+7+7+3 days)
         self.check_weeks(1975, 4, (6, 7, 7, 7, 3))
 
-        # A 30-day april starting of sunday (1+7+7+7+7+1 days)
+        # A 30-day april starting on sunday (1+7+7+7+7+1 days)
         self.check_weeks(1945, 4, (1, 7, 7, 7, 7, 1))
 
-        # A 30-day april starting of saturday (2+7+7+7+7 days)
+        # A 30-day april starting on saturday (2+7+7+7+7 days)
         self.check_weeks(1995, 4, (2, 7, 7, 7, 7))
 
-        # A 30-day april starting of friday (3+7+7+7+6 days)
+        # A 30-day april starting on friday (3+7+7+7+6 days)
         self.check_weeks(1994, 4, (3, 7, 7, 7, 6))
 
     def test_december(self):
-        # A 31-day december starting of monday (7+7+7+7+3 days)
+        # A 31-day december starting on monday (7+7+7+7+3 days)
         self.check_weeks(1980, 12, (7, 7, 7, 7, 3))
 
-        # A 31-day december starting of tuesday (6+7+7+7+4 days)
+        # A 31-day december starting on tuesday (6+7+7+7+4 days)
         self.check_weeks(1987, 12, (6, 7, 7, 7, 4))
 
-        # A 31-day december starting of sunday (1+7+7+7+7+2 days)
+        # A 31-day december starting on sunday (1+7+7+7+7+2 days)
         self.check_weeks(1968, 12, (1, 7, 7, 7, 7, 2))
 
-        # A 31-day december starting of thursday (4+7+7+7+6 days)
+        # A 31-day december starting on thursday (4+7+7+7+6 days)
         self.check_weeks(1988, 12, (4, 7, 7, 7, 6))
 
-        # A 31-day december starting of friday (3+7+7+7+7 days)
+        # A 31-day december starting on friday (3+7+7+7+7 days)
         self.check_weeks(2017, 12, (3, 7, 7, 7, 7))
 
-        # A 31-day december starting of saturday (2+7+7+7+7+1 days)
+        # A 31-day december starting on saturday (2+7+7+7+7+1 days)
         self.check_weeks(2068, 12, (2, 7, 7, 7, 7, 1))
 
 
@@ -130,62 +326,63 @@
     firstweekday = calendar.SUNDAY
 
     def test_february(self):
-        # A 28-day february starting of sunday (7+7+7+7 days)
+        # A 28-day february starting on sunday (7+7+7+7 days)
         self.check_weeks(2009, 2, (7, 7, 7, 7))
 
-        # A 28-day february starting of monday (6+7+7+7+1 days)
+        # A 28-day february starting on monday (6+7+7+7+1 days)
         self.check_weeks(1999, 2, (6, 7, 7, 7, 1))
 
-        # A 28-day february starting of saturday (1+7+7+7+6 days)
+        # A 28-day february starting on saturday (1+7+7+7+6 days)
         self.check_weeks(1997, 2, (1, 7, 7, 7, 6))
 
-        # A 29-day february starting of sunday (7+7+7+7+1 days)
+        # A 29-day february starting on sunday (7+7+7+7+1 days)
         self.check_weeks(2004, 2, (7, 7, 7, 7, 1))
 
-        # A 29-day february starting of monday (6+7+7+7+2 days)
+        # A 29-day february starting on monday (6+7+7+7+2 days)
         self.check_weeks(1960, 2, (6, 7, 7, 7, 2))
 
-        # A 29-day february starting of saturday (1+7+7+7+7 days)
+        # A 29-day february starting on saturday (1+7+7+7+7 days)
         self.check_weeks(1964, 2, (1, 7, 7, 7, 7))
 
     def test_april(self):
-        # A 30-day april starting of sunday (7+7+7+7+2 days)
+        # A 30-day april starting on sunday (7+7+7+7+2 days)
         self.check_weeks(1923, 4, (7, 7, 7, 7, 2))
 
-        # A 30-day april starting of monday (6+7+7+7+3 days)
+        # A 30-day april starting on monday (6+7+7+7+3 days)
         self.check_weeks(1918, 4, (6, 7, 7, 7, 3))
 
-        # A 30-day april starting of saturday (1+7+7+7+7+1 days)
+        # A 30-day april starting on saturday (1+7+7+7+7+1 days)
         self.check_weeks(1950, 4, (1, 7, 7, 7, 7, 1))
 
-        # A 30-day april starting of friday (2+7+7+7+7 days)
+        # A 30-day april starting on friday (2+7+7+7+7 days)
         self.check_weeks(1960, 4, (2, 7, 7, 7, 7))
 
-        # A 30-day april starting of thursday (3+7+7+7+6 days)
+        # A 30-day april starting on thursday (3+7+7+7+6 days)
         self.check_weeks(1909, 4, (3, 7, 7, 7, 6))
 
     def test_december(self):
-        # A 31-day december starting of sunday (7+7+7+7+3 days)
+        # A 31-day december starting on sunday (7+7+7+7+3 days)
         self.check_weeks(2080, 12, (7, 7, 7, 7, 3))
 
-        # A 31-day december starting of monday (6+7+7+7+4 days)
+        # A 31-day december starting on monday (6+7+7+7+4 days)
         self.check_weeks(1941, 12, (6, 7, 7, 7, 4))
 
-        # A 31-day december starting of saturday (1+7+7+7+7+2 days)
+        # A 31-day december starting on saturday (1+7+7+7+7+2 days)
         self.check_weeks(1923, 12, (1, 7, 7, 7, 7, 2))
 
-        # A 31-day december starting of wednesday (4+7+7+7+6 days)
+        # A 31-day december starting on wednesday (4+7+7+7+6 days)
         self.check_weeks(1948, 12, (4, 7, 7, 7, 6))
 
-        # A 31-day december starting of thursday (3+7+7+7+7 days)
+        # A 31-day december starting on thursday (3+7+7+7+7 days)
         self.check_weeks(1927, 12, (3, 7, 7, 7, 7))
 
-        # A 31-day december starting of friday (2+7+7+7+7+1 days)
+        # A 31-day december starting on friday (2+7+7+7+7+1 days)
         self.check_weeks(1995, 12, (2, 7, 7, 7, 7, 1))
 
 
 def test_main():
     test_support.run_unittest(
+        OutputTestCase,
         CalendarTestCase,
         MondayTestCase,
         SundayTestCase
diff --git a/Lib/test/test_capi.py b/Lib/test/test_capi.py
index 1dd2461..cdd84bb 100644
--- a/Lib/test/test_capi.py
+++ b/Lib/test/test_capi.py
@@ -5,44 +5,51 @@
 from test import test_support
 import _testcapi
 
-for name in dir(_testcapi):
-    if name.startswith('test_'):
-        test = getattr(_testcapi, name)
+def test_main():
+
+    for name in dir(_testcapi):
+        if name.startswith('test_'):
+            test = getattr(_testcapi, name)
+            if test_support.verbose:
+                print "internal", name
+            try:
+                test()
+            except _testcapi.error:
+                raise test_support.TestFailed, sys.exc_info()[1]
+
+    # some extra thread-state tests driven via _testcapi
+    def TestThreadState():
+        import thread
+        import time
+
         if test_support.verbose:
-            print "internal", name
-        try:
-            test()
-        except _testcapi.error:
-            raise test_support.TestFailed, sys.exc_info()[1]
+            print "auto-thread-state"
 
-# some extra thread-state tests driven via _testcapi
-def TestThreadState():
-    import thread
-    import time
+        idents = []
 
-    if test_support.verbose:
-        print "auto-thread-state"
+        def callback():
+            idents.append(thread.get_ident())
 
-    idents = []
+        _testcapi._test_thread_state(callback)
+        a = b = callback
+        time.sleep(1)
+        # Check our main thread is in the list exactly 3 times.
+        if idents.count(thread.get_ident()) != 3:
+            raise test_support.TestFailed, \
+                  "Couldn't find main thread correctly in the list"
 
-    def callback():
-        idents.append(thread.get_ident())
+    try:
+        _testcapi._test_thread_state
+        have_thread_state = True
+    except AttributeError:
+        have_thread_state = False
 
-    _testcapi._test_thread_state(callback)
-    time.sleep(1)
-    # Check our main thread is in the list exactly 3 times.
-    if idents.count(thread.get_ident()) != 3:
-        raise test_support.TestFailed, \
-              "Couldn't find main thread correctly in the list"
+    if have_thread_state:
+        TestThreadState()
+        import threading
+        t=threading.Thread(target=TestThreadState)
+        t.start()
+        t.join()
 
-try:
-    _testcapi._test_thread_state
-    have_thread_state = True
-except AttributeError:
-    have_thread_state = False
-
-if have_thread_state:
-    TestThreadState()
-    import threading
-    t=threading.Thread(target=TestThreadState)
-    t.start()
+if __name__ == "__main__":
+    test_main()
diff --git a/Lib/test/test_cmd_line.py b/Lib/test/test_cmd_line.py
index a4a656d..018bec6 100644
--- a/Lib/test/test_cmd_line.py
+++ b/Lib/test/test_cmd_line.py
@@ -10,6 +10,9 @@
         infp.close()
         data = outfp.read()
         outfp.close()
+        # try to cleanup the child so we don't appear to leak when running
+        # with regrtest -R.  This should be a no-op on Windows.
+        popen2._cleanup()
         return data
 
     def exit_code(self, cmd_line):
diff --git a/Lib/test/test_codecs.py b/Lib/test/test_codecs.py
index 913aa91..6ea49cc 100644
--- a/Lib/test/test_codecs.py
+++ b/Lib/test/test_codecs.py
@@ -1,7 +1,7 @@
 from test import test_support
 import unittest
 import codecs
-import sys, StringIO
+import sys, StringIO, _testcapi
 
 class Queue(object):
     """
@@ -781,9 +781,18 @@
                 except Exception,e:
                     raise test_support.TestFailed("Test 3.%d: %s" % (pos+1, str(e)))
 
-class CodecTest(unittest.TestCase):
-    def test_builtin(self):
+class IDNACodecTest(unittest.TestCase):
+    def test_builtin_decode(self):
         self.assertEquals(unicode("python.org", "idna"), u"python.org")
+        self.assertEquals(unicode("python.org.", "idna"), u"python.org.")
+        self.assertEquals(unicode("xn--pythn-mua.org", "idna"), u"pyth\xf6n.org")
+        self.assertEquals(unicode("xn--pythn-mua.org.", "idna"), u"pyth\xf6n.org.")
+
+    def test_builtin_encode(self):
+        self.assertEquals(u"python.org".encode("idna"), "python.org")
+        self.assertEquals("python.org.".encode("idna"), "python.org.")
+        self.assertEquals(u"pyth\xf6n.org".encode("idna"), "xn--pythn-mua.org")
+        self.assertEquals(u"pyth\xf6n.org.".encode("idna"), "xn--pythn-mua.org.")
 
     def test_stream(self):
         import StringIO
@@ -791,6 +800,64 @@
         r.read(3)
         self.assertEquals(r.read(), u"")
 
+    def test_incremental_decode(self):
+        self.assertEquals(
+            "".join(codecs.iterdecode("python.org", "idna")),
+            u"python.org"
+        )
+        self.assertEquals(
+            "".join(codecs.iterdecode("python.org.", "idna")),
+            u"python.org."
+        )
+        self.assertEquals(
+            "".join(codecs.iterdecode("xn--pythn-mua.org.", "idna")),
+            u"pyth\xf6n.org."
+        )
+        self.assertEquals(
+            "".join(codecs.iterdecode("xn--pythn-mua.org.", "idna")),
+            u"pyth\xf6n.org."
+        )
+
+        decoder = codecs.getincrementaldecoder("idna")()
+        self.assertEquals(decoder.decode("xn--xam", ), u"")
+        self.assertEquals(decoder.decode("ple-9ta.o", ), u"\xe4xample.")
+        self.assertEquals(decoder.decode(u"rg"), u"")
+        self.assertEquals(decoder.decode(u"", True), u"org")
+
+        decoder.reset()
+        self.assertEquals(decoder.decode("xn--xam", ), u"")
+        self.assertEquals(decoder.decode("ple-9ta.o", ), u"\xe4xample.")
+        self.assertEquals(decoder.decode("rg."), u"org.")
+        self.assertEquals(decoder.decode("", True), u"")
+
+    def test_incremental_encode(self):
+        self.assertEquals(
+            "".join(codecs.iterencode(u"python.org", "idna")),
+            "python.org"
+        )
+        self.assertEquals(
+            "".join(codecs.iterencode(u"python.org.", "idna")),
+            "python.org."
+        )
+        self.assertEquals(
+            "".join(codecs.iterencode(u"pyth\xf6n.org.", "idna")),
+            "xn--pythn-mua.org."
+        )
+        self.assertEquals(
+            "".join(codecs.iterencode(u"pyth\xf6n.org.", "idna")),
+            "xn--pythn-mua.org."
+        )
+
+        encoder = codecs.getincrementalencoder("idna")()
+        self.assertEquals(encoder.encode(u"\xe4x"), "")
+        self.assertEquals(encoder.encode(u"ample.org"), "xn--xample-9ta.")
+        self.assertEquals(encoder.encode(u"", True), "org")
+
+        encoder.reset()
+        self.assertEquals(encoder.encode(u"\xe4x"), "")
+        self.assertEquals(encoder.encode(u"ample.org."), "xn--xample-9ta.org.")
+        self.assertEquals(encoder.encode(u"", True), "")
+
 class CodecsModuleTest(unittest.TestCase):
 
     def test_decode(self):
@@ -1032,9 +1099,11 @@
                     decodedresult += reader.read()
                 self.assertEqual(decodedresult, s, "%r != %r (encoding=%r)" % (decodedresult, s, encoding))
 
-                # check incremental decoder/encoder and iterencode()/iterdecode()
+                # check incremental decoder/encoder (fetched via the Python
+                # and C API) and iterencode()/iterdecode()
                 try:
                     encoder = codecs.getincrementalencoder(encoding)()
+                    cencoder = _testcapi.codec_incrementalencoder(encoding)
                 except LookupError: # no IncrementalEncoder
                     pass
                 else:
@@ -1042,10 +1111,24 @@
                     encodedresult = ""
                     for c in s:
                         encodedresult += encoder.encode(c)
+                    encodedresult += encoder.encode(u"", True)
                     decoder = codecs.getincrementaldecoder(encoding)()
                     decodedresult = u""
                     for c in encodedresult:
                         decodedresult += decoder.decode(c)
+                    decodedresult += decoder.decode("", True)
+                    self.assertEqual(decodedresult, s, "%r != %r (encoding=%r)" % (decodedresult, s, encoding))
+
+                    # check C API
+                    encodedresult = ""
+                    for c in s:
+                        encodedresult += cencoder.encode(c)
+                    encodedresult += cencoder.encode(u"", True)
+                    cdecoder = _testcapi.codec_incrementaldecoder(encoding)
+                    decodedresult = u""
+                    for c in encodedresult:
+                        decodedresult += cdecoder.decode(c)
+                    decodedresult += cdecoder.decode("", True)
                     self.assertEqual(decodedresult, s, "%r != %r (encoding=%r)" % (decodedresult, s, encoding))
 
                     # check iterencode()/iterdecode()
@@ -1142,7 +1225,7 @@
         PunycodeTest,
         UnicodeInternalTest,
         NameprepTest,
-        CodecTest,
+        IDNACodecTest,
         CodecsModuleTest,
         StreamReaderTest,
         Str2StrTest,
diff --git a/Lib/test/test_coercion.py b/Lib/test/test_coercion.py
index e12ef0d..964f161 100644
--- a/Lib/test/test_coercion.py
+++ b/Lib/test/test_coercion.py
@@ -1,6 +1,8 @@
 import copy
 import sys
 import warnings
+import unittest
+from test.test_support import run_unittest
 
 # Fake a number that implements numeric methods through __coerce__
 class CoerceNumber:
@@ -16,10 +18,19 @@
         else:
             return (self.arg, other)
 
+# New-style class version of CoerceNumber
+class CoerceTo(object):
+    def __init__(self, arg):
+        self.arg = arg
+    def __coerce__(self, other):
+        if isinstance(other, CoerceTo):
+            return self.arg, other.arg
+        else:
+            return self.arg, other
+
 
 # Fake a number that implements numeric ops through methods.
 class MethodNumber:
-
     def __init__(self,arg):
         self.arg = arg
 
@@ -50,6 +61,18 @@
     def __rtruediv__(self,other):
         return other / self.arg
 
+    def __truediv__(self,other):
+        return self.arg / other
+
+    def __rtruediv__(self,other):
+        return other / self.arg
+
+    def __floordiv__(self,other):
+        return self.arg // other
+
+    def __rfloordiv__(self,other):
+        return other // self.arg
+
     def __pow__(self,other):
         return self.arg ** other
 
@@ -66,11 +89,157 @@
         return cmp(self.arg, other)
 
 
-candidates = [ 2, 4.0, 2L, 2+0j, [1], (2,), None,
-               MethodNumber(2), CoerceNumber(2)]
+candidates = [2, 2L, 4.0, 2+0j, [1], (2,), None,
+              MethodNumber(2), CoerceNumber(2)]
 
-infix_binops = [ '+', '-', '*', '/', '**', '%' ]
+infix_binops = [ '+', '-', '*', '**', '%', '//', '/' ]
+
+TE = TypeError
+# b = both normal and augmented give same result list
+# s = single result lists for normal and augmented
+# e = equals other results
+# result lists: ['+', '-', '*', '**', '%', '//', ('classic /', 'new /')]
+#                                                ^^^^^^^^^^^^^^^^^^^^^^
+#                                               2-tuple if results differ
+#                                                 else only one value
+infix_results = {
+    # 2
+    (0,0): ('b', [4, 0, 4, 4, 0, 1, (1, 1.0)]),
+    (0,1): ('e', (0,0)),
+    (0,2): ('b', [6.0, -2.0, 8.0, 16.0, 2.0, 0.0, 0.5]),
+    (0,3): ('b', [4+0j, 0+0j, 4+0j, 4+0j, 0+0j, 1+0j, 1+0j]),
+    (0,4): ('b', [TE, TE, [1, 1], TE, TE, TE, TE]),
+    (0,5): ('b', [TE, TE, (2, 2), TE, TE, TE, TE]),
+    (0,6): ('b', [TE, TE, TE, TE, TE, TE, TE]),
+    (0,7): ('e', (0,0)),
+    (0,8): ('e', (0,0)),
+
+    # 2L
+    (1,0): ('e', (0,0)),
+    (1,1): ('e', (0,1)),
+    (1,2): ('e', (0,2)),
+    (1,3): ('e', (0,3)),
+    (1,4): ('e', (0,4)),
+    (1,5): ('e', (0,5)),
+    (1,6): ('e', (0,6)),
+    (1,7): ('e', (0,7)),
+    (1,8): ('e', (0,8)),
+
+    # 4.0
+    (2,0): ('b', [6.0, 2.0, 8.0, 16.0, 0.0, 2.0, 2.0]),
+    (2,1): ('e', (2,0)),
+    (2,2): ('b', [8.0, 0.0, 16.0, 256.0, 0.0, 1.0, 1.0]),
+    (2,3): ('b', [6+0j, 2+0j, 8+0j, 16+0j, 0+0j, 2+0j, 2+0j]),
+    (2,4): ('b', [TE, TE, TE, TE, TE, TE, TE]),
+    (2,5): ('e', (2,4)),
+    (2,6): ('e', (2,4)),
+    (2,7): ('e', (2,0)),
+    (2,8): ('e', (2,0)),
+
+    # (2+0j)
+    (3,0): ('b', [4+0j, 0+0j, 4+0j, 4+0j, 0+0j, 1+0j, 1+0j]),
+    (3,1): ('e', (3,0)),
+    (3,2): ('b', [6+0j, -2+0j, 8+0j, 16+0j, 2+0j, 0+0j, 0.5+0j]),
+    (3,3): ('b', [4+0j, 0+0j, 4+0j, 4+0j, 0+0j, 1+0j, 1+0j]),
+    (3,4): ('b', [TE, TE, TE, TE, TE, TE, TE]),
+    (3,5): ('e', (3,4)),
+    (3,6): ('e', (3,4)),
+    (3,7): ('e', (3,0)),
+    (3,8): ('e', (3,0)),
+
+    # [1]
+    (4,0): ('b', [TE, TE, [1, 1], TE, TE, TE, TE]),
+    (4,1): ('e', (4,0)),
+    (4,2): ('b', [TE, TE, TE, TE, TE, TE, TE]),
+    (4,3): ('b', [TE, TE, TE, TE, TE, TE, TE]),
+    (4,4): ('b', [[1, 1], TE, TE, TE, TE, TE, TE]),
+    (4,5): ('s', [TE, TE, TE, TE, TE, TE, TE], [[1, 2], TE, TE, TE, TE, TE, TE]),
+    (4,6): ('b', [TE, TE, TE, TE, TE, TE, TE]),
+    (4,7): ('e', (4,0)),
+    (4,8): ('e', (4,0)),
+
+    # (2,)
+    (5,0): ('b', [TE, TE, (2, 2), TE, TE, TE, TE]),
+    (5,1): ('e', (5,0)),
+    (5,2): ('b', [TE, TE, TE, TE, TE, TE, TE]),
+    (5,3): ('e', (5,2)),
+    (5,4): ('e', (5,2)),
+    (5,5): ('b', [(2, 2), TE, TE, TE, TE, TE, TE]),
+    (5,6): ('b', [TE, TE, TE, TE, TE, TE, TE]),
+    (5,7): ('e', (5,0)),
+    (5,8): ('e', (5,0)),
+
+    # None
+    (6,0): ('b', [TE, TE, TE, TE, TE, TE, TE]),
+    (6,1): ('e', (6,0)),
+    (6,2): ('e', (6,0)),
+    (6,3): ('e', (6,0)),
+    (6,4): ('e', (6,0)),
+    (6,5): ('e', (6,0)),
+    (6,6): ('e', (6,0)),
+    (6,7): ('e', (6,0)),
+    (6,8): ('e', (6,0)),
+
+    # MethodNumber(2)
+    (7,0): ('e', (0,0)),
+    (7,1): ('e', (0,1)),
+    (7,2): ('e', (0,2)),
+    (7,3): ('e', (0,3)),
+    (7,4): ('e', (0,4)),
+    (7,5): ('e', (0,5)),
+    (7,6): ('e', (0,6)),
+    (7,7): ('e', (0,7)),
+    (7,8): ('e', (0,8)),
+
+    # CoerceNumber(2)
+    (8,0): ('e', (0,0)),
+    (8,1): ('e', (0,1)),
+    (8,2): ('e', (0,2)),
+    (8,3): ('e', (0,3)),
+    (8,4): ('e', (0,4)),
+    (8,5): ('e', (0,5)),
+    (8,6): ('e', (0,6)),
+    (8,7): ('e', (0,7)),
+    (8,8): ('e', (0,8)),
+}
+
+def process_infix_results():
+    for key in sorted(infix_results):
+        val = infix_results[key]
+        if val[0] == 'e':
+            infix_results[key] = infix_results[val[1]]
+        else:
+            if val[0] == 's':
+                res = (val[1], val[2])
+            elif val[0] == 'b':
+                res = (val[1], val[1])
+            for i in range(1):
+                if isinstance(res[i][6], tuple):
+                    if 1/2 == 0:
+                        # testing with classic (floor) division
+                        res[i][6] = res[i][6][0]
+                    else:
+                        # testing with -Qnew
+                        res[i][6] = res[i][6][1]
+            infix_results[key] = res
+
+
+
+process_infix_results()
+# now infix_results has two lists of results for every pairing.
+
 prefix_binops = [ 'divmod' ]
+prefix_results = [
+    [(1,0), (1L,0L), (0.0,2.0), ((1+0j),0j), TE, TE, TE, TE, (1,0)],
+    [(1L,0L), (1L,0L), (0.0,2.0), ((1+0j),0j), TE, TE, TE, TE, (1L,0L)],
+    [(2.0,0.0), (2.0,0.0), (1.0,0.0), ((2+0j),0j), TE, TE, TE, TE, (2.0,0.0)],
+    [((1+0j),0j), ((1+0j),0j), (0j,(2+0j)), ((1+0j),0j), TE, TE, TE, TE, ((1+0j),0j)],
+    [TE, TE, TE, TE, TE, TE, TE, TE, TE],
+    [TE, TE, TE, TE, TE, TE, TE, TE, TE],
+    [TE, TE, TE, TE, TE, TE, TE, TE, TE],
+    [TE, TE, TE, TE, TE, TE, TE, TE, TE],
+    [(1,0), (1L,0L), (0.0,2.0), ((1+0j),0j), TE, TE, TE, TE, (1,0)]
+]
 
 def format_float(value):
     if abs(value) < 0.01:
@@ -87,83 +256,74 @@
         return format_float(value)
     return str(value)
 
-def do_infix_binops():
-    for a in candidates:
-        for b in candidates:
-            for op in infix_binops:
-                print '%s %s %s' % (a, op, b),
-                try:
-                    x = eval('a %s b' % op)
-                except:
-                    error = sys.exc_info()[:2]
-                    print '... %s.%s' % (error[0].__module__, error[0].__name__)
-                else:
-                    print '=', format_result(x)
-                try:
-                    z = copy.copy(a)
-                except copy.Error:
-                    z = a # assume it has no inplace ops
-                print '%s %s= %s' % (a, op, b),
-                try:
-                    exec('z %s= b' % op)
-                except:
-                    error = sys.exc_info()[:2]
-                    print '... %s.%s' % (error[0].__module__, error[0].__name__)
-                else:
-                    print '=>', format_result(z)
+class CoercionTest(unittest.TestCase):
+    def test_infix_binops(self):
+        for ia, a in enumerate(candidates):
+            for ib, b in enumerate(candidates):
+                results = infix_results[(ia, ib)]
+                for op, res, ires in zip(infix_binops, results[0], results[1]):
+                    if res is TE:
+                        self.assertRaises(TypeError, eval,
+                                          'a %s b' % op, {'a': a, 'b': b})
+                    else:
+                        self.assertEquals(format_result(res),
+                                          format_result(eval('a %s b' % op)),
+                                          '%s %s %s == %s failed' % (a, op, b, res))
+                    try:
+                        z = copy.copy(a)
+                    except copy.Error:
+                        z = a # assume it has no inplace ops
+                    if ires is TE:
+                        try:
+                            exec 'z %s= b' % op
+                        except TypeError:
+                            pass
+                        else:
+                            self.fail("TypeError not raised")
+                    else:
+                        exec('z %s= b' % op)
+                        self.assertEquals(ires, z)
 
-def do_prefix_binops():
-    for a in candidates:
-        for b in candidates:
-            for op in prefix_binops:
-                print '%s(%s, %s)' % (op, a, b),
-                try:
-                    x = eval('%s(a, b)' % op)
-                except:
-                    error = sys.exc_info()[:2]
-                    print '... %s.%s' % (error[0].__module__, error[0].__name__)
-                else:
-                    print '=', format_result(x)
+    def test_prefix_binops(self):
+        for ia, a in enumerate(candidates):
+            for ib, b in enumerate(candidates):
+                for op in prefix_binops:
+                    res = prefix_results[ia][ib]
+                    if res is TE:
+                        self.assertRaises(TypeError, eval,
+                                          '%s(a, b)' % op, {'a': a, 'b': b})
+                    else:
+                        self.assertEquals(format_result(res),
+                                          format_result(eval('%s(a, b)' % op)),
+                                          '%s(%s, %s) == %s failed' % (op, a, b, res))
 
-# New-style class version of CoerceNumber
-class CoerceTo(object):
-    def __init__(self, arg):
-        self.arg = arg
-    def __coerce__(self, other):
-        if isinstance(other, CoerceTo):
-            return self.arg, other.arg
-        else:
-            return self.arg, other
+    def test_cmptypes(self):
+        # Built-in tp_compare slots expect their arguments to have the
+        # same type, but a user-defined __coerce__ doesn't have to obey.
+        # SF #980352
+        evil_coercer = CoerceTo(42)
+        # Make sure these don't crash any more
+        self.assertNotEquals(cmp(u'fish', evil_coercer), 0)
+        self.assertNotEquals(cmp(slice(1), evil_coercer), 0)
+        # ...but that this still works
+        class WackyComparer(object):
+            def __cmp__(slf, other):
+                self.assert_(other == 42, 'expected evil_coercer, got %r' % other)
+                return 0
+        self.assertEquals(cmp(WackyComparer(), evil_coercer), 0)
+        # ...and classic classes too, since that code path is a little different
+        class ClassicWackyComparer:
+            def __cmp__(slf, other):
+                self.assert_(other == 42, 'expected evil_coercer, got %r' % other)
+                return 0
+        self.assertEquals(cmp(ClassicWackyComparer(), evil_coercer), 0)
 
-def assert_(expr, msg=None):
-    if not expr:
-        raise AssertionError, msg
+def test_main():
+    warnings.filterwarnings("ignore",
+                            r'complex divmod\(\), // and % are deprecated',
+                            DeprecationWarning,
+                            r'test.test_coercion$')
+    run_unittest(CoercionTest)
 
-def do_cmptypes():
-    # Built-in tp_compare slots expect their arguments to have the
-    # same type, but a user-defined __coerce__ doesn't have to obey.
-    # SF #980352
-    evil_coercer = CoerceTo(42)
-    # Make sure these don't crash any more
-    assert_(cmp(u'fish', evil_coercer) != 0)
-    assert_(cmp(slice(1), evil_coercer) != 0)
-    # ...but that this still works
-    class WackyComparer(object):
-        def __cmp__(self, other):
-            assert_(other == 42, 'expected evil_coercer, got %r' % other)
-            return 0
-    assert_(cmp(WackyComparer(), evil_coercer) == 0)
-    # ...and classic classes too, since that code path is a little different
-    class ClassicWackyComparer:
-        def __cmp__(self, other):
-            assert_(other == 42, 'expected evil_coercer, got %r' % other)
-            return 0
-    assert_(cmp(ClassicWackyComparer(), evil_coercer) == 0)
-
-warnings.filterwarnings("ignore",
-                        r'complex divmod\(\), // and % are deprecated',
-                        DeprecationWarning,
-                        r'test.test_coercion$')
-do_infix_binops()
-do_prefix_binops()
-do_cmptypes()
+if __name__ == "__main__":
+    test_main()
diff --git a/Lib/test/test_compare.py b/Lib/test/test_compare.py
index 6899926..2fde614 100644
--- a/Lib/test/test_compare.py
+++ b/Lib/test/test_compare.py
@@ -1,4 +1,6 @@
 import sys
+import unittest
+from test import test_support
 
 class Empty:
     def __repr__(self):
@@ -27,28 +29,31 @@
     def __cmp__(self, other):
         return cmp(self.arg, other)
 
+class ComparisonTest(unittest.TestCase):
+    set1 = [2, 2.0, 2L, 2+0j, Coerce(2), Cmp(2.0)]
+    set2 = [[1], (3,), None, Empty()]
+    candidates = set1 + set2
 
-candidates = [2, 2.0, 2L, 2+0j, [1], (3,), None, Empty(), Coerce(2), Cmp(2.0)]
-
-def test():
-    for a in candidates:
-        for b in candidates:
-            try:
-                x = a == b
-            except:
-                print 'cmp(%s, %s) => %s' % (a, b, sys.exc_info()[0])
-            else:
-                if x:
-                    print "%s == %s" % (a, b)
+    def test_comparisons(self):
+        for a in self.candidates:
+            for b in self.candidates:
+                if ((a in self.set1) and (b in self.set1)) or a is b:
+                    self.assertEqual(a, b)
                 else:
-                    print "%s != %s" % (a, b)
-    # Ensure default comparison compares id() of args
-    L = []
-    for i in range(10):
-        L.insert(len(L)//2, Empty())
-    for a in L:
-        for b in L:
-            if cmp(a, b) != cmp(id(a), id(b)):
-                print "ERROR:", cmp(a, b), cmp(id(a), id(b)), id(a), id(b)
+                    self.assertNotEqual(a, b)
 
-test()
+    def test_id_comparisons(self):
+        # Ensure default comparison compares id() of args
+        L = []
+        for i in range(10):
+            L.insert(len(L)//2, Empty())
+        for a in L:
+            for b in L:
+                self.assertEqual(cmp(a, b), cmp(id(a), id(b)),
+                                 'a=%r, b=%r' % (a, b))
+
+def test_main():
+    test_support.run_unittest(ComparisonTest)
+
+if __name__ == '__main__':
+    test_main()
diff --git a/Lib/test/test_compile.py b/Lib/test/test_compile.py
index 1d47f91..72c4f7e 100644
--- a/Lib/test/test_compile.py
+++ b/Lib/test/test_compile.py
@@ -284,6 +284,10 @@
         f1, f2 = f()
         self.assertNotEqual(id(f1.func_code), id(f2.func_code))
 
+    def test_unicode_encoding(self):
+        code = u"# -*- coding: utf-8 -*-\npass\n"
+        self.assertRaises(SyntaxError, compile, code, "tmp", "exec")
+
     def test_subscripts(self):
         # SF bug 1448804
         # Class to make testing subscript results easy
diff --git a/Lib/test/test_compiler.py b/Lib/test/test_compiler.py
index 5e7b15c..a59d6aa 100644
--- a/Lib/test/test_compiler.py
+++ b/Lib/test/test_compiler.py
@@ -1,10 +1,12 @@
 import compiler
 from compiler.ast import flatten
-import os
+import os, sys, time, unittest
 import test.test_support
-import unittest
 from random import random
 
+# How much time in seconds can pass before we print a 'Still working' message.
+_PRINT_WORKING_MSG_INTERVAL = 5 * 60
+
 class CompilerTest(unittest.TestCase):
 
     def testCompileLibrary(self):
@@ -13,11 +15,18 @@
         # that any of the code is correct, merely the compiler is able
         # to generate some kind of code for it.
 
+        next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
         libdir = os.path.dirname(unittest.__file__)
         testdir = os.path.dirname(test.test_support.__file__)
 
         for dir in [libdir, testdir]:
             for basename in os.listdir(dir):
+                # Print still working message since this test can be really slow
+                if next_time <= time.time():
+                    next_time = time.time() + _PRINT_WORKING_MSG_INTERVAL
+                    print >>sys.__stdout__, \
+                       '  testCompileLibrary still working, be patient...'
+
                 if not basename.endswith(".py"):
                     continue
                 if not TEST_ALL and random() < 0.98:
diff --git a/Lib/test/test_contextlib.py b/Lib/test/test_contextlib.py
index f8db88c..97470c7 100644
--- a/Lib/test/test_contextlib.py
+++ b/Lib/test/test_contextlib.py
@@ -2,12 +2,14 @@
 
 from __future__ import with_statement
 
+import sys
 import os
 import decimal
 import tempfile
 import unittest
 import threading
 from contextlib import *  # Tests __all__
+from test.test_support import run_suite
 
 class ContextManagerTestCase(unittest.TestCase):
 
@@ -45,6 +47,28 @@
             self.fail("Expected ZeroDivisionError")
         self.assertEqual(state, [1, 42, 999])
 
+    def test_contextmanager_no_reraise(self):
+        @contextmanager
+        def whee():
+            yield
+        ctx = whee().__context__()
+        ctx.__enter__()
+        # Calling __exit__ should not result in an exception
+        self.failIf(ctx.__exit__(TypeError, TypeError("foo"), None))
+
+    def test_contextmanager_trap_yield_after_throw(self):
+        @contextmanager
+        def whoo():
+            try:
+                yield
+            except:
+                yield
+        ctx = whoo().__context__()
+        ctx.__enter__()
+        self.assertRaises(
+            RuntimeError, ctx.__exit__, TypeError, TypeError("foo"), None
+        )
+
     def test_contextmanager_except(self):
         state = []
         @contextmanager
@@ -62,6 +86,21 @@
             raise ZeroDivisionError(999)
         self.assertEqual(state, [1, 42, 999])
 
+    def test_contextmanager_attribs(self):
+        def attribs(**kw):
+            def decorate(func):
+                for k,v in kw.items():
+                    setattr(func,k,v)
+                return func
+            return decorate
+        @contextmanager
+        @attribs(foo='bar')
+        def baz(spam):
+            """Whee!"""
+        self.assertEqual(baz.__name__,'baz')
+        self.assertEqual(baz.foo, 'bar')
+        self.assertEqual(baz.__doc__, "Whee!")
+
 class NestedTestCase(unittest.TestCase):
 
     # XXX This needs more work
@@ -274,21 +313,31 @@
 
     def testBasic(self):
         ctx = decimal.getcontext()
-        ctx.prec = save_prec = decimal.ExtendedContext.prec + 5
-        with decimal.ExtendedContext:
-            self.assertEqual(decimal.getcontext().prec,
-                             decimal.ExtendedContext.prec)
-        self.assertEqual(decimal.getcontext().prec, save_prec)
+        orig_context = ctx.copy()
         try:
+            ctx.prec = save_prec = decimal.ExtendedContext.prec + 5
             with decimal.ExtendedContext:
                 self.assertEqual(decimal.getcontext().prec,
                                  decimal.ExtendedContext.prec)
-                1/0
-        except ZeroDivisionError:
             self.assertEqual(decimal.getcontext().prec, save_prec)
-        else:
-            self.fail("Didn't raise ZeroDivisionError")
+            try:
+                with decimal.ExtendedContext:
+                    self.assertEqual(decimal.getcontext().prec,
+                                     decimal.ExtendedContext.prec)
+                    1/0
+            except ZeroDivisionError:
+                self.assertEqual(decimal.getcontext().prec, save_prec)
+            else:
+                self.fail("Didn't raise ZeroDivisionError")
+        finally:
+            decimal.setcontext(orig_context)
 
 
+# This is needed to make the test actually run under regrtest.py!
+def test_main():
+    run_suite(
+        unittest.defaultTestLoader.loadTestsFromModule(sys.modules[__name__])
+    )
+
 if __name__ == "__main__":
-    unittest.main()
+    test_main()
diff --git a/Lib/test/test_copy_reg.py b/Lib/test/test_copy_reg.py
index c41946a..c3d3964 100644
--- a/Lib/test/test_copy_reg.py
+++ b/Lib/test/test_copy_reg.py
@@ -8,6 +8,22 @@
     pass
 
 
+class WithoutSlots(object):
+    pass
+
+class WithWeakref(object):
+    __slots__ = ('__weakref__',)
+
+class WithPrivate(object):
+    __slots__ = ('__spam',)
+
+class WithSingleString(object):
+    __slots__ = 'spam'
+
+class WithInherited(WithSingleString):
+    __slots__ = ('eggs',)
+
+
 class CopyRegTestCase(unittest.TestCase):
 
     def test_class(self):
@@ -84,6 +100,19 @@
             self.assertRaises(ValueError, copy_reg.add_extension,
                               mod, func, code)
 
+    def test_slotnames(self):
+        self.assertEquals(copy_reg._slotnames(WithoutSlots), [])
+        self.assertEquals(copy_reg._slotnames(WithWeakref), [])
+        expected = ['_WithPrivate__spam']
+        self.assertEquals(copy_reg._slotnames(WithPrivate), expected)
+        self.assertEquals(copy_reg._slotnames(WithSingleString), ['spam'])
+        expected = ['eggs', 'spam']
+        expected.sort()
+        result = copy_reg._slotnames(WithInherited)
+        result.sort()
+        self.assertEquals(result, expected)
+
+
 def test_main():
     test_support.run_unittest(CopyRegTestCase)
 
diff --git a/Lib/test/test_curses.py b/Lib/test/test_curses.py
index a4a45a7..dc2f20b 100644
--- a/Lib/test/test_curses.py
+++ b/Lib/test/test_curses.py
@@ -24,6 +24,9 @@
 if not term or term == 'unknown':
     raise TestSkipped, "$TERM=%r, calling initscr() may cause exit" % term
 
+if sys.platform == "cygwin":
+    raise TestSkipped("cygwin's curses mostly just hangs")
+
 def window_funcs(stdscr):
     "Test the methods of windows"
     win = curses.newwin(10,10)
@@ -201,11 +204,13 @@
         curses.has_key(13)
 
     if hasattr(curses, 'getmouse'):
-        curses.mousemask(curses.BUTTON1_PRESSED)
-        curses.mouseinterval(10)
-        # just verify these don't cause errors
-        m = curses.getmouse()
-        curses.ungetmouse(*m)
+        (availmask, oldmask) = curses.mousemask(curses.BUTTON1_PRESSED)
+        # availmask indicates that mouse stuff not available.
+        if availmask != 0:
+            curses.mouseinterval(10)
+            # just verify these don't cause errors
+            m = curses.getmouse()
+            curses.ungetmouse(*m)
 
 def unit_tests():
     from curses import ascii
diff --git a/Lib/test/test_datetime.py b/Lib/test/test_datetime.py
index 27f42c6..2528b4a 100644
--- a/Lib/test/test_datetime.py
+++ b/Lib/test/test_datetime.py
@@ -1168,6 +1168,17 @@
         self.assertEqual(dt2 - dt1, us)
         self.assert_(dt1 < dt2)
 
+    def test_strftime_with_bad_tzname_replace(self):
+        # verify ok if tzinfo.tzname().replace() returns a non-string
+        class MyTzInfo(FixedOffset):
+            def tzname(self, dt):
+                class MyStr(str):
+                    def replace(self, *args):
+                        return None
+                return MyStr('name')
+        t = self.theclass(2005, 3, 2, 0, 0, 0, 0, MyTzInfo(3, 'name'))
+        self.assertRaises(TypeError, t.strftime, '%Z')
+
     def test_bad_constructor_arguments(self):
         # bad years
         self.theclass(MINYEAR, 1, 1)  # no exception
diff --git a/Lib/test/test_decimal.py b/Lib/test/test_decimal.py
index 1d33ec4..341ad6d 100644
--- a/Lib/test/test_decimal.py
+++ b/Lib/test/test_decimal.py
@@ -29,7 +29,8 @@
 import os, sys
 import pickle, copy
 from decimal import *
-from test.test_support import TestSkipped, run_unittest, run_doctest, is_resource_enabled
+from test.test_support import (TestSkipped, run_unittest, run_doctest,
+                               is_resource_enabled)
 import random
 try:
     import threading
@@ -39,12 +40,15 @@
 # Useful Test Constant
 Signals = getcontext().flags.keys()
 
-# Tests are built around these assumed context defaults
-DefaultContext.prec=9
-DefaultContext.rounding=ROUND_HALF_EVEN
-DefaultContext.traps=dict.fromkeys(Signals, 0)
-setcontext(DefaultContext)
-
+# Tests are built around these assumed context defaults.
+# test_main() restores the original context.
+def init():
+    global ORIGINAL_CONTEXT
+    ORIGINAL_CONTEXT = getcontext().copy()
+    DefaultContext.prec = 9
+    DefaultContext.rounding = ROUND_HALF_EVEN
+    DefaultContext.traps = dict.fromkeys(Signals, 0)
+    setcontext(DefaultContext)
 
 TESTDATADIR = 'decimaltestdata'
 if __name__ == '__main__':
@@ -503,16 +507,17 @@
         self.assertEqual(eval('Decimal(10) != E()'), 'ne 10')
 
         # insert operator methods and then exercise them
-        for sym, lop, rop in (
-                ('+', '__add__', '__radd__'),
-                ('-', '__sub__', '__rsub__'),
-                ('*', '__mul__', '__rmul__'),
-                ('/', '__truediv__', '__rtruediv__'),
-                ('%', '__mod__', '__rmod__'),
-                ('//', '__floordiv__', '__rfloordiv__'),
-                ('**', '__pow__', '__rpow__'),
-            ):
+        oplist = [
+            ('+', '__add__', '__radd__'),
+            ('-', '__sub__', '__rsub__'),
+            ('*', '__mul__', '__rmul__'),
+            ('/', '__truediv__', '__rtruediv__')
+            ('%', '__mod__', '__rmod__'),
+            ('//', '__floordiv__', '__rfloordiv__'),
+            ('**', '__pow__', '__rpow__')
+        ]
 
+        for sym, lop, rop in oplist:
             setattr(E, lop, lambda self, other: 'str' + lop + str(other))
             setattr(E, rop, lambda self, other: str(other) + rop + 'str')
             self.assertEqual(eval('E()' + sym + 'Decimal(10)'),
@@ -1059,6 +1064,7 @@
     is enabled in regrtest.py
     """
 
+    init()
     global TEST_ALL
     TEST_ALL = arith or is_resource_enabled('decimal')
 
@@ -1073,10 +1079,12 @@
         DecimalTest,
     ]
 
-    run_unittest(*test_classes)
-    import decimal as DecimalModule
-    run_doctest(DecimalModule, verbose)
-
+    try:
+        run_unittest(*test_classes)
+        import decimal as DecimalModule
+        run_doctest(DecimalModule, verbose)
+    finally:
+        setcontext(ORIGINAL_CONTEXT)
 
 if __name__ == '__main__':
     # Calling with no arguments runs all tests.
diff --git a/Lib/test/test_descr.py b/Lib/test/test_descr.py
index 185edb3..89cebb0 100644
--- a/Lib/test/test_descr.py
+++ b/Lib/test/test_descr.py
@@ -1638,7 +1638,9 @@
     c1 = C()
     c2 = C()
     verify(not not c1)
-    vereq(hash(c1), id(c1))
+    verify(id(c1) != id(c2))
+    hash(c1)
+    hash(c2)
     vereq(cmp(c1, c2), cmp(id(c1), id(c2)))
     vereq(c1, c1)
     verify(c1 != c2)
@@ -1660,7 +1662,9 @@
     d1 = D()
     d2 = D()
     verify(not not d1)
-    vereq(hash(d1), id(d1))
+    verify(id(d1) != id(d2))
+    hash(d1)
+    hash(d2)
     vereq(cmp(d1, d2), cmp(id(d1), id(d2)))
     vereq(d1, d1)
     verify(d1 != d2)
@@ -2914,7 +2918,7 @@
     class D(B, C):
         pass
     d = D()
-    vereq(hash(d), id(d))
+    orig_hash = hash(d) # related to id(d) in platform-dependent ways
     A.__hash__ = lambda self: 42
     vereq(hash(d), 42)
     C.__hash__ = lambda self: 314
@@ -2930,7 +2934,7 @@
     del C.__hash__
     vereq(hash(d), 42)
     del A.__hash__
-    vereq(hash(d), id(d))
+    vereq(hash(d), orig_hash)
     d.foo = 42
     d.bar = 42
     vereq(d.foo, 42)
diff --git a/Lib/test/test_difflib.py b/Lib/test/test_difflib.py
index 52feef0..83fad15 100644
--- a/Lib/test/test_difflib.py
+++ b/Lib/test/test_difflib.py
@@ -152,6 +152,10 @@
         difflib.SequenceMatcher(None, old, new).get_opcodes()
 
 
-Doctests = doctest.DocTestSuite(difflib)
+def test_main():
+    difflib.HtmlDiff._default_prefix = 0
+    Doctests = doctest.DocTestSuite(difflib)
+    run_unittest(TestSFpatches, TestSFbugs, Doctests)
 
-run_unittest(TestSFpatches, TestSFbugs, Doctests)
+if __name__ == '__main__':
+    test_main()
diff --git a/Lib/test/test_dl.py b/Lib/test/test_dl.py
index d1f73b2..b70a4cf 100755
--- a/Lib/test/test_dl.py
+++ b/Lib/test/test_dl.py
@@ -10,6 +10,7 @@
     ('/usr/lib/libc.so', 'getpid'),
     ('/lib/libc.so.6', 'getpid'),
     ('/usr/bin/cygwin1.dll', 'getpid'),
+    ('/usr/lib/libc.dylib', 'getpid'),
     ]
 
 for s, func in sharedlibs:
diff --git a/Lib/test/test_doctest.py b/Lib/test/test_doctest.py
index 1f89ac2..b17607d 100644
--- a/Lib/test/test_doctest.py
+++ b/Lib/test/test_doctest.py
@@ -604,8 +604,8 @@
     ...     >>> for x in range(10):
     ...     ...     print x,
     ...     0 1 2 3 4 5 6 7 8 9
-    ...     >>> x/2
-    ...     6.0
+    ...     >>> x//2
+    ...     6
     ...     '''
     >>> test = doctest.DocTestFinder().find(f)[0]
     >>> [e.lineno for e in test.examples]
@@ -679,8 +679,8 @@
     ...     >>> x = 12
     ...     >>> print x
     ...     12
-    ...     >>> x/2
-    ...     6.0
+    ...     >>> x//2
+    ...     6
     ...     '''
     >>> test = doctest.DocTestFinder().find(f)[0]
 
@@ -700,8 +700,8 @@
     ...     >>> x = 12
     ...     >>> print x
     ...     14
-    ...     >>> x/2
-    ...     6.0
+    ...     >>> x//2
+    ...     6
     ...     '''
     >>> test = doctest.DocTestFinder().find(f)[0]
     >>> doctest.DocTestRunner(verbose=True).run(test)
@@ -723,9 +723,9 @@
     Got:
         12
     Trying:
-        x/2
+        x//2
     Expecting:
-        6.0
+        6
     ok
     (1, 3)
 """
@@ -738,8 +738,8 @@
     ...     >>> x = 12
     ...     >>> print x
     ...     12
-    ...     >>> x/2
-    ...     6.0
+    ...     >>> x//2
+    ...     6
     ...     '''
     >>> test = doctest.DocTestFinder().find(f)[0]
 
@@ -754,9 +754,9 @@
         12
     ok
     Trying:
-        x/2
+        x//2
     Expecting:
-        6.0
+        6
     ok
     (0, 3)
 
@@ -784,9 +784,9 @@
         12
     ok
     Trying:
-        x/2
+        x//2
     Expecting:
-        6.0
+        6
     ok
     (0, 3)
 
@@ -806,9 +806,9 @@
     >>> def f(x):
     ...     '''
     ...     >>> x = 12
-    ...     >>> print x/0
+    ...     >>> print x//0
     ...     Traceback (most recent call last):
-    ...     ZeroDivisionError: float division
+    ...     ZeroDivisionError: integer division or modulo by zero
     ...     '''
     >>> test = doctest.DocTestFinder().find(f)[0]
     >>> doctest.DocTestRunner(verbose=False).run(test)
@@ -822,10 +822,10 @@
     >>> def f(x):
     ...     '''
     ...     >>> x = 12
-    ...     >>> print 'pre-exception output', x/0
+    ...     >>> print 'pre-exception output', x//0
     ...     pre-exception output
     ...     Traceback (most recent call last):
-    ...     ZeroDivisionError: float division
+    ...     ZeroDivisionError: integer division or modulo by zero
     ...     '''
     >>> test = doctest.DocTestFinder().find(f)[0]
     >>> doctest.DocTestRunner(verbose=False).run(test)
@@ -833,10 +833,10 @@
     **********************************************************************
     File ..., line 4, in f
     Failed example:
-        print 'pre-exception output', x/0
+        print 'pre-exception output', x//0
     Exception raised:
         ...
-        ZeroDivisionError: float division
+        ZeroDivisionError: integer division or modulo by zero
     (1, 2)
 
 Exception messages may contain newlines:
@@ -920,7 +920,7 @@
 
     >>> def f(x):
     ...     r'''
-    ...     >>> 1/0
+    ...     >>> 1//0
     ...     0
     ...     '''
     >>> test = doctest.DocTestFinder().find(f)[0]
@@ -929,11 +929,11 @@
     **********************************************************************
     File ..., line 3, in f
     Failed example:
-        1/0
+        1//0
     Exception raised:
         Traceback (most recent call last):
         ...
-        ZeroDivisionError: float division
+        ZeroDivisionError: integer division or modulo by zero
     (1, 1)
 """
     def optionflags(): r"""
diff --git a/Lib/test/test_email_renamed.py b/Lib/test/test_email_renamed.py
new file mode 100644
index 0000000..c3af598
--- /dev/null
+++ b/Lib/test/test_email_renamed.py
@@ -0,0 +1,13 @@
+# Copyright (C) 2001-2006 Python Software Foundation
+# email package unit tests
+
+import unittest
+# The specific tests now live in Lib/email/test
+from email.test.test_email_renamed import suite
+from test.test_support import run_suite
+
+def test_main():
+    run_suite(suite())
+
+if __name__ == '__main__':
+    test_main()
diff --git a/Lib/test/test_file.py b/Lib/test/test_file.py
index fd5670a..ab3da86 100644
--- a/Lib/test/test_file.py
+++ b/Lib/test/test_file.py
@@ -100,12 +100,18 @@
     print "writelines accepted sequence of non-string objects"
 f.close()
 
-try:
-    sys.stdin.seek(-1)
-except IOError:
-    pass
+# This causes the interpreter to exit on OSF1 v5.1.
+if sys.platform != 'osf1V5':
+    try:
+        sys.stdin.seek(-1)
+    except IOError:
+        pass
+    else:
+        print "should not be able to seek on sys.stdin"
 else:
-    print "should not be able to seek on sys.stdin"
+    print >>sys.__stdout__, (
+        '  Skipping sys.stdin.seek(-1), it may crash the interpreter.'
+        ' Test manually.')
 
 try:
     sys.stdin.truncate()
diff --git a/Lib/test/test_fileinput.py b/Lib/test/test_fileinput.py
index f3a7841..301769e 100644
--- a/Lib/test/test_fileinput.py
+++ b/Lib/test/test_fileinput.py
@@ -162,7 +162,10 @@
     print "15. Unicode filenames"
 try:
     t1 = writeTmp(1, ["A\nB"])
-    fi = FileInput(files=unicode(t1, sys.getfilesystemencoding()))
+    encoding = sys.getfilesystemencoding()
+    if encoding is None:
+        encoding = 'ascii'
+    fi = FileInput(files=unicode(t1, encoding))
     lines = list(fi)
     verify(lines == ["A\n", "B"])
 finally:
diff --git a/Lib/test/test_fork1.py b/Lib/test/test_fork1.py
index aca7a84..cba5fc7 100644
--- a/Lib/test/test_fork1.py
+++ b/Lib/test/test_fork1.py
@@ -1,75 +1,23 @@
 """This test checks for correct fork() behavior.
-
-We want fork1() semantics -- only the forking thread survives in the
-child after a fork().
-
-On some systems (e.g. Solaris without posix threads) we find that all
-active threads survive in the child after a fork(); this is an error.
-
-While BeOS doesn't officially support fork and native threading in
-the same application, the present example should work just fine.  DC
 """
 
-import os, sys, time, thread
-from test.test_support import verify, verbose, TestSkipped
+import os
+from test.fork_wait import ForkWait
+from test.test_support import TestSkipped, run_unittest
 
 try:
     os.fork
 except AttributeError:
     raise TestSkipped, "os.fork not defined -- skipping test_fork1"
 
-LONGSLEEP = 2
-
-SHORTSLEEP = 0.5
-
-NUM_THREADS = 4
-
-alive = {}
-
-stop = 0
-
-def f(id):
-    while not stop:
-        alive[id] = os.getpid()
-        try:
-            time.sleep(SHORTSLEEP)
-        except IOError:
-            pass
-
-def main():
-    for i in range(NUM_THREADS):
-        thread.start_new(f, (i,))
-
-    time.sleep(LONGSLEEP)
-
-    a = alive.keys()
-    a.sort()
-    verify(a == range(NUM_THREADS))
-
-    prefork_lives = alive.copy()
-
-    if sys.platform in ['unixware7']:
-        cpid = os.fork1()
-    else:
-        cpid = os.fork()
-
-    if cpid == 0:
-        # Child
-        time.sleep(LONGSLEEP)
-        n = 0
-        for key in alive.keys():
-            if alive[key] != prefork_lives[key]:
-                n = n+1
-        os._exit(n)
-    else:
-        # Parent
+class ForkTest(ForkWait):
+    def wait_impl(self, cpid):
         spid, status = os.waitpid(cpid, 0)
-        verify(spid == cpid)
-        verify(status == 0,
-                "cause = %d, exit = %d" % (status&0xff, status>>8) )
-        global stop
-        # Tell threads to die
-        stop = 1
-        time.sleep(2*SHORTSLEEP) # Wait for threads to die
+        self.assertEqual(spid, cpid)
+        self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
 
-main()
+def test_main():
+    run_unittest(ForkTest)
+
+if __name__ == "__main__":
+    test_main()
diff --git a/Lib/test/test_generators.py b/Lib/test/test_generators.py
index 4be1b4c..a60a768 100644
--- a/Lib/test/test_generators.py
+++ b/Lib/test/test_generators.py
@@ -421,7 +421,6 @@
 ...         self.name = name
 ...         self.parent = None
 ...         self.generator = self.generate()
-...         self.close = self.generator.close
 ...
 ...     def generate(self):
 ...         while not self.parent:
@@ -484,8 +483,6 @@
 merged A into G
 A->G B->G C->G D->G E->G F->G G->G H->G I->G J->G K->G L->G M->G
 
->>> for s in sets: s.close()    # break cycles
-
 """
 # Emacs turd '
 
@@ -593,7 +590,6 @@
 ...     def __init__(self, g):
 ...         self.sofar = []
 ...         self.fetch = g.next
-...         self.close = g.close
 ...
 ...     def __getitem__(self, i):
 ...         sofar, fetch = self.sofar, self.fetch
@@ -624,8 +620,6 @@
 [200, 216, 225, 240, 243, 250, 256, 270, 288, 300, 320, 324, 360, 375, 384]
 [400, 405, 432, 450, 480, 486, 500, 512, 540, 576, 600, 625, 640, 648, 675]
 
->>> m235.close()
-
 Ye olde Fibonacci generator, LazyList style.
 
 >>> def fibgen(a, b):
@@ -648,7 +642,6 @@
 >>> fib = LazyList(fibgen(1, 2))
 >>> firstn(iter(fib), 17)
 [1, 2, 3, 5, 8, 13, 21, 34, 55, 89, 144, 233, 377, 610, 987, 1597, 2584]
->>> fib.close()
 
 
 Running after your tail with itertools.tee (new in version 2.4)
@@ -685,7 +678,8 @@
 ...                        merge(times(3, m3),
 ...                              times(5, m5))):
 ...             yield n
-...     m2, m3, m5, mRes = tee(_m235(), 4)
+...     m1 = _m235()
+...     m2, m3, m5, mRes = tee(m1, 4)
 ...     return mRes
 
 >>> it = m235()
@@ -702,10 +696,9 @@
 iterators, whereupon it is deleted. You can therefore print the hamming
 sequence during hours without increasing memory usage, or very little.
 
-The beauty of it is that recursive running after their tail FP algorithms
+The beauty of it is that recursive running-after-their-tail FP algorithms
 are quite straightforwardly expressed with this Python idiom.
 
-
 Ye olde Fibonacci generator, tee style.
 
 >>> def fib():
@@ -721,7 +714,8 @@
 ...         for res in _isum(fibHead, fibTail):
 ...             yield res
 ...
-...     fibHead, fibTail, fibRes = tee(_fib(), 3)
+...     realfib = _fib()
+...     fibHead, fibTail, fibRes = tee(realfib, 3)
 ...     return fibRes
 
 >>> firstn(fib(), 17)
@@ -1545,6 +1539,9 @@
 >>> g.throw(ValueError, TypeError(1))  # mismatched type, rewrapped
 caught ValueError (1)
 
+>>> g.throw(ValueError, ValueError(1), None)   # explicit None traceback
+caught ValueError (1)
+
 >>> g.throw(ValueError(1), "foo")       # bad args
 Traceback (most recent call last):
   ...
@@ -1592,8 +1589,7 @@
 >>> f().throw("abc")     # throw on just-opened generator
 Traceback (most recent call last):
   ...
-TypeError: exceptions must be classes, or instances, not str
-
+abc
 
 Now let's try closing a generator:
 
@@ -1711,6 +1707,81 @@
 
 """
 
+refleaks_tests = """
+Prior to adding cycle-GC support to itertools.tee, this code would leak
+references. We add it to the standard suite so the routine refleak-tests
+would trigger if it starts being uncleanable again.
+
+>>> import itertools
+>>> def leak():
+...     class gen:
+...         def __iter__(self):
+...             return self
+...         def next(self):
+...             return self.item
+...     g = gen()
+...     head, tail = itertools.tee(g)
+...     g.item = head
+...     return head
+>>> it = leak()
+
+Make sure to also test the involvement of the tee-internal teedataobject,
+which stores returned items.
+
+>>> item = it.next()
+
+
+
+This test leaked at one point due to generator finalization/destruction.
+It was copied from Lib/test/leakers/test_generator_cycle.py before the file
+was removed.
+
+>>> def leak():
+...    def gen():
+...        while True:
+...            yield g
+...    g = gen()
+
+>>> leak()
+
+
+
+This test isn't really generator related, but rather exception-in-cleanup
+related. The coroutine tests (above) just happen to cause an exception in
+the generator's __del__ (tp_del) method. We can also test for this
+explicitly, without generators. We do have to redirect stderr to avoid
+printing warnings and to doublecheck that we actually tested what we wanted
+to test.
+
+>>> import sys, StringIO
+>>> old = sys.stderr
+>>> try:
+...     sys.stderr = StringIO.StringIO()
+...     class Leaker:
+...         def __del__(self):
+...             raise RuntimeError
+...
+...     l = Leaker()
+...     del l
+...     err = sys.stderr.getvalue().strip()
+...     err.startswith(
+...         "Exception exceptions.RuntimeError: RuntimeError() in <"
+...     )
+...     err.endswith("> ignored")
+...     len(err.splitlines())
+... finally:
+...     sys.stderr = old
+True
+True
+1
+
+
+
+These refleak tests should perhaps be in a testfile of their own,
+test_generators just happened to be the test that drew these out.
+
+"""
+
 __test__ = {"tut":      tutorial_tests,
             "pep":      pep_tests,
             "email":    email_tests,
@@ -1719,6 +1790,7 @@
             "conjoin":  conjoin_tests,
             "weakref":  weakref_tests,
             "coroutine":  coroutine_tests,
+            "refleaks": refleaks_tests,
             }
 
 # Magic test name that regrtest.py invokes *after* importing this module.
diff --git a/Lib/test/test_genexps.py b/Lib/test/test_genexps.py
index 1556604..e414757 100644
--- a/Lib/test/test_genexps.py
+++ b/Lib/test/test_genexps.py
@@ -129,7 +129,7 @@
 Verify re-use of tuples (a side benefit of using genexps over listcomps)
 
     >>> tupleids = map(id, ((i,i) for i in xrange(10)))
-    >>> max(tupleids) - min(tupleids)
+    >>> int(max(tupleids) - min(tupleids))
     0
 
 Verify that syntax error's are raised for genexps used as lvalues
diff --git a/Lib/test/test_getargs2.py b/Lib/test/test_getargs2.py
index d4c681a..748ad44 100644
--- a/Lib/test/test_getargs2.py
+++ b/Lib/test/test_getargs2.py
@@ -48,7 +48,7 @@
 VERY_LARGE = 0xFF0000121212121212121242L
 
 from _testcapi import UCHAR_MAX, USHRT_MAX, UINT_MAX, ULONG_MAX, INT_MAX, \
-     INT_MIN, LONG_MIN, LONG_MAX
+     INT_MIN, LONG_MIN, LONG_MAX, PY_SSIZE_T_MIN, PY_SSIZE_T_MAX
 
 # fake, they are not defined in Python's header files
 LLONG_MAX = 2**63-1
@@ -182,6 +182,23 @@
         self.failUnlessEqual(42, getargs_l(42L))
         self.assertRaises(OverflowError, getargs_l, VERY_LARGE)
 
+    def test_n(self):
+        from _testcapi import getargs_n
+        # n returns 'Py_ssize_t', and does range checking
+        # (PY_SSIZE_T_MIN ... PY_SSIZE_T_MAX)
+        self.failUnlessEqual(3, getargs_n(3.14))
+        self.failUnlessEqual(99, getargs_n(Long()))
+        self.failUnlessEqual(99, getargs_n(Int()))
+
+        self.assertRaises(OverflowError, getargs_n, PY_SSIZE_T_MIN-1)
+        self.failUnlessEqual(PY_SSIZE_T_MIN, getargs_n(PY_SSIZE_T_MIN))
+        self.failUnlessEqual(PY_SSIZE_T_MAX, getargs_n(PY_SSIZE_T_MAX))
+        self.assertRaises(OverflowError, getargs_n, PY_SSIZE_T_MAX+1)
+
+        self.failUnlessEqual(42, getargs_n(42))
+        self.failUnlessEqual(42, getargs_n(42L))
+        self.assertRaises(OverflowError, getargs_n, VERY_LARGE)
+
 
 class LongLong_TestCase(unittest.TestCase):
     def test_L(self):
diff --git a/Lib/test/test_glob.py b/Lib/test/test_glob.py
index 8a6ef7f..5ce09f9 100644
--- a/Lib/test/test_glob.py
+++ b/Lib/test/test_glob.py
@@ -80,6 +80,14 @@
         eq(self.glob('?a?', '*F'), map(self.norm, [os.path.join('aaa', 'zzzF'),
                                                    os.path.join('aab', 'F')]))
 
+    def test_glob_directory_with_trailing_slash(self):
+        # We are verifying that when there is wildcard pattern which
+        # ends with os.sep doesn't blow up.
+        res = glob.glob(self.tempdir + '*' + os.sep)
+        self.assertEqual(len(res), 1)
+        # either of these results are reasonable
+        self.assertTrue(res[0] in [self.tempdir, self.tempdir + os.sep])
+
     def test_glob_broken_symlinks(self):
         if hasattr(os, 'symlink'):
             eq = self.assertSequencesEqual_noorder
diff --git a/Lib/test/test_grammar.py b/Lib/test/test_grammar.py
index 5b20ab3..4bb4e45 100644
--- a/Lib/test/test_grammar.py
+++ b/Lib/test/test_grammar.py
@@ -255,6 +255,10 @@
 d22v(*(1, 2, 3, 4))
 d22v(1, 2, *(3, 4, 5))
 d22v(1, *(2, 3), **{'d': 4})
+def d31v((x)): pass
+d31v(1)
+def d32v((x,)): pass
+d32v((1,))
 
 ### lambdef: 'lambda' [varargslist] ':' test
 print 'lambdef'
@@ -811,6 +815,11 @@
 x = 5; t = True;
 verify([(i,j) for i in range(10) for j in range(5)] == list(g))
 
+# Grammar allows multiple adjacent 'if's in listcomps and genexps,
+# even though it's silly. Make sure it works (ifelse broke this.)
+verify([ x for x in range(10) if x % 2 if x % 3 ], [1, 5, 7])
+verify((x for x in range(10) if x % 2 if x % 3), [1, 5, 7])
+
 # Test ifelse expressions in various cases
 def _checkeval(msg, ret):
     "helper to check that evaluation of expressions is done correctly"
diff --git a/Lib/test/test_index.py b/Lib/test/test_index.py
index e69de29..45b3b2b 100644
--- a/Lib/test/test_index.py
+++ b/Lib/test/test_index.py
@@ -0,0 +1,137 @@
+import unittest
+from test import test_support
+import operator
+
+class oldstyle:
+    def __index__(self):
+        return self.ind
+
+class newstyle(object):
+    def __index__(self):
+        return self.ind
+
+class BaseTestCase(unittest.TestCase):
+    def setUp(self):
+        self.o = oldstyle()
+        self.n = newstyle()
+        self.o2 = oldstyle()
+        self.n2 = newstyle()
+
+    def test_basic(self):
+        self.o.ind = -2
+        self.n.ind = 2
+        assert(self.seq[self.n] == self.seq[2])
+        assert(self.seq[self.o] == self.seq[-2])
+        assert(operator.index(self.o) == -2)
+        assert(operator.index(self.n) == 2)
+
+    def test_error(self):
+        self.o.ind = 'dumb'
+        self.n.ind = 'bad'
+        myfunc = lambda x, obj: obj.seq[x]
+        self.failUnlessRaises(TypeError, operator.index, self.o)
+        self.failUnlessRaises(TypeError, operator.index, self.n)
+        self.failUnlessRaises(TypeError, myfunc, self.o, self)
+        self.failUnlessRaises(TypeError, myfunc, self.n, self)
+
+    def test_slice(self):
+        self.o.ind = 1
+        self.o2.ind = 3
+        self.n.ind = 2
+        self.n2.ind = 4
+        assert(self.seq[self.o:self.o2] == self.seq[1:3])
+        assert(self.seq[self.n:self.n2] == self.seq[2:4])
+
+    def test_repeat(self):
+        self.o.ind = 3
+        self.n.ind = 2
+        assert(self.seq * self.o == self.seq * 3)
+        assert(self.seq * self.n == self.seq * 2)
+        assert(self.o * self.seq == self.seq * 3)
+        assert(self.n * self.seq == self.seq * 2)
+
+    def test_wrappers(self):
+        n = self.n
+        n.ind = 5
+        assert n.__index__() == 5
+        assert 6 .__index__() == 6
+        assert -7L.__index__() == -7
+        assert self.seq.__getitem__(n) == self.seq[5]
+        assert self.seq.__mul__(n) == self.seq * 5
+        assert self.seq.__rmul__(n) == self.seq * 5
+
+    def test_infinite_recusion(self):
+        class Trap1(int):
+            def __index__(self):
+                return self
+        class Trap2(long):
+            def __index__(self):
+                return self
+        self.failUnlessRaises(TypeError, operator.getitem, self.seq, Trap1())
+        self.failUnlessRaises(TypeError, operator.getitem, self.seq, Trap2())
+
+
+class ListTestCase(BaseTestCase):
+    seq = [0,10,20,30,40,50]
+
+    def test_setdelitem(self):
+        self.o.ind = -2
+        self.n.ind = 2
+        lst = list('ab!cdefghi!j')
+        del lst[self.o]
+        del lst[self.n]
+        lst[self.o] = 'X'
+        lst[self.n] = 'Y'
+        assert lst == list('abYdefghXj')
+
+        lst = [5, 6, 7, 8, 9, 10, 11]
+        lst.__setitem__(self.n, "here")
+        assert lst == [5, 6, "here", 8, 9, 10, 11]
+        lst.__delitem__(self.n)
+        assert lst == [5, 6, 8, 9, 10, 11]
+
+    def test_inplace_repeat(self):
+        self.o.ind = 2
+        self.n.ind = 3
+        lst = [6, 4]
+        lst *= self.o
+        assert lst == [6, 4, 6, 4]
+        lst *= self.n
+        assert lst == [6, 4, 6, 4] * 3
+
+        lst = [5, 6, 7, 8, 9, 11]
+        l2 = lst.__imul__(self.n)
+        assert l2 is lst
+        assert lst == [5, 6, 7, 8, 9, 11] * 3
+
+
+class TupleTestCase(BaseTestCase):
+    seq = (0,10,20,30,40,50)
+
+class StringTestCase(BaseTestCase):
+    seq = "this is a test"
+
+class UnicodeTestCase(BaseTestCase):
+    seq = u"this is a test"
+
+
+class XRangeTestCase(unittest.TestCase):
+
+    def test_xrange(self):
+        n = newstyle()
+        n.ind = 5
+        assert xrange(1, 20)[n] == 6
+        assert xrange(1, 20).__getitem__(n) == 6
+
+
+def test_main():
+    test_support.run_unittest(
+        ListTestCase,
+        TupleTestCase,
+        StringTestCase,
+        UnicodeTestCase,
+        XRangeTestCase,
+    )
+
+if __name__ == "__main__":
+    test_main()
diff --git a/Lib/test/test_inspect.py b/Lib/test/test_inspect.py
index 0bdf959..d9fd93d 100644
--- a/Lib/test/test_inspect.py
+++ b/Lib/test/test_inspect.py
@@ -295,10 +295,12 @@
         self.assertArgSpecEquals(A.m, ['self'])
 
     def test_getargspec_sublistofone(self):
-        def sublistOfOne((foo)): return 1
-
+        def sublistOfOne((foo,)): return 1
         self.assertArgSpecEquals(sublistOfOne, [['foo']])
 
+        def fakeSublistOfOne((foo)): return 1
+        self.assertArgSpecEquals(fakeSublistOfOne, ['foo'])
+
     def test_classify_newstyle(self):
         class A(object):
 
diff --git a/Lib/test/test_mimetypes.py b/Lib/test/test_mimetypes.py
index 5939ff5..8c584ad 100644
--- a/Lib/test/test_mimetypes.py
+++ b/Lib/test/test_mimetypes.py
@@ -8,6 +8,7 @@
 # Tell it we don't know about external files:
 mimetypes.knownfiles = []
 mimetypes.inited = False
+mimetypes._default_mime_types()
 
 
 class MimeTypesTestCase(unittest.TestCase):
diff --git a/Lib/test/test_multibytecodec.py b/Lib/test/test_multibytecodec.py
index aef7931..4d02dee 100644
--- a/Lib/test/test_multibytecodec.py
+++ b/Lib/test/test_multibytecodec.py
@@ -7,13 +7,114 @@
 
 from test import test_support
 from test import test_multibytecodec_support
-import unittest, StringIO, codecs
+import unittest, StringIO, codecs, sys
+
+class Test_MultibyteCodec(unittest.TestCase):
+
+    def test_nullcoding(self):
+        self.assertEqual(''.decode('gb18030'), u'')
+        self.assertEqual(unicode('', 'gb18030'), u'')
+        self.assertEqual(u''.encode('gb18030'), '')
+
+    def test_str_decode(self):
+        self.assertEqual('abcd'.encode('gb18030'), 'abcd')
+
+    def test_errorcallback_longindex(self):
+        dec = codecs.getdecoder('euc-kr')
+        myreplace  = lambda exc: (u'', sys.maxint+1)
+        codecs.register_error('test.cjktest', myreplace)
+        self.assertRaises(IndexError, dec,
+                          'apple\x92ham\x93spam', 'test.cjktest')
+
+class Test_IncrementalEncoder(unittest.TestCase):
+
+    def test_stateless(self):
+        # cp949 encoder isn't stateful at all.
+        encoder = codecs.getincrementalencoder('cp949')()
+        self.assertEqual(encoder.encode(u'\ud30c\uc774\uc36c \ub9c8\uc744'),
+                         '\xc6\xc4\xc0\xcc\xbd\xe3 \xb8\xb6\xc0\xbb')
+        self.assertEqual(encoder.reset(), None)
+        self.assertEqual(encoder.encode(u'\u2606\u223c\u2606', True),
+                         '\xa1\xd9\xa1\xad\xa1\xd9')
+        self.assertEqual(encoder.reset(), None)
+        self.assertEqual(encoder.encode(u'', True), '')
+        self.assertEqual(encoder.encode(u'', False), '')
+        self.assertEqual(encoder.reset(), None)
+
+    def test_stateful(self):
+        # jisx0213 encoder is stateful for a few codepoints. eg)
+        #   U+00E6 => A9DC
+        #   U+00E6 U+0300 => ABC4
+        #   U+0300 => ABDC
+
+        encoder = codecs.getincrementalencoder('jisx0213')()
+        self.assertEqual(encoder.encode(u'\u00e6\u0300'), '\xab\xc4')
+        self.assertEqual(encoder.encode(u'\u00e6'), '')
+        self.assertEqual(encoder.encode(u'\u0300'), '\xab\xc4')
+        self.assertEqual(encoder.encode(u'\u00e6', True), '\xa9\xdc')
+
+        self.assertEqual(encoder.reset(), None)
+        self.assertEqual(encoder.encode(u'\u0300'), '\xab\xdc')
+
+        self.assertEqual(encoder.encode(u'\u00e6'), '')
+        self.assertEqual(encoder.encode('', True), '\xa9\xdc')
+        self.assertEqual(encoder.encode('', True), '')
+
+    def test_stateful_keep_buffer(self):
+        encoder = codecs.getincrementalencoder('jisx0213')()
+        self.assertEqual(encoder.encode(u'\u00e6'), '')
+        self.assertRaises(UnicodeEncodeError, encoder.encode, u'\u0123')
+        self.assertEqual(encoder.encode(u'\u0300\u00e6'), '\xab\xc4')
+        self.assertRaises(UnicodeEncodeError, encoder.encode, u'\u0123')
+        self.assertEqual(encoder.reset(), None)
+        self.assertEqual(encoder.encode(u'\u0300'), '\xab\xdc')
+        self.assertEqual(encoder.encode(u'\u00e6'), '')
+        self.assertRaises(UnicodeEncodeError, encoder.encode, u'\u0123')
+        self.assertEqual(encoder.encode(u'', True), '\xa9\xdc')
+
+
+class Test_IncrementalDecoder(unittest.TestCase):
+
+    def test_dbcs(self):
+        # cp949 decoder is simple with only 1 or 2 bytes sequences.
+        decoder = codecs.getincrementaldecoder('cp949')()
+        self.assertEqual(decoder.decode('\xc6\xc4\xc0\xcc\xbd'),
+                         u'\ud30c\uc774')
+        self.assertEqual(decoder.decode('\xe3 \xb8\xb6\xc0\xbb'),
+                         u'\uc36c \ub9c8\uc744')
+        self.assertEqual(decoder.decode(''), u'')
+
+    def test_dbcs_keep_buffer(self):
+        decoder = codecs.getincrementaldecoder('cp949')()
+        self.assertEqual(decoder.decode('\xc6\xc4\xc0'), u'\ud30c')
+        self.assertRaises(UnicodeDecodeError, decoder.decode, '', True)
+        self.assertEqual(decoder.decode('\xcc'), u'\uc774')
+
+        self.assertEqual(decoder.decode('\xc6\xc4\xc0'), u'\ud30c')
+        self.assertRaises(UnicodeDecodeError, decoder.decode, '\xcc\xbd', True)
+        self.assertEqual(decoder.decode('\xcc'), u'\uc774')
+
+    def test_iso2022(self):
+        decoder = codecs.getincrementaldecoder('iso2022-jp')()
+        ESC = '\x1b'
+        self.assertEqual(decoder.decode(ESC + '('), u'')
+        self.assertEqual(decoder.decode('B', True), u'')
+        self.assertEqual(decoder.decode(ESC + '$'), u'')
+        self.assertEqual(decoder.decode('B@$'), u'\u4e16')
+        self.assertEqual(decoder.decode('@$@'), u'\u4e16')
+        self.assertEqual(decoder.decode('$', True), u'\u4e16')
+        self.assertEqual(decoder.reset(), None)
+        self.assertEqual(decoder.decode('@$'), u'@$')
+        self.assertEqual(decoder.decode(ESC + '$'), u'')
+        self.assertRaises(UnicodeDecodeError, decoder.decode, '', True)
+        self.assertEqual(decoder.decode('B@$'), u'\u4e16')
+
 
 class Test_StreamWriter(unittest.TestCase):
     if len(u'\U00012345') == 2: # UCS2
         def test_gb18030(self):
             s= StringIO.StringIO()
-            c = codecs.lookup('gb18030')[3](s)
+            c = codecs.getwriter('gb18030')(s)
             c.write(u'123')
             self.assertEqual(s.getvalue(), '123')
             c.write(u'\U00012345')
@@ -30,15 +131,16 @@
             self.assertEqual(s.getvalue(),
                     '123\x907\x959\x907\x959\x907\x959\x827\xcf5\x810\x851')
 
-        # standard utf-8 codecs has broken StreamReader
-        if test_multibytecodec_support.__cjkcodecs__:
-            def test_utf_8(self):
-                s= StringIO.StringIO()
-                c = codecs.lookup('utf-8')[3](s)
-                c.write(u'123')
-                self.assertEqual(s.getvalue(), '123')
-                c.write(u'\U00012345')
-                self.assertEqual(s.getvalue(), '123\xf0\x92\x8d\x85')
+        def test_utf_8(self):
+            s= StringIO.StringIO()
+            c = codecs.getwriter('utf-8')(s)
+            c.write(u'123')
+            self.assertEqual(s.getvalue(), '123')
+            c.write(u'\U00012345')
+            self.assertEqual(s.getvalue(), '123\xf0\x92\x8d\x85')
+
+            # Python utf-8 codec can't buffer surrogate pairs yet.
+            if 0:
                 c.write(u'\U00012345'[0])
                 self.assertEqual(s.getvalue(), '123\xf0\x92\x8d\x85')
                 c.write(u'\U00012345'[1] + u'\U00012345' + u'\uac00\u00ac')
@@ -61,14 +163,6 @@
     else: # UCS4
         pass
 
-    def test_nullcoding(self):
-        self.assertEqual(''.decode('gb18030'), u'')
-        self.assertEqual(unicode('', 'gb18030'), u'')
-        self.assertEqual(u''.encode('gb18030'), '')
-
-    def test_str_decode(self):
-        self.assertEqual('abcd'.encode('gb18030'), 'abcd')
-
     def test_streamwriter_strwrite(self):
         s = StringIO.StringIO()
         wr = codecs.getwriter('gb18030')(s)
@@ -83,6 +177,9 @@
 
 def test_main():
     suite = unittest.TestSuite()
+    suite.addTest(unittest.makeSuite(Test_MultibyteCodec))
+    suite.addTest(unittest.makeSuite(Test_IncrementalEncoder))
+    suite.addTest(unittest.makeSuite(Test_IncrementalDecoder))
     suite.addTest(unittest.makeSuite(Test_StreamWriter))
     suite.addTest(unittest.makeSuite(Test_ISO2022))
     test_support.run_suite(suite)
diff --git a/Lib/test/test_multibytecodec_support.py b/Lib/test/test_multibytecodec_support.py
index 45a63e7..bec32de 100644
--- a/Lib/test/test_multibytecodec_support.py
+++ b/Lib/test/test_multibytecodec_support.py
@@ -3,15 +3,12 @@
 # test_multibytecodec_support.py
 #   Common Unittest Routines for CJK codecs
 #
-# $CJKCodecs: test_multibytecodec_support.py,v 1.6 2004/06/19 06:09:55 perky Exp $
 
 import sys, codecs, os.path
 import unittest
 from test import test_support
 from StringIO import StringIO
 
-__cjkcodecs__ = 0 # define this as 0 for python
-
 class TestBase:
     encoding        = ''   # codec name
     codec           = None # codec tuple (with 4 elements)
@@ -21,11 +18,17 @@
     roundtriptest   = 1    # set if roundtrip is possible with unicode
     has_iso10646    = 0    # set if this encoding contains whole iso10646 map
     xmlcharnametest = None # string to test xmlcharrefreplace
+    unmappedunicode = u'\udeee' # a unicode codepoint that is not mapped.
 
     def setUp(self):
         if self.codec is None:
             self.codec = codecs.lookup(self.encoding)
-        self.encode, self.decode, self.reader, self.writer = self.codec
+        self.encode = self.codec.encode
+        self.decode = self.codec.decode
+        self.reader = self.codec.streamreader
+        self.writer = self.codec.streamwriter
+        self.incrementalencoder = self.codec.incrementalencoder
+        self.incrementaldecoder = self.codec.incrementaldecoder
 
     def test_chunkcoding(self):
         for native, utf8 in zip(*[StringIO(f).readlines()
@@ -47,51 +50,155 @@
             else:
                 self.assertRaises(UnicodeError, func, source, scheme)
 
-    if sys.hexversion >= 0x02030000:
-        def test_xmlcharrefreplace(self):
-            if self.has_iso10646:
-                return
+    def test_xmlcharrefreplace(self):
+        if self.has_iso10646:
+            return
 
-            s = u"\u0b13\u0b23\u0b60 nd eggs"
-            self.assertEqual(
-                self.encode(s, "xmlcharrefreplace")[0],
-                "&#2835;&#2851;&#2912; nd eggs"
-            )
+        s = u"\u0b13\u0b23\u0b60 nd eggs"
+        self.assertEqual(
+            self.encode(s, "xmlcharrefreplace")[0],
+            "&#2835;&#2851;&#2912; nd eggs"
+        )
 
-        def test_customreplace(self):
-            if self.has_iso10646:
-                return
+    def test_customreplace_encode(self):
+        if self.has_iso10646:
+            return
 
-            import htmlentitydefs
+        from htmlentitydefs import codepoint2name
 
-            names = {}
-            for (key, value) in htmlentitydefs.entitydefs.items():
-                if len(value)==1:
-                    names[value.decode('latin-1')] = self.decode(key)[0]
+        def xmlcharnamereplace(exc):
+            if not isinstance(exc, UnicodeEncodeError):
+                raise TypeError("don't know how to handle %r" % exc)
+            l = []
+            for c in exc.object[exc.start:exc.end]:
+                if ord(c) in codepoint2name:
+                    l.append(u"&%s;" % codepoint2name[ord(c)])
                 else:
-                    names[unichr(int(value[2:-1]))] = self.decode(key)[0]
+                    l.append(u"&#%d;" % ord(c))
+            return (u"".join(l), exc.end)
 
-            def xmlcharnamereplace(exc):
-                if not isinstance(exc, UnicodeEncodeError):
-                    raise TypeError("don't know how to handle %r" % exc)
-                l = []
-                for c in exc.object[exc.start:exc.end]:
-                    try:
-                        l.append(u"&%s;" % names[c])
-                    except KeyError:
-                        l.append(u"&#%d;" % ord(c))
-                return (u"".join(l), exc.end)
+        codecs.register_error("test.xmlcharnamereplace", xmlcharnamereplace)
 
-            codecs.register_error(
-                "test.xmlcharnamereplace", xmlcharnamereplace)
+        if self.xmlcharnametest:
+            sin, sout = self.xmlcharnametest
+        else:
+            sin = u"\xab\u211c\xbb = \u2329\u1234\u232a"
+            sout = "&laquo;&real;&raquo; = &lang;&#4660;&rang;"
+        self.assertEqual(self.encode(sin,
+                                    "test.xmlcharnamereplace")[0], sout)
 
-            if self.xmlcharnametest:
-                sin, sout = self.xmlcharnametest
+    def test_callback_wrong_objects(self):
+        def myreplace(exc):
+            return (ret, exc.end)
+        codecs.register_error("test.cjktest", myreplace)
+
+        for ret in ([1, 2, 3], [], None, object(), 'string', ''):
+            self.assertRaises(TypeError, self.encode, self.unmappedunicode,
+                              'test.cjktest')
+
+    def test_callback_long_index(self):
+        def myreplace(exc):
+            return (u'x', long(exc.end))
+        codecs.register_error("test.cjktest", myreplace)
+        self.assertEqual(self.encode(u'abcd' + self.unmappedunicode + u'efgh',
+                                     'test.cjktest'), ('abcdxefgh', 9))
+
+        def myreplace(exc):
+            return (u'x', sys.maxint + 1)
+        codecs.register_error("test.cjktest", myreplace)
+        self.assertRaises(IndexError, self.encode, self.unmappedunicode,
+                          'test.cjktest')
+
+    def test_callback_None_index(self):
+        def myreplace(exc):
+            return (u'x', None)
+        codecs.register_error("test.cjktest", myreplace)
+        self.assertRaises(TypeError, self.encode, self.unmappedunicode,
+                          'test.cjktest')
+
+    def test_callback_backward_index(self):
+        def myreplace(exc):
+            if myreplace.limit > 0:
+                myreplace.limit -= 1
+                return (u'REPLACED', 0)
             else:
-                sin = u"\xab\u211c\xbb = \u2329\u1234\u232a"
-                sout = "&laquo;&real;&raquo; = &lang;&#4660;&rang;"
-            self.assertEqual(self.encode(sin,
-                                        "test.xmlcharnamereplace")[0], sout)
+                return (u'TERMINAL', exc.end)
+        myreplace.limit = 3
+        codecs.register_error("test.cjktest", myreplace)
+        self.assertEqual(self.encode(u'abcd' + self.unmappedunicode + u'efgh',
+                                     'test.cjktest'),
+                ('abcdREPLACEDabcdREPLACEDabcdREPLACEDabcdTERMINALefgh', 9))
+
+    def test_callback_forward_index(self):
+        def myreplace(exc):
+            return (u'REPLACED', exc.end + 2)
+        codecs.register_error("test.cjktest", myreplace)
+        self.assertEqual(self.encode(u'abcd' + self.unmappedunicode + u'efgh',
+                                     'test.cjktest'), ('abcdREPLACEDgh', 9))
+
+    def test_callback_index_outofbound(self):
+        def myreplace(exc):
+            return (u'TERM', 100)
+        codecs.register_error("test.cjktest", myreplace)
+        self.assertRaises(IndexError, self.encode, self.unmappedunicode,
+                          'test.cjktest')
+
+    def test_incrementalencoder(self):
+        UTF8Reader = codecs.getreader('utf-8')
+        for sizehint in [None] + range(1, 33) + \
+                        [64, 128, 256, 512, 1024]:
+            istream = UTF8Reader(StringIO(self.tstring[1]))
+            ostream = StringIO()
+            encoder = self.incrementalencoder()
+            while 1:
+                if sizehint is not None:
+                    data = istream.read(sizehint)
+                else:
+                    data = istream.read()
+
+                if not data:
+                    break
+                e = encoder.encode(data)
+                ostream.write(e)
+
+            self.assertEqual(ostream.getvalue(), self.tstring[0])
+
+    def test_incrementaldecoder(self):
+        UTF8Writer = codecs.getwriter('utf-8')
+        for sizehint in [None, -1] + range(1, 33) + \
+                        [64, 128, 256, 512, 1024]:
+            istream = StringIO(self.tstring[0])
+            ostream = UTF8Writer(StringIO())
+            decoder = self.incrementaldecoder()
+            while 1:
+                data = istream.read(sizehint)
+                if not data:
+                    break
+                else:
+                    u = decoder.decode(data)
+                    ostream.write(u)
+
+            self.assertEqual(ostream.getvalue(), self.tstring[1])
+
+    def test_incrementalencoder_error_callback(self):
+        inv = self.unmappedunicode
+
+        e = self.incrementalencoder()
+        self.assertRaises(UnicodeEncodeError, e.encode, inv, True)
+
+        e.errors = 'ignore'
+        self.assertEqual(e.encode(inv, True), '')
+
+        e.reset()
+        def tempreplace(exc):
+            return (u'called', exc.end)
+        codecs.register_error('test.incremental_error_callback', tempreplace)
+        e.errors = 'test.incremental_error_callback'
+        self.assertEqual(e.encode(inv, True), 'called')
+
+        # again
+        e.errors = 'ignore'
+        self.assertEqual(e.encode(inv, True), '')
 
     def test_streamreader(self):
         UTF8Writer = codecs.getwriter('utf-8')
@@ -113,11 +220,7 @@
                 self.assertEqual(ostream.getvalue(), self.tstring[1])
 
     def test_streamwriter(self):
-        if __cjkcodecs__:
-            readfuncs = ('read', 'readline', 'readlines')
-        else:
-            # standard utf8 codec has broken readline and readlines.
-            readfuncs = ('read',)
+        readfuncs = ('read', 'readline', 'readlines')
         UTF8Reader = codecs.getreader('utf-8')
         for name in readfuncs:
             for sizehint in [None] + range(1, 33) + \
@@ -211,10 +314,5 @@
             self.assertEqual(unicode(csetch, self.encoding), unich)
 
 def load_teststring(encoding):
-    if __cjkcodecs__:
-        etxt = open(os.path.join('sampletexts', encoding) + '.txt').read()
-        utxt = open(os.path.join('sampletexts', encoding) + '.utf8').read()
-        return (etxt, utxt)
-    else:
-        from test import cjkencodings_test
-        return cjkencodings_test.teststring[encoding]
+    from test import cjkencodings_test
+    return cjkencodings_test.teststring[encoding]
diff --git a/Lib/test/test_optparse.py b/Lib/test/test_optparse.py
index cf83d75..f656b9f 100644
--- a/Lib/test/test_optparse.py
+++ b/Lib/test/test_optparse.py
@@ -1382,8 +1382,15 @@
 
 class TestHelp(BaseTest):
     def setUp(self):
+        self.orig_columns = os.environ.get('COLUMNS')
         self.parser = self.make_parser(80)
 
+    def tearDown(self):
+        if self.orig_columns is None:
+            del os.environ['COLUMNS']
+        else:
+            os.environ['COLUMNS'] = self.orig_columns
+
     def make_parser(self, columns):
         options = [
             make_option("-a", type="string", dest='a',
diff --git a/Lib/test/test_parser.py b/Lib/test/test_parser.py
index 771fe9d..8aa1657 100644
--- a/Lib/test/test_parser.py
+++ b/Lib/test/test_parser.py
@@ -51,6 +51,10 @@
         self.check_expr("[1, 2, 3]")
         self.check_expr("[x**3 for x in range(20)]")
         self.check_expr("[x**3 for x in range(20) if x % 3]")
+        self.check_expr("[x**3 for x in range(20) if x % 2 if x % 3]")
+        self.check_expr("list(x**3 for x in range(20))")
+        self.check_expr("list(x**3 for x in range(20) if x % 3)")
+        self.check_expr("list(x**3 for x in range(20) if x % 2 if x % 3)")
         self.check_expr("foo(*args)")
         self.check_expr("foo(*args, **kw)")
         self.check_expr("foo(**kw)")
diff --git a/Lib/test/test_platform.py b/Lib/test/test_platform.py
index 200fba5..22307cd 100644
--- a/Lib/test/test_platform.py
+++ b/Lib/test/test_platform.py
@@ -63,7 +63,12 @@
         res = platform.dist()
 
     def test_libc_ver(self):
-        res = platform.libc_ver()
+        from sys import executable
+        import os
+        if os.path.isdir(executable) and os.path.exists(executable+'.exe'):
+            # Cygwin horror
+            executable = executable + '.exe'
+        res = platform.libc_ver(executable)
 
 def test_main():
     test_support.run_unittest(
diff --git a/Lib/test/test_popen2.py b/Lib/test/test_popen2.py
index 18142ec..4db3cd1 100644
--- a/Lib/test/test_popen2.py
+++ b/Lib/test/test_popen2.py
@@ -35,6 +35,9 @@
     # same test as popen2._test(), but using the os.popen*() API
     print "Testing os module:"
     import popen2
+    # When the test runs, there shouldn't be any open pipes
+    popen2._cleanup()
+    assert not popen2._active, "Active pipes when test starts " + repr([c.cmd for c in popen2._active])
     cmd  = "cat"
     teststr = "ab cd\n"
     if os.name == "nt":
@@ -65,6 +68,7 @@
         raise ValueError("unexpected %r on stderr" % (got,))
     for inst in popen2._active[:]:
         inst.wait()
+    popen2._cleanup()
     if popen2._active:
         raise ValueError("_active not empty")
     print "All OK"
diff --git a/Lib/test/test_posix.py b/Lib/test/test_posix.py
index 1ccc62b..f98c723 100644
--- a/Lib/test/test_posix.py
+++ b/Lib/test/test_posix.py
@@ -73,6 +73,11 @@
             finally:
                 fp.close()
 
+    def test_confstr(self):
+        if hasattr(posix, 'confstr'):
+            self.assertRaises(ValueError, posix.confstr, "CS_garbage")
+            self.assertEqual(len(posix.confstr("CS_PATH")) > 0, True)
+
     def test_dup2(self):
         if hasattr(posix, 'dup2'):
             fp1 = open(test_support.TESTFN)
diff --git a/Lib/test/test_pty.py b/Lib/test/test_pty.py
index f8ae479..99e01b6 100644
--- a/Lib/test/test_pty.py
+++ b/Lib/test/test_pty.py
@@ -18,6 +18,27 @@
     def debug(msg):
         pass
 
+def normalize_output(data):
+    # Some operating systems do conversions on newline.  We could possibly
+    # fix that by doing the appropriate termios.tcsetattr()s.  I couldn't
+    # figure out the right combo on Tru64 and I don't have an IRIX box.
+    # So just normalize the output and doc the problem O/Ses by allowing
+    # certain combinations for some platforms, but avoid allowing other
+    # differences (like extra whitespace, trailing garbage, etc.)
+
+    # This is about the best we can do without getting some feedback
+    # from someone more knowledgable.
+
+    # OSF/1 (Tru64) apparently turns \n into \r\r\n.
+    if data.endswith('\r\r\n'):
+        return data[:-3] + '\n'
+
+    # IRIX apparently turns \n into \r\n.
+    if data.endswith('\r\n'):
+        return data[:-2] + '\n'
+
+    return data
+
 # Marginal testing of pty suite. Cannot do extensive 'do or fail' testing
 # because pty code is not too portable.
 
@@ -36,19 +57,16 @@
     if not os.isatty(slave_fd) and sys.platform not in fickle_isatty:
         raise TestFailed, "slave_fd is not a tty"
 
-    # IRIX apparently turns \n into \r\n. Allow that, but avoid allowing other
-    # differences (like extra whitespace, trailing garbage, etc.)
-
     debug("Writing to slave_fd")
     os.write(slave_fd, TEST_STRING_1)
     s1 = os.read(master_fd, 1024)
-    sys.stdout.write(s1.replace("\r\n", "\n"))
+    sys.stdout.write(normalize_output(s1))
 
     debug("Writing chunked output")
     os.write(slave_fd, TEST_STRING_2[:5])
     os.write(slave_fd, TEST_STRING_2[5:])
     s2 = os.read(master_fd, 1024)
-    sys.stdout.write(s2.replace("\r\n", "\n"))
+    sys.stdout.write(normalize_output(s2))
 
     os.close(slave_fd)
     os.close(master_fd)
diff --git a/Lib/test/test_pyclbr.py b/Lib/test/test_pyclbr.py
index 0eb7d90..2410b03 100644
--- a/Lib/test/test_pyclbr.py
+++ b/Lib/test/test_pyclbr.py
@@ -97,6 +97,9 @@
                 self.assert_(isinstance(py_item, (FunctionType, BuiltinFunctionType)))
             else:
                 self.failUnless(isinstance(py_item, (ClassType, type)))
+                if py_item.__module__ != moduleName:
+                    continue   # skip classes that came from somewhere else
+
                 real_bases = [base.__name__ for base in py_item.__bases__]
                 pyclbr_bases = [ getattr(base, 'name', base)
                                  for base in value.super ]
@@ -172,7 +175,7 @@
         cm('pydoc')
 
         # Tests for modules inside packages
-        cm('email.Parser')
+        cm('email.parser')
         cm('test.test_pyclbr')
 
 
diff --git a/Lib/test/test_queue.py b/Lib/test/test_queue.py
index b55dd01..66977e6 100644
--- a/Lib/test/test_queue.py
+++ b/Lib/test/test_queue.py
@@ -221,7 +221,51 @@
     _doBlockingTest(q.get, (), q.put, ('empty',))
     _doBlockingTest(q.get, (True, 10), q.put, ('empty',))
 
+cum = 0
+cumlock = threading.Lock()
+
+def worker(q):
+    global cum
+    while True:
+        x = q.get()
+        if x is None:
+            q.task_done()
+            return
+        cumlock.acquire()
+        try:
+            cum += x
+        finally:
+            cumlock.release()
+        q.task_done()
+
+def QueueJoinTest(q):
+    global cum
+    cum = 0
+    for i in (0,1):
+        threading.Thread(target=worker, args=(q,)).start()
+    for i in xrange(100):
+        q.put(i)
+    q.join()
+    verify(cum==sum(range(100)), "q.join() did not block until all tasks were done")
+    for i in (0,1):
+        q.put(None)         # instruct the threads to close
+    q.join()                # verify that you can join twice
+
+def QueueTaskDoneTest(q):
+    try:
+        q.task_done()
+    except ValueError:
+        pass
+    else:
+        raise TestFailed("Did not detect task count going negative")
+
 def test():
+    q = Queue.Queue()
+    QueueTaskDoneTest(q)
+    QueueJoinTest(q)
+    QueueJoinTest(q)
+    QueueTaskDoneTest(q)
+
     q = Queue.Queue(QUEUE_SIZE)
     # Do it a couple of times on the same queue
     SimpleQueueTest(q)
diff --git a/Lib/test/test_quopri.py b/Lib/test/test_quopri.py
index ed66dfc..631c974 100644
--- a/Lib/test/test_quopri.py
+++ b/Lib/test/test_quopri.py
@@ -1,7 +1,7 @@
 from test import test_support
 import unittest
 
-import sys, os, cStringIO
+import sys, os, cStringIO, subprocess
 import quopri
 
 
@@ -176,17 +176,20 @@
 
     def test_scriptencode(self):
         (p, e) = self.STRINGS[-1]
-        (cin, cout) = os.popen2("%s -mquopri" % sys.executable)
-        cin.write(p)
-        cin.close()
-        self.assert_(cout.read() == e)
+        process = subprocess.Popen([sys.executable, "-mquopri"],
+                                   stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+        cout, cerr = process.communicate(p)
+        # On Windows, Python will output the result to stdout using
+        # CRLF, as the mode of stdout is text mode. To compare this
+        # with the expected result, we need to do a line-by-line comparison.
+        self.assert_(cout.splitlines() == e.splitlines())
 
     def test_scriptdecode(self):
         (p, e) = self.STRINGS[-1]
-        (cin, cout) = os.popen2("%s -mquopri -d" % sys.executable)
-        cin.write(e)
-        cin.close()
-        self.assert_(cout.read() == p)
+        process = subprocess.Popen([sys.executable, "-mquopri", "-d"],
+                                   stdin=subprocess.PIPE, stdout=subprocess.PIPE)
+        cout, cerr = process.communicate(e)
+        self.assert_(cout.splitlines() == p.splitlines())
 
 def test_main():
     test_support.run_unittest(QuopriTestCase)
diff --git a/Lib/test/test_random.py b/Lib/test/test_random.py
index 9c2e0d0..bba4c7c 100644
--- a/Lib/test/test_random.py
+++ b/Lib/test/test_random.py
@@ -93,10 +93,29 @@
         self.gen.sample(set(range(20)), 2)
         self.gen.sample(range(20), 2)
         self.gen.sample(xrange(20), 2)
-        self.gen.sample(dict.fromkeys('abcdefghijklmnopqrst'), 2)
         self.gen.sample(str('abcdefghijklmnopqrst'), 2)
         self.gen.sample(tuple('abcdefghijklmnopqrst'), 2)
 
+    def test_sample_on_dicts(self):
+        self.gen.sample(dict.fromkeys('abcdefghijklmnopqrst'), 2)
+
+        # SF bug #1460340 -- random.sample can raise KeyError
+        a = dict.fromkeys(range(10)+range(10,100,2)+range(100,110))
+        self.gen.sample(a, 3)
+
+        # A followup to bug #1460340:  sampling from a dict could return
+        # a subset of its keys or of its values, depending on the size of
+        # the subset requested.
+        N = 30
+        d = dict((i, complex(i, i)) for i in xrange(N))
+        for k in xrange(N+1):
+            samp = self.gen.sample(d, k)
+            # Verify that we got ints back (keys); the values are complex.
+            for x in samp:
+                self.assert_(type(x) is int)
+        samp.sort()
+        self.assertEqual(samp, range(N))
+
     def test_gauss(self):
         # Ensure that the seed() method initializes all the hidden state.  In
         # particular, through 2.2.1 it failed to reset a piece of state used
diff --git a/Lib/test/test_regex.py b/Lib/test/test_regex.py
deleted file mode 100644
index 2e2c8f65..0000000
--- a/Lib/test/test_regex.py
+++ /dev/null
@@ -1,113 +0,0 @@
-from test.test_support import verbose, sortdict
-import warnings
-warnings.filterwarnings("ignore", "the regex module is deprecated",
-                        DeprecationWarning, __name__)
-import regex
-from regex_syntax import *
-
-re = 'a+b+c+'
-print 'no match:', regex.match(re, 'hello aaaabcccc world')
-print 'successful search:', regex.search(re, 'hello aaaabcccc world')
-try:
-    cre = regex.compile('\(' + re)
-except regex.error:
-    print 'caught expected exception'
-else:
-    print 'expected regex.error not raised'
-
-print 'failed awk syntax:', regex.search('(a+)|(b+)', 'cdb')
-prev = regex.set_syntax(RE_SYNTAX_AWK)
-print 'successful awk syntax:', regex.search('(a+)|(b+)', 'cdb')
-regex.set_syntax(prev)
-print 'failed awk syntax:', regex.search('(a+)|(b+)', 'cdb')
-
-re = '\(<one>[0-9]+\) *\(<two>[0-9]+\)'
-print 'matching with group names and compile()'
-cre = regex.compile(re)
-print cre.match('801 999')
-try:
-    print cre.group('one')
-except regex.error:
-    print 'caught expected exception'
-else:
-    print 'expected regex.error not raised'
-
-print 'matching with group names and symcomp()'
-cre = regex.symcomp(re)
-print cre.match('801 999')
-print cre.group(0)
-print cre.group('one')
-print cre.group(1, 2)
-print cre.group('one', 'two')
-print 'realpat:', cre.realpat
-print 'groupindex:', sortdict(cre.groupindex)
-
-re = 'world'
-cre = regex.compile(re)
-print 'not case folded search:', cre.search('HELLO WORLD')
-cre = regex.compile(re, regex.casefold)
-print 'case folded search:', cre.search('HELLO WORLD')
-
-print '__members__:', cre.__members__
-print 'regs:', cre.regs
-print 'last:', cre.last
-print 'translate:', len(cre.translate)
-print 'givenpat:', cre.givenpat
-
-print 'match with pos:', cre.match('hello world', 7)
-print 'search with pos:', cre.search('hello world there world', 7)
-print 'bogus group:', cre.group(0, 1, 3)
-try:
-    print 'no name:', cre.group('one')
-except regex.error:
-    print 'caught expected exception'
-else:
-    print 'expected regex.error not raised'
-
-from regex_tests import *
-if verbose: print 'Running regex_tests test suite'
-
-for t in tests:
-    pattern=s=outcome=repl=expected=None
-    if len(t)==5:
-        pattern, s, outcome, repl, expected = t
-    elif len(t)==3:
-        pattern, s, outcome = t
-    else:
-        raise ValueError, ('Test tuples should have 3 or 5 fields',t)
-
-    try:
-        obj=regex.compile(pattern)
-    except regex.error:
-        if outcome==SYNTAX_ERROR: pass    # Expected a syntax error
-        else:
-            # Regex syntax errors aren't yet reported, so for
-            # the official test suite they'll be quietly ignored.
-            pass
-            #print '=== Syntax error:', t
-    else:
-        try:
-            result=obj.search(s)
-        except regex.error, msg:
-            print '=== Unexpected exception', t, repr(msg)
-        if outcome==SYNTAX_ERROR:
-            # This should have been a syntax error; forget it.
-            pass
-        elif outcome==FAIL:
-            if result==-1: pass   # No match, as expected
-            else: print '=== Succeeded incorrectly', t
-        elif outcome==SUCCEED:
-            if result!=-1:
-                # Matched, as expected, so now we compute the
-                # result string and compare it to our expected result.
-                start, end = obj.regs[0]
-                found=s[start:end]
-                groups=obj.group(1,2,3,4,5,6,7,8,9,10)
-                vardict=vars()
-                for i in range(len(groups)):
-                    vardict['g'+str(i+1)]=str(groups[i])
-                repl=eval(repl)
-                if repl!=expected:
-                    print '=== grouping error', t, repr(repl)+' should be '+repr(expected)
-            else:
-                print '=== Failed incorrectly', t
diff --git a/Lib/test/test_set.py b/Lib/test/test_set.py
index 6ff1215..0268be2 100644
--- a/Lib/test/test_set.py
+++ b/Lib/test/test_set.py
@@ -224,7 +224,7 @@
         # Bug #1257731
         class H(self.thetype):
             def __hash__(self):
-                return id(self)
+                return int(id(self) & 0x7fffffff)
         s=H()
         f=set()
         f.add(s)
@@ -421,7 +421,7 @@
         self.assertRaises(ReferenceError, str, p)
 
     # C API test only available in a debug build
-    if hasattr(sys, "gettotalrefcount"):
+    if hasattr(set, "test_c_api"):
         def test_c_api(self):
             self.assertEqual(set('abc').test_c_api(), True)
 
diff --git a/Lib/test/test_setuptools.py b/Lib/test/test_setuptools.py
new file mode 100644
index 0000000..a988303
--- /dev/null
+++ b/Lib/test/test_setuptools.py
@@ -0,0 +1,16 @@
+"""Tests for setuptools.
+
+The tests for setuptools are defined in the setuptools.tests package;
+this runs them from there.
+"""
+
+import test.test_support
+from setuptools.command.test import ScanningLoader
+
+def test_main():
+    test.test_support.run_suite(
+        ScanningLoader().loadTestsFromName('setuptools.tests')
+    )
+
+if __name__ == "__main__":
+    test_main()
diff --git a/Lib/test/test_sgmllib.py b/Lib/test/test_sgmllib.py
index bc25bd0..8e8b02f 100644
--- a/Lib/test/test_sgmllib.py
+++ b/Lib/test/test_sgmllib.py
@@ -214,6 +214,20 @@
             ("starttag", "e", [("a", "rgb(1,2,3)")]),
             ])
 
+    def test_attr_values_entities(self):
+        """Substitution of entities and charrefs in attribute values"""
+        # SF bug #1452246
+        self.check_events("""<a b=&lt; c=&lt;&gt; d=&lt-&gt; e='&lt; '
+                                f="&xxx;" g='&#32;&#33;' h='&#500;' i='x?a=b&c=d;'>""",
+            [("starttag", "a", [("b", "<"),
+                                ("c", "<>"),
+                                ("d", "&lt->"),
+                                ("e", "< "),
+                                ("f", "&xxx;"),
+                                ("g", " !"),
+                                ("h", "&#500;"),
+                                ("i", "x?a=b&c=d;"), ])])
+
     def test_attr_funky_names(self):
         self.check_events("""<a a.b='v' c:d=v e-f=v>""", [
             ("starttag", "a", [("a.b", "v"), ("c:d", "v"), ("e-f", "v")]),
diff --git a/Lib/test/test_socket.py b/Lib/test/test_socket.py
index 1899e78..6943080 100644
--- a/Lib/test/test_socket.py
+++ b/Lib/test/test_socket.py
@@ -268,9 +268,9 @@
             # Probably a similar problem as above; skip this test
             return
         all_host_names = [hostname, hname] + aliases
-        fqhn = socket.getfqdn()
+        fqhn = socket.getfqdn(ip)
         if not fqhn in all_host_names:
-            self.fail("Error testing host resolution mechanisms.")
+            self.fail("Error testing host resolution mechanisms. (fqdn: %s, all: %s)" % (fqhn, repr(all_host_names)))
 
     def testRefCountGetNameInfo(self):
         # Testing reference count for getnameinfo
@@ -469,6 +469,14 @@
         sock.close()
         self.assertRaises(socket.error, sock.send, "spam")
 
+    def testNewAttributes(self):
+        # testing .family, .type and .protocol
+        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
+        self.assertEqual(sock.family, socket.AF_INET)
+        self.assertEqual(sock.type, socket.SOCK_STREAM)
+        self.assertEqual(sock.proto, 0)
+        sock.close()
+
 class BasicTCPTest(SocketConnectedTest):
 
     def __init__(self, methodName='runTest'):
@@ -817,6 +825,32 @@
         self.assert_(issubclass(socket.gaierror, socket.error))
         self.assert_(issubclass(socket.timeout, socket.error))
 
+class TestLinuxAbstractNamespace(unittest.TestCase):
+
+    UNIX_PATH_MAX = 108
+
+    def testLinuxAbstractNamespace(self):
+        address = "\x00python-test-hello\x00\xff"
+        s1 = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        s1.bind(address)
+        s1.listen(1)
+        s2 = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        s2.connect(s1.getsockname())
+        s1.accept()
+        self.assertEqual(s1.getsockname(), address)
+        self.assertEqual(s2.getpeername(), address)
+
+    def testMaxName(self):
+        address = "\x00" + "h" * (self.UNIX_PATH_MAX - 1)
+        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        s.bind(address)
+        self.assertEqual(s.getsockname(), address)
+
+    def testNameOverflow(self):
+        address = "\x00" + "h" * self.UNIX_PATH_MAX
+        s = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
+        self.assertRaises(socket.error, s.bind, address)
+
 
 def test_main():
     tests = [GeneralModuleTests, BasicTCPTest, TCPTimeoutTest, TestExceptions]
@@ -832,6 +866,8 @@
     ])
     if hasattr(socket, "socketpair"):
         tests.append(BasicSocketPairTest)
+    if sys.platform == 'linux2':
+        tests.append(TestLinuxAbstractNamespace)
     test_support.run_unittest(*tests)
 
 if __name__ == "__main__":
diff --git a/Lib/test/test_socket_ssl.py b/Lib/test/test_socket_ssl.py
index 98680b9..1091383 100644
--- a/Lib/test/test_socket_ssl.py
+++ b/Lib/test/test_socket_ssl.py
@@ -1,5 +1,6 @@
 # Test just the SSL support in the socket module, in a moderately bogus way.
 
+import sys
 from test import test_support
 import socket
 
@@ -13,6 +14,9 @@
 
     import urllib
 
+    if test_support.verbose:
+        print "test_basic ..."
+
     socket.RAND_status()
     try:
         socket.RAND_egd(1)
@@ -26,7 +30,41 @@
     buf = f.read()
     f.close()
 
+def test_timeout():
+    test_support.requires('network')
+
+    if test_support.verbose:
+        print "test_timeout ..."
+
+    # A service which issues a welcome banner (without need to write
+    # anything).
+    # XXX ("gmail.org", 995) has been unreliable so far, from time to time
+    # XXX non-responsive for hours on end (& across all buildbot slaves,
+    # XXX so that's not just a local thing).
+    ADDR = "gmail.org", 995
+
+    s = socket.socket()
+    s.settimeout(30.0)
+    try:
+        s.connect(ADDR)
+    except socket.timeout:
+        print >> sys.stderr, """\
+    WARNING:  an attempt to connect to %r timed out, in
+    test_timeout.  That may be legitimate, but is not the outcome we hoped
+    for.  If this message is seen often, test_timeout should be changed to
+    use a more reliable address.""" % (ADDR,)
+        return
+
+    ss = socket.ssl(s)
+    # Read part of return welcome banner twice.
+    ss.read(1)
+    ss.read(1)
+    s.close()
+
 def test_rude_shutdown():
+    if test_support.verbose:
+        print "test_rude_shutdown ..."
+
     try:
         import threading
     except ImportError:
@@ -74,6 +112,7 @@
         raise test_support.TestSkipped("socket module has no ssl support")
     test_rude_shutdown()
     test_basic()
+    test_timeout()
 
 if __name__ == "__main__":
     test_main()
diff --git a/Lib/test/test_sqlite.py b/Lib/test/test_sqlite.py
new file mode 100644
index 0000000..1b1d0e5
--- /dev/null
+++ b/Lib/test/test_sqlite.py
@@ -0,0 +1,16 @@
+from test.test_support import run_unittest, TestSkipped
+import unittest
+
+try:
+    import _sqlite3
+except ImportError:
+    raise TestSkipped('no sqlite available')
+from sqlite3.test import (dbapi, types, userfunctions,
+                                factory, transactions)
+
+def test_main():
+    run_unittest(dbapi.suite(), types.suite(), userfunctions.suite(),
+                 factory.suite(), transactions.suite())
+
+if __name__ == "__main__":
+    test_main()
diff --git a/Lib/test/test_startfile.py b/Lib/test/test_startfile.py
new file mode 100644
index 0000000..c4d12d7
--- /dev/null
+++ b/Lib/test/test_startfile.py
@@ -0,0 +1,37 @@
+# Ridiculously simple test of the os.startfile function for Windows.
+#
+# empty.vbs is an empty file (except for a comment), which does
+# nothing when run with cscript or wscript.
+#
+# A possible improvement would be to have empty.vbs do something that
+# we can detect here, to make sure that not only the os.startfile()
+# call succeeded, but also the the script actually has run.
+
+import unittest
+from test import test_support
+
+# use this form so that the test is skipped when startfile is not available:
+from os import startfile, path
+
+class TestCase(unittest.TestCase):
+    def test_nonexisting(self):
+        self.assertRaises(OSError, startfile, "nonexisting.vbs")
+
+    def test_nonexisting_u(self):
+        self.assertRaises(OSError, startfile, u"nonexisting.vbs")
+
+    def test_empty(self):
+        empty = path.join(path.dirname(__file__), "empty.vbs")
+        startfile(empty)
+        startfile(empty, "open")
+
+    def test_empty_u(self):
+        empty = path.join(path.dirname(__file__), "empty.vbs")
+        startfile(unicode(empty, "mbcs"))
+        startfile(unicode(empty, "mbcs"), "open")
+
+def test_main():
+    test_support.run_unittest(TestCase)
+
+if __name__=="__main__":
+    test_main()
diff --git a/Lib/test/test_sundry.py b/Lib/test/test_sundry.py
index fd10b68..af13684 100644
--- a/Lib/test/test_sundry.py
+++ b/Lib/test/test_sundry.py
@@ -12,75 +12,50 @@
 from test.test_support import verbose
 
 import BaseHTTPServer
+import DocXMLRPCServer
 import CGIHTTPServer
-import Queue
 import SimpleHTTPServer
-import SocketServer
+import SimpleXMLRPCServer
 import aifc
-import anydbm
 import audiodev
 import bdb
+import cgitb
 import cmd
 import code
-import codeop
-import colorsys
-import commands
 import compileall
-try:
-    import curses   # not available on Windows
-except ImportError:
-    if verbose:
-        print "skipping curses"
-import dircache
-import dis
-import distutils
-import doctest
-import dumbdbm
 import encodings
-import fnmatch
 import formatter
-import fpformat
 import ftplib
 import getpass
-import glob
 import gopherlib
 import htmlentitydefs
-import htmllib
-import httplib
-import imaplib
+import ihooks
 import imghdr
 import imputil
 import keyword
-import macpath
+import linecache
 import macurl2path
 import mailcap
-import mhlib
-import mimetypes
 import mimify
-import multifile
 import mutex
 import nntplib
 import nturl2path
+import opcode
+import os2emxpath
 import pdb
 import pipes
 #import poplib
 import posixfile
-import profile
 import pstats
 import py_compile
-#import reconvert
-import repr
+import pydoc
+import rexec
 try:
     import rlcompleter   # not available on Windows
 except ImportError:
     if verbose:
         print "skipping rlcompleter"
-import robotparser
 import sched
-import sgmllib
-import shelve
-import shlex
-import shutil
 import smtplib
 import sndhdr
 import statvfs
@@ -90,12 +65,17 @@
 import symbol
 import tabnanny
 import telnetlib
-import test
+import timeit
 import toaiff
-import urllib2
+import token
+try:
+    import tty     # not available on Windows
+except ImportError:
+    if verbose:
+        print "skipping tty"
+
 # Can't test the "user" module -- if the user has a ~/.pythonrc.py, it
 # can screw up all sorts of things (esp. if it prints!).
 #import user
 import webbrowser
-import whichdb
 import xml
diff --git a/Lib/test/test_sys.py b/Lib/test/test_sys.py
index b98c648..ae2a1c8 100644
--- a/Lib/test/test_sys.py
+++ b/Lib/test/test_sys.py
@@ -261,6 +261,11 @@
         self.assert_(vi[3] in ("alpha", "beta", "candidate", "final"))
         self.assert_(isinstance(vi[4], int))
 
+    def test_43581(self):
+        # Can't use sys.stdout, as this is a cStringIO object when
+        # the test runs under regrtest.
+        self.assert_(sys.__stdout__.encoding == sys.__stderr__.encoding)
+
 def test_main():
     test.test_support.run_unittest(SysModuleTest)
 
diff --git a/Lib/test/test_timeout.py b/Lib/test/test_timeout.py
index cb19d9e..4309e8c 100644
--- a/Lib/test/test_timeout.py
+++ b/Lib/test/test_timeout.py
@@ -113,8 +113,9 @@
 
         # If we are too close to www.python.org, this test will fail.
         # Pick a host that should be farther away.
-        if socket.getfqdn().split('.')[-2:] == ['python', 'org']:
-            self.addr_remote = ('python.net', 80)
+        if (socket.getfqdn().split('.')[-2:] == ['python', 'org'] or
+            socket.getfqdn().split('.')[-2:-1] == ['xs4all']):
+            self.addr_remote = ('tut.fi', 80)
 
         _t1 = time.time()
         self.failUnlessRaises(socket.error, self.sock.connect,
diff --git a/Lib/test/test_tokenize.py b/Lib/test/test_tokenize.py
index d3c1cc4..b064967 100644
--- a/Lib/test/test_tokenize.py
+++ b/Lib/test/test_tokenize.py
@@ -1,70 +1,30 @@
-from test.test_support import verbose, findfile, is_resource_enabled, TestFailed
 import os, glob, random
+from cStringIO import StringIO
+from test.test_support import (verbose, findfile, is_resource_enabled,
+                               TestFailed)
 from tokenize import (tokenize, generate_tokens, untokenize,
                       NUMBER, NAME, OP, STRING)
 
-if verbose:
-    print 'starting...'
-
-f = file(findfile('tokenize_tests' + os.extsep + 'txt'))
-tokenize(f.readline)
-f.close()
-
-
-
-###### Test roundtrip for untokenize ##########################
-
+# Test roundtrip for `untokenize`.  `f` is a file path.  The source code in f
+# is tokenized, converted back to source code via tokenize.untokenize(),
+# and tokenized again from the latter.  The test fails if the second
+# tokenization doesn't match the first.
 def test_roundtrip(f):
     ## print 'Testing:', f
-    f = file(f)
+    fobj = open(f)
     try:
-        fulltok = list(generate_tokens(f.readline))
+        fulltok = list(generate_tokens(fobj.readline))
     finally:
-        f.close()
+        fobj.close()
 
     t1 = [tok[:2] for tok in fulltok]
     newtext = untokenize(t1)
     readline = iter(newtext.splitlines(1)).next
     t2 = [tok[:2] for tok in generate_tokens(readline)]
-    assert t1 == t2
+    if t1 != t2:
+        raise TestFailed("untokenize() roundtrip failed for %r" % f)
 
-
-f = findfile('tokenize_tests' + os.extsep + 'txt')
-test_roundtrip(f)
-
-testdir = os.path.dirname(f) or os.curdir
-testfiles = glob.glob(testdir + os.sep + 'test*.py')
-if not is_resource_enabled('compiler'):
-    testfiles = random.sample(testfiles, 10)
-
-for f in testfiles:
-    test_roundtrip(f)
-
-
-###### Test detecton of IndentationError ######################
-
-from cStringIO import StringIO
-
-sampleBadText = """
-def foo():
-    bar
-  baz
-"""
-
-try:
-    for tok in generate_tokens(StringIO(sampleBadText).readline):
-        pass
-except IndentationError:
-    pass
-else:
-    raise TestFailed("Did not detect IndentationError:")
-
-
-###### Test example in the docs ###############################
-
-from decimal import Decimal
-from cStringIO import StringIO
-
+# This is an example from the docs, set up as a doctest.
 def decistmt(s):
     """Substitute Decimals for floats in a string of statements.
 
@@ -73,12 +33,21 @@
     >>> decistmt(s)
     "print +Decimal ('21.3e-5')*-Decimal ('.1234')/Decimal ('81.7')"
 
-    >>> exec(s)
-    -3.21716034272e-007
+    The format of the exponent is inherited from the platform C library.
+    Known cases are "e-007" (Windows) and "e-07" (not Windows).  Since
+    we're only showing 12 digits, and the 13th isn't close to 5, the
+    rest of the output should be platform-independent.
+
+    >>> exec(s) #doctest: +ELLIPSIS
+    -3.21716034272e-0...7
+
+    Output from calculations with Decimal should be identical across all
+    platforms.
+
     >>> exec(decistmt(s))
     -3.217160342717258261933904529E-7
-
     """
+
     result = []
     g = generate_tokens(StringIO(s).readline)   # tokenize the string
     for toknum, tokval, _, _, _  in g:
@@ -93,8 +62,53 @@
             result.append((toknum, tokval))
     return untokenize(result)
 
-import doctest
-doctest.testmod()
+def test_main():
+    if verbose:
+        print 'starting...'
 
-if verbose:
-    print 'finished'
+    # This displays the tokenization of tokenize_tests.py to stdout, and
+    # regrtest.py checks that this equals the expected output (in the
+    # test/output/ directory).
+    f = open(findfile('tokenize_tests' + os.extsep + 'txt'))
+    tokenize(f.readline)
+    f.close()
+
+    # Now run test_roundtrip() over tokenize_test.py too, and over all
+    # (if the "compiler" resource is enabled) or a small random sample (if
+    # "compiler" is not enabled) of the test*.py files.
+    f = findfile('tokenize_tests' + os.extsep + 'txt')
+    test_roundtrip(f)
+
+    testdir = os.path.dirname(f) or os.curdir
+    testfiles = glob.glob(testdir + os.sep + 'test*.py')
+    if not is_resource_enabled('compiler'):
+        testfiles = random.sample(testfiles, 10)
+
+    for f in testfiles:
+        test_roundtrip(f)
+
+    # Test detecton of IndentationError.
+    sampleBadText = """\
+def foo():
+    bar
+  baz
+"""
+
+    try:
+        for tok in generate_tokens(StringIO(sampleBadText).readline):
+            pass
+    except IndentationError:
+        pass
+    else:
+        raise TestFailed("Did not detect IndentationError:")
+
+    # Run the doctests in this module.
+    from test import test_tokenize  # i.e., this module
+    from test.test_support import run_doctest
+    run_doctest(test_tokenize)
+
+    if verbose:
+        print 'finished'
+
+if __name__ == "__main__":
+    test_main()
diff --git a/Lib/test/test_trace.py b/Lib/test/test_trace.py
index 944ff9a..4f946f7 100644
--- a/Lib/test/test_trace.py
+++ b/Lib/test/test_trace.py
@@ -13,7 +13,15 @@
                 (1, 'line'),
                 (1, 'return')]
 
-# Armin Rigo's failing example:
+# Many of the tests below are tricky because they involve pass statements.
+# If there is implicit control flow around a pass statement (in an except
+# clause or else caluse) under what conditions do you set a line number
+# following that clause?
+
+
+# The entire "while 0:" statement is optimized away.  No code
+# exists for it, so the line numbers skip directly from "del x"
+# to "x = 1".
 def arigo_example():
     x = 1
     del x
@@ -24,7 +32,6 @@
 arigo_example.events = [(0, 'call'),
                         (1, 'line'),
                         (2, 'line'),
-                        (3, 'line'),
                         (5, 'line'),
                         (5, 'return')]
 
@@ -60,14 +67,16 @@
                       (2, 'return')]
 
 def no_pop_blocks():
-    while 0:
+    y = 1
+    while not y:
         bla
     x = 1
 
 no_pop_blocks.events = [(0, 'call'),
                         (1, 'line'),
-                        (3, 'line'),
-                        (3, 'return')]
+                        (2, 'line'),
+                        (4, 'line'),
+                        (4, 'return')]
 
 def called(): # line -3
     x = 1
@@ -127,6 +136,13 @@
                              (4, 'return')]
 
 # implicit return example
+# This test is interesting because of the else: pass
+# part of the code.  The code generate for the true
+# part of the if contains a jump past the else branch.
+# The compiler then generates an implicit "return None"
+# Internally, the compiler visits the pass statement
+# and stores its line number for use on the next instruction.
+# The next instruction is the implicit return None.
 def ireturn_example():
     a = 5
     b = 5
@@ -140,7 +156,8 @@
                           (2, 'line'),
                           (3, 'line'),
                           (4, 'line'),
-                          (4, 'return')]
+                          (6, 'line'),
+                          (6, 'return')]
 
 # Tight loop with while(1) example (SF #765624)
 def tightloop_example():
@@ -221,14 +238,12 @@
 
     def test_01_basic(self):
         self.run_test(basic)
-## XXX: These tests fail with the new ast compiler.  They must
-## be fixed before a release.
-##    def test_02_arigo(self):
-##        self.run_test(arigo_example)
+    def test_02_arigo(self):
+        self.run_test(arigo_example)
     def test_03_one_instr(self):
         self.run_test(one_instr_line)
-##    def test_04_no_pop_blocks(self):
-##        self.run_test(no_pop_blocks)
+    def test_04_no_pop_blocks(self):
+        self.run_test(no_pop_blocks)
 ##    def test_05_no_pop_tops(self):
 ##        self.run_test(no_pop_tops)
     def test_06_call(self):
@@ -240,8 +255,8 @@
         self.run_test2(settrace_and_return)
     def test_09_settrace_and_raise(self):
         self.run_test2(settrace_and_raise)
-##    def test_10_ireturn(self):
-##        self.run_test(ireturn_example)
+    def test_10_ireturn(self):
+        self.run_test(ireturn_example)
     def test_11_tightloop(self):
         self.run_test(tightloop_example)
     def test_12_tighterloop(self):
@@ -579,17 +594,14 @@
         self.run_test(no_jump_too_far_forwards)
     def test_09_no_jump_too_far_backwards(self):
         self.run_test(no_jump_too_far_backwards)
-# XXX: These tests cause the interpreter to crash.  The frame_setlineno()
-# function no longer works correctly because the lineno table generated by
-# the AST compiler is slightly different than with the old compiler.
-#    def test_10_no_jump_to_except_1(self):
-#        self.run_test(no_jump_to_except_1)
-#    def test_11_no_jump_to_except_2(self):
-#        self.run_test(no_jump_to_except_2)
-#    def test_12_no_jump_to_except_3(self):
-#        self.run_test(no_jump_to_except_3)
-#    def test_13_no_jump_to_except_4(self):
-#        self.run_test(no_jump_to_except_4)
+    def test_10_no_jump_to_except_1(self):
+        self.run_test(no_jump_to_except_1)
+    def test_11_no_jump_to_except_2(self):
+        self.run_test(no_jump_to_except_2)
+    def test_12_no_jump_to_except_3(self):
+        self.run_test(no_jump_to_except_3)
+    def test_13_no_jump_to_except_4(self):
+        self.run_test(no_jump_to_except_4)
     def test_14_no_jump_forwards_into_block(self):
         self.run_test(no_jump_forwards_into_block)
     def test_15_no_jump_backwards_into_block(self):
diff --git a/Lib/test/test_traceback.py b/Lib/test/test_traceback.py
index 29a120f..22c0456 100644
--- a/Lib/test/test_traceback.py
+++ b/Lib/test/test_traceback.py
@@ -24,6 +24,9 @@
         # XXX why doesn't compile raise the same traceback?
         import test.badsyntax_nocaret
 
+    def syntax_error_bad_indentation(self):
+        compile("def spam():\n  print 1\n print 2", "?", "exec")
+
     def test_caret(self):
         err = self.get_exception_format(self.syntax_error_with_caret,
                                         SyntaxError)
@@ -40,6 +43,13 @@
         self.assert_(len(err) == 3)
         self.assert_(err[1].strip() == "[x for x in x] = x")
 
+    def test_bad_indentation(self):
+        err = self.get_exception_format(self.syntax_error_bad_indentation,
+                                        IndentationError)
+        self.assert_(len(err) == 4)
+        self.assert_("^" in err[2])
+        self.assert_(err[1].strip() == "print 2")
+
     def test_bug737473(self):
         import sys, os, tempfile, time
 
diff --git a/Lib/test/test_unicode.py b/Lib/test/test_unicode.py
index 49ef29d..c7113b5 100644
--- a/Lib/test/test_unicode.py
+++ b/Lib/test/test_unicode.py
@@ -810,6 +810,22 @@
         self.assertEqual(str(Foo9("foo")), "string")
         self.assertEqual(unicode(Foo9("foo")), u"not unicode")
 
+    def test_unicode_repr(self):
+        class s1:
+            def __repr__(self):
+                return '\\n'
+
+        class s2:
+            def __repr__(self):
+                return u'\\n'
+
+        self.assertEqual(repr(s1()), '\\n')
+        self.assertEqual(repr(s2()), '\\n')
+
+
+
+
+
 def test_main():
     test_support.run_unittest(UnicodeTest)
 
diff --git a/Lib/test/test_urllib2.py b/Lib/test/test_urllib2.py
index 2843138..64a2ee9 100644
--- a/Lib/test/test_urllib2.py
+++ b/Lib/test/test_urllib2.py
@@ -13,8 +13,7 @@
 # parse_keqv_list, parse_http_list (I'm leaving this for Anthony Baxter
 #  and Greg Stein, since they're doing Digest Authentication)
 # Authentication stuff (ditto)
-# ProxyHandler, CustomProxy, CustomProxyHandler (I don't use a proxy)
-# GopherHandler (haven't used gopher for a decade or so...)
+# CustomProxy, CustomProxyHandler
 
 class TrivialTests(unittest.TestCase):
     def test_trivial(self):
@@ -90,6 +89,7 @@
         return self.handle(self.meth_name, self.action, *args)
 
 class MockHandler:
+    handler_order = 500
     def __init__(self, methods):
         self._define_methods(methods)
     def _define_methods(self, methods):
@@ -154,7 +154,7 @@
     for meths in meth_spec:
         class MockHandlerSubclass(MockHandler): pass
         h = MockHandlerSubclass(meths)
-        h.handler_order = count
+        h.handler_order += count
         h.add_parent(opener)
         count = count + 1
         handlers.append(h)
@@ -349,13 +349,19 @@
         TESTFN = test_support.TESTFN
         urlpath = sanepathname2url(os.path.abspath(TESTFN))
         towrite = "hello, world\n"
-        for url in [
+        urls = [
             "file://localhost%s" % urlpath,
             "file://%s" % urlpath,
             "file://%s%s" % (socket.gethostbyname('localhost'), urlpath),
-            "file://%s%s" % (socket.gethostbyname(socket.gethostname()),
-                             urlpath),
-            ]:
+            ]
+        try:
+            localaddr = socket.gethostbyname(socket.gethostname())
+        except socket.gaierror:
+            localaddr = ''
+        if localaddr:
+            urls.append("file://%s%s" % (localaddr, urlpath))
+
+        for url in urls:
             f = open(TESTFN, "wb")
             try:
                 try:
@@ -636,6 +642,23 @@
         o.open("http://www.example.com/")
         self.assert_(not hh.req.has_header("Cookie"))
 
+    def test_proxy(self):
+        o = OpenerDirector()
+        ph = urllib2.ProxyHandler(dict(http="proxy.example.com:3128"))
+        o.add_handler(ph)
+        meth_spec = [
+            [("http_open", "return response")]
+            ]
+        handlers = add_ordered_mock_handlers(o, meth_spec)
+
+        req = Request("http://acme.example.com/")
+        self.assertEqual(req.get_host(), "acme.example.com")
+        r = o.open(req)
+        self.assertEqual(req.get_host(), "proxy.example.com:3128")
+
+        self.assertEqual([(handlers[0], "http_open")],
+                         [tup[0:2] for tup in o.calls])
+
 
 class MiscTests(unittest.TestCase):
 
@@ -821,6 +844,7 @@
 
 
 def test_main(verbose=None):
+    test_support.run_doctest(urllib2, verbose)
     tests = (TrivialTests,
              OpenerDirectorTests,
              HandlerTests,
diff --git a/Lib/test/test_urlparse.py b/Lib/test/test_urlparse.py
index 39ada06..5cee458 100644
--- a/Lib/test/test_urlparse.py
+++ b/Lib/test/test_urlparse.py
@@ -12,15 +12,53 @@
     def checkRoundtrips(self, url, parsed, split):
         result = urlparse.urlparse(url)
         self.assertEqual(result, parsed)
+        t = (result.scheme, result.netloc, result.path,
+             result.params, result.query, result.fragment)
+        self.assertEqual(t, parsed)
         # put it back together and it should be the same
         result2 = urlparse.urlunparse(result)
         self.assertEqual(result2, url)
+        self.assertEqual(result2, result.geturl())
+
+        # the result of geturl() is a fixpoint; we can always parse it
+        # again to get the same result:
+        result3 = urlparse.urlparse(result.geturl())
+        self.assertEqual(result3.geturl(), result.geturl())
+        self.assertEqual(result3,          result)
+        self.assertEqual(result3.scheme,   result.scheme)
+        self.assertEqual(result3.netloc,   result.netloc)
+        self.assertEqual(result3.path,     result.path)
+        self.assertEqual(result3.params,   result.params)
+        self.assertEqual(result3.query,    result.query)
+        self.assertEqual(result3.fragment, result.fragment)
+        self.assertEqual(result3.username, result.username)
+        self.assertEqual(result3.password, result.password)
+        self.assertEqual(result3.hostname, result.hostname)
+        self.assertEqual(result3.port,     result.port)
 
         # check the roundtrip using urlsplit() as well
         result = urlparse.urlsplit(url)
         self.assertEqual(result, split)
+        t = (result.scheme, result.netloc, result.path,
+             result.query, result.fragment)
+        self.assertEqual(t, split)
         result2 = urlparse.urlunsplit(result)
         self.assertEqual(result2, url)
+        self.assertEqual(result2, result.geturl())
+
+        # check the fixpoint property of re-parsing the result of geturl()
+        result3 = urlparse.urlsplit(result.geturl())
+        self.assertEqual(result3.geturl(), result.geturl())
+        self.assertEqual(result3,          result)
+        self.assertEqual(result3.scheme,   result.scheme)
+        self.assertEqual(result3.netloc,   result.netloc)
+        self.assertEqual(result3.path,     result.path)
+        self.assertEqual(result3.query,    result.query)
+        self.assertEqual(result3.fragment, result.fragment)
+        self.assertEqual(result3.username, result.username)
+        self.assertEqual(result3.password, result.password)
+        self.assertEqual(result3.hostname, result.hostname)
+        self.assertEqual(result3.port,     result.port)
 
     def test_roundtrips(self):
         testcases = [
@@ -187,6 +225,69 @@
             ]:
             self.assertEqual(urlparse.urldefrag(url), (defrag, frag))
 
+    def test_urlsplit_attributes(self):
+        url = "HTTP://WWW.PYTHON.ORG/doc/#frag"
+        p = urlparse.urlsplit(url)
+        self.assertEqual(p.scheme, "http")
+        self.assertEqual(p.netloc, "WWW.PYTHON.ORG")
+        self.assertEqual(p.path, "/doc/")
+        self.assertEqual(p.query, "")
+        self.assertEqual(p.fragment, "frag")
+        self.assertEqual(p.username, None)
+        self.assertEqual(p.password, None)
+        self.assertEqual(p.hostname, "www.python.org")
+        self.assertEqual(p.port, None)
+        # geturl() won't return exactly the original URL in this case
+        # since the scheme is always case-normalized
+        #self.assertEqual(p.geturl(), url)
+
+        url = "http://User:Pass@www.python.org:080/doc/?query=yes#frag"
+        p = urlparse.urlsplit(url)
+        self.assertEqual(p.scheme, "http")
+        self.assertEqual(p.netloc, "User:Pass@www.python.org:080")
+        self.assertEqual(p.path, "/doc/")
+        self.assertEqual(p.query, "query=yes")
+        self.assertEqual(p.fragment, "frag")
+        self.assertEqual(p.username, "User")
+        self.assertEqual(p.password, "Pass")
+        self.assertEqual(p.hostname, "www.python.org")
+        self.assertEqual(p.port, 80)
+        self.assertEqual(p.geturl(), url)
+
+    def test_attributes_bad_port(self):
+        """Check handling of non-integer ports."""
+        p = urlparse.urlsplit("http://www.example.net:foo")
+        self.assertEqual(p.netloc, "www.example.net:foo")
+        self.assertRaises(ValueError, lambda: p.port)
+
+        p = urlparse.urlparse("http://www.example.net:foo")
+        self.assertEqual(p.netloc, "www.example.net:foo")
+        self.assertRaises(ValueError, lambda: p.port)
+
+    def test_attributes_without_netloc(self):
+        # This example is straight from RFC 3261.  It looks like it
+        # should allow the username, hostname, and port to be filled
+        # in, but doesn't.  Since it's a URI and doesn't use the
+        # scheme://netloc syntax, the netloc and related attributes
+        # should be left empty.
+        uri = "sip:alice@atlanta.com;maddr=239.255.255.1;ttl=15"
+        p = urlparse.urlsplit(uri)
+        self.assertEqual(p.netloc, "")
+        self.assertEqual(p.username, None)
+        self.assertEqual(p.password, None)
+        self.assertEqual(p.hostname, None)
+        self.assertEqual(p.port, None)
+        self.assertEqual(p.geturl(), uri)
+
+        p = urlparse.urlparse(uri)
+        self.assertEqual(p.netloc, "")
+        self.assertEqual(p.username, None)
+        self.assertEqual(p.password, None)
+        self.assertEqual(p.hostname, None)
+        self.assertEqual(p.port, None)
+        self.assertEqual(p.geturl(), uri)
+
+
 def test_main():
     test_support.run_unittest(UrlParseTestCase)
 
diff --git a/Lib/test/test_wait3.py b/Lib/test/test_wait3.py
new file mode 100644
index 0000000..f6a41a6
--- /dev/null
+++ b/Lib/test/test_wait3.py
@@ -0,0 +1,32 @@
+"""This test checks for correct wait3() behavior.
+"""
+
+import os
+from test.fork_wait import ForkWait
+from test.test_support import TestSkipped, run_unittest
+
+try:
+    os.fork
+except AttributeError:
+    raise TestSkipped, "os.fork not defined -- skipping test_wait3"
+
+try:
+    os.wait3
+except AttributeError:
+    raise TestSkipped, "os.wait3 not defined -- skipping test_wait3"
+
+class Wait3Test(ForkWait):
+    def wait_impl(self, cpid):
+        while 1:
+            spid, status, rusage = os.wait3(0)
+            if spid == cpid:
+                break
+        self.assertEqual(spid, cpid)
+        self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
+        self.assertTrue(rusage)
+
+def test_main():
+    run_unittest(Wait3Test)
+
+if __name__ == "__main__":
+    test_main()
diff --git a/Lib/test/test_wait4.py b/Lib/test/test_wait4.py
new file mode 100644
index 0000000..027e5c3
--- /dev/null
+++ b/Lib/test/test_wait4.py
@@ -0,0 +1,29 @@
+"""This test checks for correct wait4() behavior.
+"""
+
+import os
+from test.fork_wait import ForkWait
+from test.test_support import TestSkipped, run_unittest
+
+try:
+    os.fork
+except AttributeError:
+    raise TestSkipped, "os.fork not defined -- skipping test_wait4"
+
+try:
+    os.wait4
+except AttributeError:
+    raise TestSkipped, "os.wait4 not defined -- skipping test_wait4"
+
+class Wait4Test(ForkWait):
+    def wait_impl(self, cpid):
+        spid, status, rusage = os.wait4(cpid, 0)
+        self.assertEqual(spid, cpid)
+        self.assertEqual(status, 0, "cause = %d, exit = %d" % (status&0xff, status>>8))
+        self.assertTrue(rusage)
+
+def test_main():
+    run_unittest(Wait4Test)
+
+if __name__ == "__main__":
+    test_main()
diff --git a/Lib/test/test_warnings.py b/Lib/test/test_warnings.py
index b7061c1..5d051a5 100644
--- a/Lib/test/test_warnings.py
+++ b/Lib/test/test_warnings.py
@@ -82,6 +82,10 @@
         self.assertEqual(msg.category, 'UserWarning')
 
 def test_main(verbose=None):
+    # Obscure hack so that this test passes after reloads or repeated calls
+    # to test_main (regrtest -R).
+    if '__warningregistry__' in globals():
+        del globals()['__warningregistry__']
     test_support.run_unittest(TestModule)
 
 if __name__ == "__main__":
diff --git a/Lib/test/test_winsound.py b/Lib/test/test_winsound.py
index 77c432a..19d4459 100644
--- a/Lib/test/test_winsound.py
+++ b/Lib/test/test_winsound.py
@@ -3,6 +3,9 @@
 import unittest
 from test import test_support
 import winsound, time
+import os
+import subprocess
+
 
 class BeepTest(unittest.TestCase):
 
@@ -44,6 +47,7 @@
     def test_question(self):
         winsound.MessageBeep(winsound.MB_ICONQUESTION)
 
+
 class PlaySoundTest(unittest.TestCase):
 
     def test_errors(self):
@@ -56,19 +60,54 @@
         )
 
     def test_alias_asterisk(self):
-        winsound.PlaySound('SystemAsterisk', winsound.SND_ALIAS)
+        if _have_soundcard():
+            winsound.PlaySound('SystemAsterisk', winsound.SND_ALIAS)
+        else:
+            self.assertRaises(
+                RuntimeError,
+                winsound.PlaySound,
+                'SystemAsterisk', winsound.SND_ALIAS
+            )
 
     def test_alias_exclamation(self):
-        winsound.PlaySound('SystemExclamation', winsound.SND_ALIAS)
+        if _have_soundcard():
+            winsound.PlaySound('SystemExclamation', winsound.SND_ALIAS)
+        else:
+            self.assertRaises(
+                RuntimeError,
+                winsound.PlaySound,
+                'SystemExclamation', winsound.SND_ALIAS
+            )
 
     def test_alias_exit(self):
-        winsound.PlaySound('SystemExit', winsound.SND_ALIAS)
+        if _have_soundcard():
+            winsound.PlaySound('SystemExit', winsound.SND_ALIAS)
+        else:
+            self.assertRaises(
+                RuntimeError,
+                winsound.PlaySound,
+                'SystemExit', winsound.SND_ALIAS
+            )
 
     def test_alias_hand(self):
-        winsound.PlaySound('SystemHand', winsound.SND_ALIAS)
+        if _have_soundcard():
+            winsound.PlaySound('SystemHand', winsound.SND_ALIAS)
+        else:
+            self.assertRaises(
+                RuntimeError,
+                winsound.PlaySound,
+                'SystemHand', winsound.SND_ALIAS
+            )
 
     def test_alias_question(self):
-        winsound.PlaySound('SystemQuestion', winsound.SND_ALIAS)
+        if _have_soundcard():
+            winsound.PlaySound('SystemQuestion', winsound.SND_ALIAS)
+        else:
+            self.assertRaises(
+                RuntimeError,
+                winsound.PlaySound,
+                'SystemQuestion', winsound.SND_ALIAS
+            )
 
     def test_alias_fallback(self):
         # This test can't be expected to work on all systems.  The MS
@@ -85,41 +124,83 @@
         return
 
     def test_alias_nofallback(self):
-        # Note that this is not the same as asserting RuntimeError
-        # will get raised:  you cannot convert this to
-        # self.assertRaises(...) form.  The attempt may or may not
-        # raise RuntimeError, but it shouldn't raise anything other
-        # than RuntimeError, and that's all we're trying to test here.
-        # The MS docs aren't clear about whether the SDK PlaySound()
-        # with SND_ALIAS and SND_NODEFAULT will return True or False when
-        # the alias is unknown.  On Tim's WinXP box today, it returns
-        # True (no exception is raised).  What we'd really like to test
-        # is that no sound is played, but that requires first wiring an
-        # eardrum class into unittest <wink>.
-        try:
-            winsound.PlaySound(
-                '!"$%&/(#+*',
-                winsound.SND_ALIAS | winsound.SND_NODEFAULT
+        if _have_soundcard():
+            # Note that this is not the same as asserting RuntimeError
+            # will get raised:  you cannot convert this to
+            # self.assertRaises(...) form.  The attempt may or may not
+            # raise RuntimeError, but it shouldn't raise anything other
+            # than RuntimeError, and that's all we're trying to test
+            # here.  The MS docs aren't clear about whether the SDK
+            # PlaySound() with SND_ALIAS and SND_NODEFAULT will return
+            # True or False when the alias is unknown.  On Tim's WinXP
+            # box today, it returns True (no exception is raised).  What
+            # we'd really like to test is that no sound is played, but
+            # that requires first wiring an eardrum class into unittest
+            # <wink>.
+            try:
+                winsound.PlaySound(
+                    '!"$%&/(#+*',
+                    winsound.SND_ALIAS | winsound.SND_NODEFAULT
+                )
+            except RuntimeError:
+                pass
+        else:
+            self.assertRaises(
+                RuntimeError,
+                winsound.PlaySound,
+                '!"$%&/(#+*', winsound.SND_ALIAS | winsound.SND_NODEFAULT
             )
-        except RuntimeError:
-            pass
 
     def test_stopasync(self):
-        winsound.PlaySound(
-            'SystemQuestion',
-            winsound.SND_ALIAS | winsound.SND_ASYNC | winsound.SND_LOOP
-        )
-        time.sleep(0.5)
-        try:
+        if _have_soundcard():
             winsound.PlaySound(
                 'SystemQuestion',
-                winsound.SND_ALIAS | winsound.SND_NOSTOP
+                winsound.SND_ALIAS | winsound.SND_ASYNC | winsound.SND_LOOP
             )
-        except RuntimeError:
-            pass
-        else: # the first sound might already be finished
-            pass
-        winsound.PlaySound(None, winsound.SND_PURGE)
+            time.sleep(0.5)
+            try:
+                winsound.PlaySound(
+                    'SystemQuestion',
+                    winsound.SND_ALIAS | winsound.SND_NOSTOP
+                )
+            except RuntimeError:
+                pass
+            else: # the first sound might already be finished
+                pass
+            winsound.PlaySound(None, winsound.SND_PURGE)
+        else:
+            self.assertRaises(
+                RuntimeError,
+                winsound.PlaySound,
+                None, winsound.SND_PURGE
+            )
+
+
+def _get_cscript_path():
+    """Return the full path to cscript.exe or None."""
+    for dir in os.environ.get("PATH", "").split(os.pathsep):
+        cscript_path = os.path.join(dir, "cscript.exe")
+        if os.path.exists(cscript_path):
+            return cscript_path
+
+__have_soundcard_cache = None
+def _have_soundcard():
+    """Return True iff this computer has a soundcard."""
+    global __have_soundcard_cache
+    if __have_soundcard_cache is None:
+        cscript_path = _get_cscript_path()
+        if cscript_path is None:
+            # Could not find cscript.exe to run our VBScript helper. Default
+            # to True: most computers these days *do* have a soundcard.
+            return True
+
+        check_script = os.path.join(os.path.dirname(__file__),
+                                    "check_soundcard.vbs")
+        p = subprocess.Popen([cscript_path, check_script],
+                             stdout=subprocess.PIPE)
+        __have_soundcard_cache = not p.wait()
+    return __have_soundcard_cache
+
 
 def test_main():
     test_support.run_unittest(BeepTest, MessageBeepTest, PlaySoundTest)
diff --git a/Lib/test/test_with.py b/Lib/test/test_with.py
index 4854436..48e00f4 100644
--- a/Lib/test/test_with.py
+++ b/Lib/test/test_with.py
@@ -494,6 +494,62 @@
         self.assertAfterWithGeneratorInvariantsWithError(self.foo)
         self.assertAfterWithGeneratorInvariantsNoError(self.bar)
 
+    def testRaisedStopIteration1(self):
+        @contextmanager
+        def cm():
+            yield
+
+        def shouldThrow():
+            with cm():
+                raise StopIteration("from with")
+
+        self.assertRaises(StopIteration, shouldThrow)
+
+    def testRaisedStopIteration2(self):
+        class cm (object):
+            def __context__(self):
+                return self
+
+            def __enter__(self):
+                pass
+
+            def __exit__(self, type, value, traceback):
+                pass
+
+        def shouldThrow():
+            with cm():
+                raise StopIteration("from with")
+
+        self.assertRaises(StopIteration, shouldThrow)
+
+    def testRaisedGeneratorExit1(self):
+        @contextmanager
+        def cm():
+            yield
+
+        def shouldThrow():
+            with cm():
+                raise GeneratorExit("from with")
+
+        self.assertRaises(GeneratorExit, shouldThrow)
+
+    def testRaisedGeneratorExit2(self):
+        class cm (object):
+            def __context__(self):
+                return self
+
+            def __enter__(self):
+                pass
+
+            def __exit__(self, type, value, traceback):
+                pass
+
+        def shouldThrow():
+            with cm():
+                raise GeneratorExit("from with")
+
+        self.assertRaises(GeneratorExit, shouldThrow)
+
 
 class NonLocalFlowControlTestCase(unittest.TestCase):
 
diff --git a/Lib/test/test_xrange.py b/Lib/test/test_xrange.py
index 40590cd..c0d1dbe 100644
--- a/Lib/test/test_xrange.py
+++ b/Lib/test/test_xrange.py
@@ -57,12 +57,7 @@
         self.assertRaises(OverflowError, xrange, 0, 2*sys.maxint)
 
         r = xrange(-sys.maxint, sys.maxint, 2)
-        if sys.maxint > 0x7fffffff:
-            # XXX raising ValueError is less than ideal, but this can't
-            # be fixed until range_length() returns a long in rangeobject.c
-            self.assertRaises(ValueError, len, r)
-        else:
-            self.assertEqual(len(r), sys.maxint)
+        self.assertEqual(len(r), sys.maxint)
         self.assertRaises(OverflowError, xrange, -sys.maxint-1, sys.maxint, 2)
 
 def test_main():
diff --git a/Lib/test/test_zipimport.py b/Lib/test/test_zipimport.py
index eb7cbf6..4e1a845 100644
--- a/Lib/test/test_zipimport.py
+++ b/Lib/test/test_zipimport.py
@@ -12,7 +12,12 @@
 from test.test_importhooks import ImportHooksBaseTestCase, test_src, test_co
 
 import zipimport
-
+import linecache
+import doctest
+import inspect
+import StringIO
+from traceback import extract_tb, extract_stack, print_tb
+raise_src = 'def do_raise(): raise TypeError\n'
 
 # so we only run testAFakeZlib once if this test is run repeatedly
 # which happens when we look for ref leaks
@@ -54,7 +59,8 @@
 
     def setUp(self):
         # We're reusing the zip archive path, so we must clear the
-        # cached directory info.
+        # cached directory info and linecache
+        linecache.clearcache()
         zipimport._zip_directory_cache.clear()
         ImportHooksBaseTestCase.setUp(self)
 
@@ -83,6 +89,11 @@
 
             mod = __import__(".".join(modules), globals(), locals(),
                              ["__dummy__"])
+
+            call = kw.get('call')
+            if call is not None:
+                call(mod)
+
             if expected_ext:
                 file = mod.get_file()
                 self.assertEquals(file, os.path.join(TEMP_ZIP,
@@ -249,6 +260,74 @@
         self.doTest(".py", files, TESTMOD,
                     stuff="Some Stuff"*31)
 
+    def assertModuleSource(self, module):
+        self.assertEqual(inspect.getsource(module), test_src)
+
+    def testGetSource(self):
+        files = {TESTMOD + ".py": (NOW, test_src)}
+        self.doTest(".py", files, TESTMOD, call=self.assertModuleSource)
+
+    def testGetCompiledSource(self):
+        pyc = make_pyc(compile(test_src, "<???>", "exec"), NOW)
+        files = {TESTMOD + ".py": (NOW, test_src),
+                 TESTMOD + pyc_ext: (NOW, pyc)}
+        self.doTest(pyc_ext, files, TESTMOD, call=self.assertModuleSource)
+
+    def runDoctest(self, callback):
+        files = {TESTMOD + ".py": (NOW, test_src),
+                 "xyz.txt": (NOW, ">>> log.append(True)\n")}
+        self.doTest(".py", files, TESTMOD, call=callback)
+
+    def doDoctestFile(self, module):
+        log = []
+        old_master, doctest.master = doctest.master, None
+        try:
+            doctest.testfile(
+                'xyz.txt', package=module, module_relative=True,
+                globs=locals()
+            )
+        finally:
+            doctest.master = old_master
+        self.assertEqual(log,[True])
+
+    def testDoctestFile(self):
+        self.runDoctest(self.doDoctestFile)
+
+    def doDoctestSuite(self, module):
+        log = []
+        doctest.DocFileTest(
+            'xyz.txt', package=module, module_relative=True,
+            globs=locals()
+        ).run()
+        self.assertEqual(log,[True])
+
+    def testDoctestSuite(self):
+        self.runDoctest(self.doDoctestSuite)
+
+
+    def doTraceback(self, module):
+        try:
+            module.do_raise()
+        except:
+            tb = sys.exc_info()[2].tb_next
+
+            f,lno,n,line = extract_tb(tb, 1)[0]
+            self.assertEqual(line, raise_src.strip())
+
+            f,lno,n,line = extract_stack(tb.tb_frame, 1)[0]
+            self.assertEqual(line, raise_src.strip())
+
+            s = StringIO.StringIO()
+            print_tb(tb, 1, s)
+            self.failUnless(s.getvalue().endswith(raise_src))
+        else:
+            raise AssertionError("This ought to be impossible")
+
+    def testTraceback(self):
+        files = {TESTMOD + ".py": (NOW, raise_src)}
+        self.doTest(None, files, TESTMOD, call=self.doTraceback)
+
+
 class CompressedZipImportTestCase(UncompressedZipImportTestCase):
     compression = ZIP_DEFLATED
 
diff --git a/Lib/tokenize.py b/Lib/tokenize.py
index 2b40e6f..a30791c 100644
--- a/Lib/tokenize.py
+++ b/Lib/tokenize.py
@@ -182,7 +182,7 @@
     for tok in iterable:
         toknum, tokval = tok[:2]
 
-        if toknum == NAME:
+        if toknum in (NAME, NUMBER):
             tokval += ' '
 
         if toknum == INDENT:
diff --git a/Lib/traceback.py b/Lib/traceback.py
index 93a64b7..abda422 100644
--- a/Lib/traceback.py
+++ b/Lib/traceback.py
@@ -66,7 +66,7 @@
         _print(file,
                '  File "%s", line %d, in %s' % (filename,lineno,name))
         linecache.checkcache(filename)
-        line = linecache.getline(filename, lineno)
+        line = linecache.getline(filename, lineno, f.f_globals)
         if line: _print(file, '    ' + line.strip())
         tb = tb.tb_next
         n = n+1
@@ -98,7 +98,7 @@
         filename = co.co_filename
         name = co.co_name
         linecache.checkcache(filename)
-        line = linecache.getline(filename, lineno)
+        line = linecache.getline(filename, lineno, f.f_globals)
         if line: line = line.strip()
         else: line = None
         list.append((filename, lineno, name, line))
@@ -158,14 +158,14 @@
     """
     list = []
     if (type(etype) == types.ClassType
-        or issubclass(etype, Exception)):
+        or (isinstance(etype, type) and issubclass(etype, Exception))):
         stype = etype.__name__
     else:
         stype = etype
     if value is None:
         list.append(str(stype) + '\n')
     else:
-        if etype is SyntaxError:
+        if issubclass(etype, SyntaxError):
             try:
                 msg, (filename, lineno, offset, line) = value
             except:
@@ -279,7 +279,7 @@
         filename = co.co_filename
         name = co.co_name
         linecache.checkcache(filename)
-        line = linecache.getline(filename, lineno)
+        line = linecache.getline(filename, lineno, f.f_globals)
         if line: line = line.strip()
         else: line = None
         list.append((filename, lineno, name, line))
diff --git a/Lib/urllib.py b/Lib/urllib.py
index 136f42e..d65c0b0 100644
--- a/Lib/urllib.py
+++ b/Lib/urllib.py
@@ -442,9 +442,9 @@
         return addinfourl(fp, noheaders(), "gopher:" + url)
 
     def open_file(self, url):
+        """Use local file or FTP depending on form of URL."""
         if not isinstance(url, str):
             raise IOError, ('file error', 'proxy support for file protocol currently not implemented')
-        """Use local file or FTP depending on form of URL."""
         if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
             return self.open_ftp(url)
         else:
@@ -1032,7 +1032,7 @@
     global _hostprog
     if _hostprog is None:
         import re
-        _hostprog = re.compile('^//([^/]*)(.*)$')
+        _hostprog = re.compile('^//([^/?]*)(.*)$')
 
     match = _hostprog.match(url)
     if match: return match.group(1, 2)
diff --git a/Lib/urllib2.py b/Lib/urllib2.py
index 4c83bfc..ec01c8f 100644
--- a/Lib/urllib2.py
+++ b/Lib/urllib2.py
@@ -14,7 +14,7 @@
 HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
 deals with digest authentication.
 
-urlopen(url, data=None) -- basic usage is that same as original
+urlopen(url, data=None) -- basic usage is the same as original
 urllib.  pass the url and optionally data to post to an HTTP URL, and
 get a file-like object back.  One difference is that you can also pass
 a Request instance instead of URL.  Raises a URLError (subclass of
@@ -77,16 +77,13 @@
 # the handler knows that the problem was, e.g., that it didn't know
 # that hash algo that requested in the challenge, it would be good to
 # pass that information along to the client, too.
-
-# XXX to do:
-# name!
-# documentation (getting there)
-# complex proxies
-# abstract factory for opener
 # ftp errors aren't handled cleanly
-# gopher can return a socket.error
 # check digest against correct (i.e. non-apache) implementation
 
+# Possible extensions:
+# complex proxies  XXX not sure what exactly was meant by this
+# abstract factory for opener
+
 import base64
 import ftplib
 import httplib
@@ -111,15 +108,15 @@
 except ImportError:
     from StringIO import StringIO
 
-# not sure how many of these need to be gotten rid of
-from urllib import (unwrap, unquote, splittype, splithost,
+from urllib import (unwrap, unquote, splittype, splithost, quote,
      addinfourl, splitport, splitgophertype, splitquery,
      splitattr, ftpwrapper, noheaders, splituser, splitpasswd, splitvalue)
 
 # support for FileHandler, proxies via environment variables
 from urllib import localhost, url2pathname, getproxies
 
-__version__ = "2.4"
+# used in User-Agent header sent
+__version__ = sys.version[:3]
 
 _opener = None
 def urlopen(url, data=None):
@@ -330,8 +327,9 @@
         pass
 
     def _call_chain(self, chain, kind, meth_name, *args):
-        # XXX raise an exception if no one else should try to handle
-        # this url.  return None if you can't but someone else could.
+        # Handlers raise an exception if no one else should try to handle
+        # the request, or return None if they can't but another handler
+        # could.  Otherwise, they return the response.
         handlers = chain.get(kind, ())
         for handler in handlers:
             func = getattr(handler, meth_name)
@@ -507,6 +505,8 @@
             # from the user (of urllib2, in this case).  In practice,
             # essentially all clients do redirect in this case, so we
             # do the same.
+            # be conciliant with URIs containing a space
+            newurl = newurl.replace(' ', '%20')
             return Request(newurl,
                            headers=req.headers,
                            origin_req_host=req.get_origin_req_host(),
@@ -561,6 +561,80 @@
               "lead to an infinite loop.\n" \
               "The last 30x error message was:\n"
 
+
+def _parse_proxy(proxy):
+    """Return (scheme, user, password, host/port) given a URL or an authority.
+
+    If a URL is supplied, it must have an authority (host:port) component.
+    According to RFC 3986, having an authority component means the URL must
+    have two slashes after the scheme:
+
+    >>> _parse_proxy('file:/ftp.example.com/')
+    Traceback (most recent call last):
+    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'
+
+    The first three items of the returned tuple may be None.
+
+    Examples of authority parsing:
+
+    >>> _parse_proxy('proxy.example.com')
+    (None, None, None, 'proxy.example.com')
+    >>> _parse_proxy('proxy.example.com:3128')
+    (None, None, None, 'proxy.example.com:3128')
+
+    The authority component may optionally include userinfo (assumed to be
+    username:password):
+
+    >>> _parse_proxy('joe:password@proxy.example.com')
+    (None, 'joe', 'password', 'proxy.example.com')
+    >>> _parse_proxy('joe:password@proxy.example.com:3128')
+    (None, 'joe', 'password', 'proxy.example.com:3128')
+
+    Same examples, but with URLs instead:
+
+    >>> _parse_proxy('http://proxy.example.com/')
+    ('http', None, None, 'proxy.example.com')
+    >>> _parse_proxy('http://proxy.example.com:3128/')
+    ('http', None, None, 'proxy.example.com:3128')
+    >>> _parse_proxy('http://joe:password@proxy.example.com/')
+    ('http', 'joe', 'password', 'proxy.example.com')
+    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
+    ('http', 'joe', 'password', 'proxy.example.com:3128')
+
+    Everything after the authority is ignored:
+
+    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
+    ('ftp', 'joe', 'password', 'proxy.example.com')
+
+    Test for no trailing '/' case:
+
+    >>> _parse_proxy('http://joe:password@proxy.example.com')
+    ('http', 'joe', 'password', 'proxy.example.com')
+
+    """
+    from urlparse import _splitnetloc
+    scheme, r_scheme = splittype(proxy)
+    if not r_scheme.startswith("/"):
+        # authority
+        scheme = None
+        authority = proxy
+    else:
+        # URL
+        if not r_scheme.startswith("//"):
+            raise ValueError("proxy URL with no authority: %r" % proxy)
+        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.
+        # and 3.3.), path is empty or starts with '/'
+        end = r_scheme.find("/", 2)
+        if end == -1:
+            end = None
+        authority = r_scheme[2:end]
+    userinfo, hostport = splituser(authority)
+    if userinfo is not None:
+        user, password = splitpasswd(userinfo)
+    else:
+        user = password = None
+    return scheme, user, password, hostport
+
 class ProxyHandler(BaseHandler):
     # Proxies must be in front
     handler_order = 100
@@ -577,76 +651,27 @@
 
     def proxy_open(self, req, proxy, type):
         orig_type = req.get_type()
-        type, r_type = splittype(proxy)
-        if not type or r_type.isdigit():
-            # proxy is specified without protocol
-            type = orig_type
-            host = proxy
-        else:
-            host, r_host = splithost(r_type)
-        user_pass, host = splituser(host)
-        user, password = splitpasswd(user_pass)
+        proxy_type, user, password, hostport = _parse_proxy(proxy)
+        if proxy_type is None:
+            proxy_type = orig_type
         if user and password:
-            user, password = user_pass.split(':', 1)
-            user_pass = base64.encodestring('%s:%s' % (unquote(user),
-                                            unquote(password))).strip()
-            req.add_header('Proxy-authorization', 'Basic ' + user_pass)
-        host = unquote(host)
-        req.set_proxy(host, type)
-        if orig_type == type:
+            user_pass = '%s:%s' % (unquote(user), unquote(password))
+            creds = base64.encodestring(user_pass).strip()
+            req.add_header('Proxy-authorization', 'Basic ' + creds)
+        hostport = unquote(hostport)
+        req.set_proxy(hostport, proxy_type)
+        if orig_type == proxy_type:
             # let other handlers take care of it
-            # XXX this only makes sense if the proxy is before the
-            # other handlers
             return None
         else:
             # need to start over, because the other handlers don't
             # grok the proxy's URL type
+            # e.g. if we have a constructor arg proxies like so:
+            # {'http': 'ftp://proxy.example.com'}, we may end up turning
+            # a request for http://acme.example.com/a into one for
+            # ftp://proxy.example.com/a
             return self.parent.open(req)
 
-# feature suggested by Duncan Booth
-# XXX custom is not a good name
-class CustomProxy:
-    # either pass a function to the constructor or override handle
-    def __init__(self, proto, func=None, proxy_addr=None):
-        self.proto = proto
-        self.func = func
-        self.addr = proxy_addr
-
-    def handle(self, req):
-        if self.func and self.func(req):
-            return 1
-
-    def get_proxy(self):
-        return self.addr
-
-class CustomProxyHandler(BaseHandler):
-    # Proxies must be in front
-    handler_order = 100
-
-    def __init__(self, *proxies):
-        self.proxies = {}
-
-    def proxy_open(self, req):
-        proto = req.get_type()
-        try:
-            proxies = self.proxies[proto]
-        except KeyError:
-            return None
-        for p in proxies:
-            if p.handle(req):
-                req.set_proxy(p.get_proxy())
-                return self.parent.open(req)
-        return None
-
-    def do_proxy(self, p, req):
-        return self.parent.open(req)
-
-    def add_proxy(self, cpo):
-        if cpo.proto in self.proxies:
-            self.proxies[cpo.proto].append(cpo)
-        else:
-            self.proxies[cpo.proto] = [cpo]
-
 class HTTPPasswordMgr:
     def __init__(self):
         self.passwd = {}
@@ -1128,8 +1153,11 @@
     names = None
     def get_names(self):
         if FileHandler.names is None:
-            FileHandler.names = (socket.gethostbyname('localhost'),
-                                 socket.gethostbyname(socket.gethostname()))
+            try:
+                FileHandler.names = (socket.gethostbyname('localhost'),
+                                    socket.gethostbyname(socket.gethostname()))
+            except socket.gaierror:
+                FileHandler.names = (socket.gethostbyname('localhost'),)
         return FileHandler.names
 
     # not entirely sure what the rules are here
@@ -1258,6 +1286,7 @@
 
 class GopherHandler(BaseHandler):
     def gopher_open(self, req):
+        # XXX can raise socket.error
         import gopherlib  # this raises DeprecationWarning in 2.5
         host = req.get_host()
         if not host:
@@ -1273,25 +1302,3 @@
         else:
             fp = gopherlib.send_selector(selector, host)
         return addinfourl(fp, noheaders(), req.get_full_url())
-
-#bleck! don't use this yet
-class OpenerFactory:
-
-    default_handlers = [UnknownHandler, HTTPHandler,
-                        HTTPDefaultErrorHandler, HTTPRedirectHandler,
-                        FTPHandler, FileHandler]
-    handlers = []
-    replacement_handlers = []
-
-    def add_handler(self, h):
-        self.handlers = self.handlers + [h]
-
-    def replace_handler(self, h):
-        pass
-
-    def build_opener(self):
-        opener = OpenerDirector()
-        for ph in self.default_handlers:
-            if inspect.isclass(ph):
-                ph = ph()
-            opener.add_handler(ph)
diff --git a/Lib/urlparse.py b/Lib/urlparse.py
index 8b75051..eade040 100644
--- a/Lib/urlparse.py
+++ b/Lib/urlparse.py
@@ -16,12 +16,12 @@
                'snews', 'prospero', 'rtsp', 'rtspu', 'rsync', '',
                'svn', 'svn+ssh', 'sftp']
 non_hierarchical = ['gopher', 'hdl', 'mailto', 'news',
-                    'telnet', 'wais', 'imap', 'snews', 'sip']
+                    'telnet', 'wais', 'imap', 'snews', 'sip', 'sips']
 uses_params = ['ftp', 'hdl', 'prospero', 'http', 'imap',
-               'https', 'shttp', 'rtsp', 'rtspu', 'sip',
+               'https', 'shttp', 'rtsp', 'rtspu', 'sip', 'sips',
                'mms', '', 'sftp']
 uses_query = ['http', 'wais', 'imap', 'https', 'shttp', 'mms',
-              'gopher', 'rtsp', 'rtspu', 'sip', '']
+              'gopher', 'rtsp', 'rtspu', 'sip', 'sips', '']
 uses_fragment = ['ftp', 'hdl', 'http', 'gopher', 'news',
                  'nntp', 'wais', 'https', 'shttp', 'snews',
                  'file', 'prospero', '']
@@ -41,7 +41,111 @@
     _parse_cache = {}
 
 
-def urlparse(url, scheme='', allow_fragments=1):
+class BaseResult(tuple):
+    """Base class for the parsed result objects.
+
+    This provides the attributes shared by the two derived result
+    objects as read-only properties.  The derived classes are
+    responsible for checking the right number of arguments were
+    supplied to the constructor.
+
+    """
+
+    __slots__ = ()
+
+    # Attributes that access the basic components of the URL:
+
+    @property
+    def scheme(self):
+        return self[0]
+
+    @property
+    def netloc(self):
+        return self[1]
+
+    @property
+    def path(self):
+        return self[2]
+
+    @property
+    def query(self):
+        return self[-2]
+
+    @property
+    def fragment(self):
+        return self[-1]
+
+    # Additional attributes that provide access to parsed-out portions
+    # of the netloc:
+
+    @property
+    def username(self):
+        netloc = self.netloc
+        if "@" in netloc:
+            userinfo = netloc.split("@", 1)[0]
+            if ":" in userinfo:
+                userinfo = userinfo.split(":", 1)[0]
+            return userinfo
+        return None
+
+    @property
+    def password(self):
+        netloc = self.netloc
+        if "@" in netloc:
+            userinfo = netloc.split("@", 1)[0]
+            if ":" in userinfo:
+                return userinfo.split(":", 1)[1]
+        return None
+
+    @property
+    def hostname(self):
+        netloc = self.netloc
+        if "@" in netloc:
+            netloc = netloc.split("@", 1)[1]
+        if ":" in netloc:
+            netloc = netloc.split(":", 1)[0]
+        return netloc.lower() or None
+
+    @property
+    def port(self):
+        netloc = self.netloc
+        if "@" in netloc:
+            netloc = netloc.split("@", 1)[1]
+        if ":" in netloc:
+            port = netloc.split(":", 1)[1]
+            return int(port, 10)
+        return None
+
+
+class SplitResult(BaseResult):
+
+    __slots__ = ()
+
+    def __new__(cls, scheme, netloc, path, query, fragment):
+        return BaseResult.__new__(
+            cls, (scheme, netloc, path, query, fragment))
+
+    def geturl(self):
+        return urlunsplit(self)
+
+
+class ParseResult(BaseResult):
+
+    __slots__ = ()
+
+    def __new__(cls, scheme, netloc, path, params, query, fragment):
+        return BaseResult.__new__(
+            cls, (scheme, netloc, path, params, query, fragment))
+
+    @property
+    def params(self):
+        return self[3]
+
+    def geturl(self):
+        return urlunparse(self)
+
+
+def urlparse(url, scheme='', allow_fragments=True):
     """Parse a URL into 6 components:
     <scheme>://<netloc>/<path>;<params>?<query>#<fragment>
     Return a 6-tuple: (scheme, netloc, path, params, query, fragment).
@@ -53,7 +157,7 @@
         url, params = _splitparams(url)
     else:
         params = ''
-    return scheme, netloc, url, params, query, fragment
+    return ParseResult(scheme, netloc, url, params, query, fragment)
 
 def _splitparams(url):
     if '/'  in url:
@@ -73,12 +177,13 @@
         delim = len(url)
     return url[start:delim], url[delim:]
 
-def urlsplit(url, scheme='', allow_fragments=1):
+def urlsplit(url, scheme='', allow_fragments=True):
     """Parse a URL into 5 components:
     <scheme>://<netloc>/<path>?<query>#<fragment>
     Return a 5-tuple: (scheme, netloc, path, query, fragment).
     Note that we don't break the components up in smaller bits
     (e.g. netloc is a single string) and we don't expand % escapes."""
+    allow_fragments = bool(allow_fragments)
     key = url, scheme, allow_fragments
     cached = _parse_cache.get(key, None)
     if cached:
@@ -97,9 +202,9 @@
                 url, fragment = url.split('#', 1)
             if '?' in url:
                 url, query = url.split('?', 1)
-            tuple = scheme, netloc, url, query, fragment
-            _parse_cache[key] = tuple
-            return tuple
+            v = SplitResult(scheme, netloc, url, query, fragment)
+            _parse_cache[key] = v
+            return v
         for c in url[:i]:
             if c not in scheme_chars:
                 break
@@ -111,9 +216,9 @@
         url, fragment = url.split('#', 1)
     if scheme in uses_query and '?' in url:
         url, query = url.split('?', 1)
-    tuple = scheme, netloc, url, query, fragment
-    _parse_cache[key] = tuple
-    return tuple
+    v = SplitResult(scheme, netloc, url, query, fragment)
+    _parse_cache[key] = v
+    return v
 
 def urlunparse((scheme, netloc, url, params, query, fragment)):
     """Put a parsed URL back together again.  This may result in a
@@ -136,7 +241,7 @@
         url = url + '#' + fragment
     return url
 
-def urljoin(base, url, allow_fragments = 1):
+def urljoin(base, url, allow_fragments=True):
     """Join a base URL and a possibly relative URL to form an absolute
     interpretation of the latter."""
     if not base:
diff --git a/Lib/uu.py b/Lib/uu.py
index 40e8bf0..3ccedb0 100755
--- a/Lib/uu.py
+++ b/Lib/uu.py
@@ -132,7 +132,7 @@
             data = binascii.a2b_uu(s)
         except binascii.Error, v:
             # Workaround for broken uuencoders by /Fredrik Lundh
-            nbytes = (((ord(s[0])-32) & 63) * 4 + 5) / 3
+            nbytes = (((ord(s[0])-32) & 63) * 4 + 5) // 3
             data = binascii.a2b_uu(s[:nbytes])
             if not quiet:
                 sys.stderr.write("Warning: %s\n" % v)
@@ -151,7 +151,7 @@
 
     (options, args) = parser.parse_args()
     if len(args) > 2:
-        p.error('incorrect number of arguments')
+        parser.error('incorrect number of arguments')
         sys.exit(1)
 
     input = sys.stdin
diff --git a/Lib/warnings.py b/Lib/warnings.py
index e622b9a..bc0b818 100644
--- a/Lib/warnings.py
+++ b/Lib/warnings.py
@@ -58,10 +58,11 @@
         if not filename:
             filename = module
     registry = globals.setdefault("__warningregistry__", {})
-    warn_explicit(message, category, filename, lineno, module, registry)
+    warn_explicit(message, category, filename, lineno, module, registry,
+                  globals)
 
 def warn_explicit(message, category, filename, lineno,
-                  module=None, registry=None):
+                  module=None, registry=None, module_globals=None):
     if module is None:
         module = filename or "<unknown>"
         if module[-3:].lower() == ".py":
@@ -92,6 +93,11 @@
     if action == "ignore":
         registry[key] = 1
         return
+
+    # Prime the linecache for formatting, in case the
+    # "file" is actually in a zipfile or something.
+    linecache.getlines(filename, module_globals)
+
     if action == "error":
         raise message
     # Other actions
diff --git a/Lib/xmlcore/dom/expatbuilder.py b/Lib/xmlcore/dom/expatbuilder.py
index 81d9c2b..32ffa41 100644
--- a/Lib/xmlcore/dom/expatbuilder.py
+++ b/Lib/xmlcore/dom/expatbuilder.py
@@ -59,7 +59,7 @@
     "NMTOKENS": minidom.TypeInfo(None, "nmtokens"),
     }
 
-class ElementInfo(NewStyle):
+class ElementInfo(object):
     __slots__ = '_attr_info', '_model', 'tagName'
 
     def __init__(self, tagName, model=None):
@@ -460,7 +460,7 @@
 # where allowed.
 _ALLOWED_FILTER_RETURNS = (FILTER_ACCEPT, FILTER_REJECT, FILTER_SKIP)
 
-class FilterVisibilityController(NewStyle):
+class FilterVisibilityController(object):
     """Wrapper around a DOMBuilderFilter which implements the checks
     to make the whatToShow filter attribute work."""
 
@@ -518,7 +518,7 @@
         }
 
 
-class FilterCrutch(NewStyle):
+class FilterCrutch(object):
     __slots__ = '_builder', '_level', '_old_start', '_old_end'
 
     def __init__(self, builder):
@@ -908,7 +908,7 @@
         raise ParseEscape()
 
 
-def parse(file, namespaces=1):
+def parse(file, namespaces=True):
     """Parse a document, returning the resulting Document node.
 
     'file' may be either a file name or an open file object.
@@ -929,7 +929,7 @@
     return result
 
 
-def parseString(string, namespaces=1):
+def parseString(string, namespaces=True):
     """Parse a document from a string, returning the resulting
     Document node.
     """
@@ -940,7 +940,7 @@
     return builder.parseString(string)
 
 
-def parseFragment(file, context, namespaces=1):
+def parseFragment(file, context, namespaces=True):
     """Parse a fragment of a document, given the context from which it
     was originally extracted.  context should be the parent of the
     node(s) which are in the fragment.
@@ -963,7 +963,7 @@
     return result
 
 
-def parseFragmentString(string, context, namespaces=1):
+def parseFragmentString(string, context, namespaces=True):
     """Parse a fragment of a document from a string, given the context
     from which it was originally extracted.  context should be the
     parent of the node(s) which are in the fragment.
diff --git a/Lib/xmlcore/dom/minicompat.py b/Lib/xmlcore/dom/minicompat.py
index 364ca45..f99b7fe 100644
--- a/Lib/xmlcore/dom/minicompat.py
+++ b/Lib/xmlcore/dom/minicompat.py
@@ -4,10 +4,6 @@
 #
 # The following names are defined:
 #
-#   isinstance    -- version of the isinstance() function that accepts
-#                    tuples as the second parameter regardless of the
-#                    Python version
-#
 #   NodeList      -- lightest possible NodeList implementation
 #
 #   EmptyNodeList -- lightest possible NodeList that is guarateed to
@@ -15,8 +11,6 @@
 #
 #   StringTypes   -- tuple of defined string types
 #
-#   GetattrMagic  -- base class used to make _get_<attr> be magically
-#                    invoked when available
 #   defproperty   -- function used in conjunction with GetattrMagic;
 #                    using these together is needed to make them work
 #                    as efficiently as possible in both Python 2.2+
@@ -41,14 +35,8 @@
 #
 #                    defproperty() should be used for each version of
 #                    the relevant _get_<property>() function.
-#
-#   NewStyle      -- base class to cause __slots__ to be honored in
-#                    the new world
-#
-#   True, False   -- only for Python 2.2 and earlier
 
-__all__ = ["NodeList", "EmptyNodeList", "NewStyle",
-           "StringTypes", "defproperty", "GetattrMagic"]
+__all__ = ["NodeList", "EmptyNodeList", "StringTypes", "defproperty"]
 
 import xmlcore.dom
 
@@ -60,125 +48,62 @@
     StringTypes = type(''), type(unicode(''))
 
 
-# define True and False only if not defined as built-ins
-try:
-    True
-except NameError:
-    True = 1
-    False = 0
-    __all__.extend(["True", "False"])
+class NodeList(list):
+    __slots__ = ()
+
+    def item(self, index):
+        if 0 <= index < len(self):
+            return self[index]
+
+    def _get_length(self):
+        return len(self)
+
+    def _set_length(self, value):
+        raise xml.dom.NoModificationAllowedErr(
+            "attempt to modify read-only attribute 'length'")
+
+    length = property(_get_length, _set_length,
+                      doc="The number of nodes in the NodeList.")
+
+    def __getstate__(self):
+        return list(self)
+
+    def __setstate__(self, state):
+        self[:] = state
+
+class EmptyNodeList(tuple):
+    __slots__ = ()
+
+    def __add__(self, other):
+        NL = NodeList()
+        NL.extend(other)
+        return NL
+
+    def __radd__(self, other):
+        NL = NodeList()
+        NL.extend(other)
+        return NL
+
+    def item(self, index):
+        return None
+
+    def _get_length(self):
+        return 0
+
+    def _set_length(self, value):
+        raise xml.dom.NoModificationAllowedErr(
+            "attempt to modify read-only attribute 'length'")
+
+    length = property(_get_length, _set_length,
+                      doc="The number of nodes in the NodeList.")
 
 
-try:
-    isinstance('', StringTypes)
-except TypeError:
-    #
-    # Wrap isinstance() to make it compatible with the version in
-    # Python 2.2 and newer.
-    #
-    _isinstance = isinstance
-    def isinstance(obj, type_or_seq):
-        try:
-            return _isinstance(obj, type_or_seq)
-        except TypeError:
-            for t in type_or_seq:
-                if _isinstance(obj, t):
-                    return 1
-            return 0
-    __all__.append("isinstance")
-
-
-if list is type([]):
-    class NodeList(list):
-        __slots__ = ()
-
-        def item(self, index):
-            if 0 <= index < len(self):
-                return self[index]
-
-        def _get_length(self):
-            return len(self)
-
-        def _set_length(self, value):
-            raise xmlcore.dom.NoModificationAllowedErr(
-                "attempt to modify read-only attribute 'length'")
-
-        length = property(_get_length, _set_length,
-                          doc="The number of nodes in the NodeList.")
-
-        def __getstate__(self):
-            return list(self)
-
-        def __setstate__(self, state):
-            self[:] = state
-
-    class EmptyNodeList(tuple):
-        __slots__ = ()
-
-        def __add__(self, other):
-            NL = NodeList()
-            NL.extend(other)
-            return NL
-
-        def __radd__(self, other):
-            NL = NodeList()
-            NL.extend(other)
-            return NL
-
-        def item(self, index):
-            return None
-
-        def _get_length(self):
-            return 0
-
-        def _set_length(self, value):
-            raise xmlcore.dom.NoModificationAllowedErr(
-                "attempt to modify read-only attribute 'length'")
-
-        length = property(_get_length, _set_length,
-                          doc="The number of nodes in the NodeList.")
-
-else:
-    def NodeList():
-        return []
-
-    def EmptyNodeList():
-        return []
-
-
-try:
-    property
-except NameError:
-    def defproperty(klass, name, doc):
-        # taken care of by the base __getattr__()
-        pass
-
-    class GetattrMagic:
-        def __getattr__(self, key):
-            if key.startswith("_"):
-                raise AttributeError, key
-
-            try:
-                get = getattr(self, "_get_" + key)
-            except AttributeError:
-                raise AttributeError, key
-            return get()
-
-    class NewStyle:
-        pass
-
-else:
-    def defproperty(klass, name, doc):
-        get = getattr(klass, ("_get_" + name)).im_func
-        def set(self, value, name=name):
-            raise xmlcore.dom.NoModificationAllowedErr(
-                "attempt to modify read-only attribute " + repr(name))
-        assert not hasattr(klass, "_set_" + name), \
-               "expected not to find _set_" + name
-        prop = property(get, set, doc=doc)
-        setattr(klass, name, prop)
-
-    class GetattrMagic:
-        pass
-
-    NewStyle = object
+def defproperty(klass, name, doc):
+    get = getattr(klass, ("_get_" + name)).im_func
+    def set(self, value, name=name):
+        raise xml.dom.NoModificationAllowedErr(
+            "attempt to modify read-only attribute " + repr(name))
+    assert not hasattr(klass, "_set_" + name), \
+           "expected not to find _set_" + name
+    prop = property(get, set, doc=doc)
+    setattr(klass, name, prop)
diff --git a/Lib/xmlcore/dom/minidom.py b/Lib/xmlcore/dom/minidom.py
index 54620e1..a8abd14 100644
--- a/Lib/xmlcore/dom/minidom.py
+++ b/Lib/xmlcore/dom/minidom.py
@@ -20,8 +20,6 @@
 from xmlcore.dom.minicompat import *
 from xmlcore.dom.xmlbuilder import DOMImplementationLS, DocumentLS
 
-_TupleType = type(())
-
 # This is used by the ID-cache invalidation checks; the list isn't
 # actually complete, since the nodes being checked will never be the
 # DOCUMENT_NODE or DOCUMENT_FRAGMENT_NODE.  (The node being checked is
@@ -31,7 +29,7 @@
                             xmlcore.dom.Node.ENTITY_REFERENCE_NODE)
 
 
-class Node(xmlcore.dom.Node, GetattrMagic):
+class Node(xmlcore.dom.Node):
     namespaceURI = None # this is non-null only for elements and attributes
     parentNode = None
     ownerDocument = None
@@ -459,7 +457,7 @@
 defproperty(Attr, "schemaType", doc="Schema type for this attribute.")
 
 
-class NamedNodeMap(NewStyle, GetattrMagic):
+class NamedNodeMap(object):
     """The attribute list is a transient interface to the underlying
     dictionaries.  Mutations here will change the underlying element's
     dictionary.
@@ -523,7 +521,7 @@
             return cmp(id(self), id(other))
 
     def __getitem__(self, attname_or_tuple):
-        if isinstance(attname_or_tuple, _TupleType):
+        if isinstance(attname_or_tuple, tuple):
             return self._attrsNS[attname_or_tuple]
         else:
             return self._attrs[attname_or_tuple]
@@ -613,7 +611,7 @@
 AttributeList = NamedNodeMap
 
 
-class TypeInfo(NewStyle):
+class TypeInfo(object):
     __slots__ = 'namespace', 'name'
 
     def __init__(self, namespace, name):
@@ -1146,7 +1144,7 @@
         writer.write("<![CDATA[%s]]>" % self.data)
 
 
-class ReadOnlySequentialNamedNodeMap(NewStyle, GetattrMagic):
+class ReadOnlySequentialNamedNodeMap(object):
     __slots__ = '_seq',
 
     def __init__(self, seq=()):
@@ -1170,7 +1168,7 @@
                 return n
 
     def __getitem__(self, name_or_tuple):
-        if isinstance(name_or_tuple, _TupleType):
+        if isinstance(name_or_tuple, tuple):
             node = self.getNamedItemNS(*name_or_tuple)
         else:
             node = self.getNamedItem(name_or_tuple)
@@ -1418,7 +1416,7 @@
     def _create_document(self):
         return Document()
 
-class ElementInfo(NewStyle):
+class ElementInfo(object):
     """Object that represents content-model information for an element.
 
     This implementation is not expected to be used in practice; DOM
diff --git a/Lib/xmlcore/dom/xmlbuilder.py b/Lib/xmlcore/dom/xmlbuilder.py
index d58c723..6566d3c 100644
--- a/Lib/xmlcore/dom/xmlbuilder.py
+++ b/Lib/xmlcore/dom/xmlbuilder.py
@@ -3,8 +3,6 @@
 import copy
 import xmlcore.dom
 
-from xmlcore.dom.minicompat import *
-
 from xmlcore.dom.NodeFilter import NodeFilter
 
 
@@ -211,7 +209,7 @@
     return name.lower().replace('-', '_')
 
 
-class DOMEntityResolver(NewStyle):
+class DOMEntityResolver(object):
     __slots__ = '_opener',
 
     def resolveEntity(self, publicId, systemId):
@@ -255,7 +253,7 @@
                     return param.split("=", 1)[1].lower()
 
 
-class DOMInputSource(NewStyle):
+class DOMInputSource(object):
     __slots__ = ('byteStream', 'characterStream', 'stringData',
                  'encoding', 'publicId', 'systemId', 'baseURI')
 
diff --git a/Mac/Modules/carbonevt/_CarbonEvtmodule.c b/Mac/Modules/carbonevt/_CarbonEvtmodule.c
index fee43cc..fd4b266 100755
--- a/Mac/Modules/carbonevt/_CarbonEvtmodule.c
+++ b/Mac/Modules/carbonevt/_CarbonEvtmodule.c
@@ -40,7 +40,7 @@
                         PyMac_GetOSType, &(out->eventClass),
                         &(out->eventKind)))
                 return 1;
-        return NULL;
+        return 0;
 }
 
 /********** end EventTypeSpec *******/
@@ -78,7 +78,7 @@
 {
         if (PyArg_ParseTuple(v, "ll", &out->signature, &out->id))
                 return 1;
-        return NULL;
+        return 0;
 }
 
 /********** end EventHotKeyID *******/
diff --git a/Mac/Modules/cf/_CFmodule.c b/Mac/Modules/cf/_CFmodule.c
index 22e9676..5f934c2 100644
--- a/Mac/Modules/cf/_CFmodule.c
+++ b/Mac/Modules/cf/_CFmodule.c
@@ -1329,7 +1329,7 @@
 	if (v == Py_None) { *p_itself = NULL; return 1; }
 	if (PyString_Check(v)) {
 	    char *cStr;
-	    int cLen;
+	    Py_ssize_t cLen;
 	    if( PyString_AsStringAndSize(v, &cStr, &cLen) < 0 ) return 0;
 	    *p_itself = CFDataCreate((CFAllocatorRef)NULL, (unsigned char *)cStr, cLen);
 	    return 1;
@@ -1826,7 +1826,7 @@
 	if (PyString_Check(v)) {
 	    char *cStr;
 	    if (!PyArg_Parse(v, "es", "ascii", &cStr))
-	        return NULL;
+	        return 0;
 	        *p_itself = CFStringCreateWithCString((CFAllocatorRef)NULL, cStr, kCFStringEncodingASCII);
 	        return 1;
 	}
diff --git a/Mac/Modules/gestaltmodule.c b/Mac/Modules/gestaltmodule.c
index f82687e..6d8673f 100644
--- a/Mac/Modules/gestaltmodule.c
+++ b/Mac/Modules/gestaltmodule.c
@@ -33,17 +33,10 @@
 gestalt_gestalt(PyObject *self, PyObject *args)
 {
 	OSErr iErr;
-	char *str;
-	int size;
 	OSType selector;
 	long response;
-	if (!PyArg_Parse(args, "s#", &str, &size))
+	if (!PyArg_Parse(args, "O&", PyMac_GetOSType, &selector))
 		return NULL;
-	if (size != 4) {
-		PyErr_SetString(PyExc_TypeError, "gestalt arg must be 4-char string");
-		return NULL;
-	}
-	selector = *(OSType*)str;
 	iErr = Gestalt ( selector, &response );
 	if (iErr != 0) 
 		return PyMac_Error(iErr);
diff --git a/Mac/Modules/qd/_Qdmodule.c b/Mac/Modules/qd/_Qdmodule.c
index 118dfc4..bfb040f 100644
--- a/Mac/Modules/qd/_Qdmodule.c
+++ b/Mac/Modules/qd/_Qdmodule.c
@@ -5824,7 +5824,6 @@
 {
 	PyObject *_res = NULL;
 	char *textBuf__in__;
-	int textBuf__len__;
 	int textBuf__in_len__;
 	short firstByte;
 	short byteCount;
@@ -5885,7 +5884,6 @@
 	PyObject *_res = NULL;
 	short _rv;
 	char *textBuf__in__;
-	int textBuf__len__;
 	int textBuf__in_len__;
 	short firstByte;
 	short byteCount;
@@ -6471,7 +6469,6 @@
 {
 	PyObject *_res = NULL;
 	char *textBuf__in__;
-	int textBuf__len__;
 	int textBuf__in_len__;
 	short firstByte;
 	short byteCount;
@@ -6534,7 +6531,7 @@
 	        return NULL;
 	if ( PyString_Size(source) != sizeof(BitMap) && PyString_Size(source) != sizeof(PixMap) ) {
 	        PyErr_Format(PyExc_TypeError,
-	                "Argument size was %d, should be %d (sizeof BitMap) or %d (sizeof PixMap)",
+	                "Argument size was %ld, should be %lu (sizeof BitMap) or %lu (sizeof PixMap)",
 	                PyString_Size(source), sizeof(BitMap), sizeof(PixMap));
 	        return NULL;
 	}
diff --git a/Makefile.pre.in b/Makefile.pre.in
index 056b578..f8a7481 100644
--- a/Makefile.pre.in
+++ b/Makefile.pre.in
@@ -30,6 +30,7 @@
 
 CC=		@CC@
 CXX=		@CXX@
+MAINCC=		@MAINCC@
 LINKCC=		@LINKCC@
 AR=		@AR@
 RANLIB=		@RANLIB@
@@ -157,7 +158,6 @@
 SYSLIBS=	$(LIBM) $(LIBC)
 SHLIBS=		@SHLIBS@
 
-MAINOBJ=	@MAINOBJ@
 THREADOBJ=	@THREADOBJ@
 DLINCLDIR=	@DLINCLDIR@
 DYNLOADFILE=	@DYNLOADFILE@
@@ -219,13 +219,15 @@
 
 ##########################################################################
 # AST
-AST_H=		$(srcdir)/Include/Python-ast.h
-AST_C=		$(srcdir)/Python/Python-ast.c
+AST_H_DIR=	$(srcdir)/Include
+AST_H=		$(AST_H_DIR)/Python-ast.h
+AST_C_DIR=	$(srcdir)/Python
+AST_C=		$(AST_C_DIR)/Python-ast.c
 AST_ASDL=	$(srcdir)/Parser/Python.asdl
 
 ASDLGEN_FILES=	$(srcdir)/Parser/asdl.py $(srcdir)/Parser/asdl_c.py
 # XXX Note that a build now requires Python exist before the build starts
-ASDLGEN=	$(srcdir)/Parser/asdl_c.py -h $(srcdir)/Include -c $(srcdir)/Python
+ASDLGEN=	$(srcdir)/Parser/asdl_c.py
 
 ##########################################################################
 # Python
@@ -326,9 +328,9 @@
 all:		$(BUILDPYTHON) oldsharedmods sharedmods
 
 # Build the interpreter
-$(BUILDPYTHON):	Modules/$(MAINOBJ) $(LIBRARY) $(LDLIBRARY)
+$(BUILDPYTHON):	Modules/python.o $(LIBRARY) $(LDLIBRARY)
 		$(LINKCC) $(LDFLAGS) $(LINKFORSHARED) -o $@ \
-			Modules/$(MAINOBJ) \
+			Modules/python.o \
 			$(BLDLIBRARY) $(LIBS) $(MODLIBS) $(SYSLIBS) $(LDLAST)
 
 platform: $(BUILDPYTHON)
@@ -448,8 +450,8 @@
 		-DVPATH='"$(VPATH)"' \
 		-o $@ $(srcdir)/Modules/getpath.c
 
-Modules/ccpython.o: $(srcdir)/Modules/ccpython.cc
-	$(CXX) -c $(PY_CFLAGS) -o $@ $(srcdir)/Modules/ccpython.cc
+Modules/python.o: $(srcdir)/Modules/python.c
+	$(MAINCC) -c $(PY_CFLAGS) -o $@ $(srcdir)/Modules/python.c
 
 
 $(GRAMMAR_H) $(GRAMMAR_C): $(PGEN) $(GRAMMAR_INPUT)
@@ -465,9 +467,12 @@
 
 Parser/tokenizer_pgen.o:	$(srcdir)/Parser/tokenizer.c
 
-$(AST_H) $(AST_C): $(AST_ASDL) $(ASDLGEN_FILES)
-	$(ASDLGEN) $(AST_ASDL)
+$(AST_H): $(AST_ASDL) $(ASDLGEN_FILES)
+	$(ASDLGEN) -h $(AST_H_DIR) $(AST_ASDL)
 
+$(AST_C): $(AST_ASDL) $(ASDLGEN_FILES)
+	$(ASDLGEN) -c $(AST_C_DIR) $(AST_ASDL)
+	
 Python/compile.o Python/symtable.o: $(GRAMMAR_H) $(AST_H)
 
 Python/getplatform.o: $(srcdir)/Python/getplatform.c
@@ -537,7 +542,7 @@
 		Include/weakrefobject.h \
 		pyconfig.h
 
-$(LIBRARY_OBJS) $(MODOBJS) Modules/$(MAINOBJ): $(PYTHON_HEADERS)
+$(LIBRARY_OBJS) $(MODOBJS) Modules/python.o: $(PYTHON_HEADERS)
 
 
 ######################################################################
@@ -680,9 +685,13 @@
 PLATMACPATH=:plat-mac:plat-mac/lib-scriptpackages
 LIBSUBDIRS=	lib-old lib-tk site-packages test test/output test/data \
 		test/decimaltestdata \
-		encodings email email/test email/test/data compiler hotshot \
-		logging bsddb bsddb/test csv ctypes idlelib idlelib/Icons \
+		encodings compiler hotshot \
+		email email/mime email/test email/test/data \
+		sqlite3 sqlite3/test \
+		logging bsddb bsddb/test csv \
+		ctypes ctypes/test ctypes/macholib idlelib idlelib/Icons \
 		distutils distutils/command distutils/tests $(XMLLIBSUBDIRS) \
+		setuptools setuptools/command setuptools/tests setuptools.egg-info \
 		curses $(MACHDEPS)
 libinstall:	$(BUILDPYTHON) $(srcdir)/Lib/$(PLATDIR)
 	@for i in $(SCRIPTDIR) $(LIBDEST); \
@@ -718,6 +727,7 @@
 	do \
 		a=$(srcdir)/Lib/$$d; \
 		if test ! -d $$a; then continue; else true; fi; \
+		if test `ls $$a | wc -l` -lt 1; then continue; fi; \
 		b=$(LIBDEST)/$$d; \
 		for i in $$a/*; \
 		do \
@@ -809,7 +819,7 @@
 		fi; \
 	fi
 	$(INSTALL_DATA) Modules/config.c $(DESTDIR)$(LIBPL)/config.c
-	$(INSTALL_DATA) Modules/$(MAINOBJ) $(DESTDIR)$(LIBPL)/$(MAINOBJ)
+	$(INSTALL_DATA) Modules/python.o $(DESTDIR)$(LIBPL)/python.o
 	$(INSTALL_DATA) $(srcdir)/Modules/config.c.in $(DESTDIR)$(LIBPL)/config.c.in
 	$(INSTALL_DATA) Makefile $(DESTDIR)$(LIBPL)/Makefile
 	$(INSTALL_DATA) Modules/Setup $(DESTDIR)$(LIBPL)/Setup
@@ -817,6 +827,11 @@
 	$(INSTALL_DATA) Modules/Setup.config $(DESTDIR)$(LIBPL)/Setup.config
 	$(INSTALL_SCRIPT) $(srcdir)/Modules/makesetup $(DESTDIR)$(LIBPL)/makesetup
 	$(INSTALL_SCRIPT) $(srcdir)/install-sh $(DESTDIR)$(LIBPL)/install-sh
+	# Substitution happens here, as the completely-expanded BINDIR
+	# is not available in configure
+	sed -e "s,@BINDIR@,$(BINDIR)," < $(srcdir)/Misc/python-config.in >python-config
+	$(INSTALL_SCRIPT) python-config $(BINDIR)/python-config
+	rm python-config
 	@if [ -s Modules/python.exp -a \
 		"`echo $(MACHDEP) | sed 's/^\(...\).*/\1/'`" = "aix" ]; then \
 		echo; echo "Installing support files for building shared extension modules on AIX:"; \
@@ -942,6 +957,10 @@
 .c.o:
 	$(CC) -c $(PY_CFLAGS) -o $@ $<
 
+# Run reindent on the library
+reindent:
+	./python$(EXEEXT) $(srcdir)/Tools/scripts/reindent.py -r $(srcdir)/Lib
+
 # Rerun configure with the same options as it was run last time,
 # provided the config.status script exists
 recheck:
@@ -969,13 +988,14 @@
 
 # Sanitation targets -- clean leaves libraries, executables and tags
 # files, which clobber removes those as well
+pycremoval:
+	find $(srcdir) -name '*.py[co]' -exec rm -f {} ';'
 
-clean:
+clean: pycremoval
 	find . -name '*.o' -exec rm -f {} ';'
 	find . -name '*.s[ol]' -exec rm -f {} ';'
-	find $(srcdir) -name '*.py[co]' -exec rm -f {} ';'
-	find $(srcdir) -name 'fficonfig.h' -exec rm -f {} ';'
-	find $(srcdir) -name 'fficonfig.py' -exec rm -f {} ';'
+	find $(srcdir)/build -name 'fficonfig.h' -exec rm -f {} ';' || true
+	find $(srcdir)/build -name 'fficonfig.py' -exec rm -f {} ';' || true
 
 clobber: clean
 	-rm -f $(BUILDPYTHON) $(PGEN) $(LIBRARY) $(LDLIBRARY) $(DLLLIBRARY) \
diff --git a/Misc/ACKS b/Misc/ACKS
index 9225031..a824a86 100644
--- a/Misc/ACKS
+++ b/Misc/ACKS
@@ -132,6 +132,7 @@
 Matt Conway
 David M. Cooke
 Greg Copeland
+Aldo Cortesi
 David Costanzo
 Scott Cotton
 Greg Couch
@@ -477,6 +478,7 @@
 Dan Pierson
 Martijn Pieters
 François Pinard
+Zach Pincus
 Michael Piotrowski
 Iustin Pop
 John Popplewell
@@ -536,6 +538,7 @@
 Gregor Schmid
 Ralf Schmitt
 Peter Schneider-Kamp
+Chad J. Schroeder
 Sam Schulenburg
 Stefan Schwarzer
 Dietmar Schwertberger
diff --git a/Misc/BeOS-setup.py b/Misc/BeOS-setup.py
index 07dbe15..991e608 100644
--- a/Misc/BeOS-setup.py
+++ b/Misc/BeOS-setup.py
@@ -176,8 +176,6 @@
         #
 
         # Some modules that are normally always on:
-        exts.append( Extension('regex', ['regexmodule.c', 'regexpr.c']) )
-
         exts.append( Extension('_weakref', ['_weakref.c']) )
         exts.append( Extension('_symtable', ['symtablemodule.c']) )
 
diff --git a/Misc/HISTORY b/Misc/HISTORY
index d8847a8..be4ca88 100644
--- a/Misc/HISTORY
+++ b/Misc/HISTORY
@@ -8,6 +8,5293 @@
 ======================================================================
 
 
+What's New in Python 2.4 final?
+===============================
+
+*Release date: 30-NOV-2004*
+
+Core and builtins
+-----------------
+
+- Bug 875692: Improve signal handling, especially when using threads, by
+  forcing an early re-execution of PyEval_EvalFrame() "periodic" code when
+  things_to_do is not cleared by Py_MakePendingCalls().
+
+
+What's New in Python 2.4 (release candidate 1)
+==============================================
+
+*Release date: 18-NOV-2004*
+
+Core and builtins
+-----------------
+
+- Bug 1061968:  Fixes in 2.4a3 to address thread bug 1010677 reintroduced
+  the years-old thread shutdown race bug 225673.  Numeric history lesson
+  aside, all bugs in all three reports are fixed now.
+
+
+Library
+-------
+
+- Bug 1052242: If exceptions are raised by an atexit handler function an
+  attempt is made to execute the remaining handlers.  The last exception
+  raised is re-raised.
+
+- ``doctest``'s new support for adding ``pdb.set_trace()`` calls to
+  doctests was broken in a dramatic but shallow way.  Fixed.
+
+- Bug 1065388:  ``calendar``'s ``day_name``, ``day_abbr``, ``month_name``,
+  and ``month_abbr`` attributes emulate sequences of locale-correct
+  spellings of month and day names.  Because the locale can change at
+  any time, the correct spelling is recomputed whenever one of these is
+  indexed.  In the worst case, the index may be a slice object, so these
+  recomputed every day or month name each time they were indexed.  This is
+  much slower than necessary in the usual case, when the index is just an
+  integer.  In that case, only the single spelling needed is recomputed
+  now; and, when the index is a slice object, only the spellings needed
+  by the slice are recomputed now.
+
+- Patch 1061679: Added ``__all__`` to pickletools.py.
+
+Build
+-----
+
+- Bug 1034277 / Patch 1035255: Remove compilation of core against CoreServices
+  and CoreFoundation on OS X.  Involved removing PyMac_GetAppletScriptFile()
+  which has no known users.  Thanks Bob Ippolito.
+
+C API
+-----
+
+- The PyRange_New() function is deprecated.
+
+
+What's New in Python 2.4 beta 2?
+================================
+
+*Release date: 03-NOV-2004*
+
+License
+-------
+
+The Python Software Foundation changed the license under which Python
+is released, to remove Python version numbers.  There were no other
+changes to the license.  So, for example, wherever the license for
+Python 2.3 said "Python 2.3", the new license says "Python".  The
+intent is to make it possible to refer to the PSF license in a more
+durable way.  For example, some people say they're confused by that
+the Open Source Initiative's entry for the Python Software Foundation
+License::
+
+      http://www.opensource.org/licenses/PythonSoftFoundation.php
+
+says "Python 2.1.1" all over it, wondering whether it applies only
+to Python 2.1.1.
+
+The official name of the new license is the Python Software Foundation
+License Version 2.
+
+Core and builtins
+-----------------
+
+- Bug #1055820 Cyclic garbage collection was not protecting against that
+  calling a live weakref to a piece of cyclic trash could resurrect an
+  insane mutation of the trash if any Python code ran during gc (via
+  running a dead object's __del__ method, running another callback on a
+  weakref to a dead object, or via any Python code run in any other thread
+  that managed to obtain the GIL while a __del__ or callback was running
+  in the thread doing gc).  The most likely symptom was "impossible"
+  ``AttributeError`` exceptions, appearing seemingly at random, on weakly
+  referenced objects.  The cure was to clear all weakrefs to unreachable
+  objects before allowing any callbacks to run.
+
+- Bug #1054139 _PyString_Resize() now invalidates its cached hash value.
+
+Extension Modules
+-----------------
+
+- Bug #1048870:  the compiler now generates distinct code objects for
+  functions with identical bodies.  This was producing confusing
+  traceback messages which pointed to the function where the code
+  object was first defined rather than the function being executed.
+
+Library
+-------
+
+- Patch #1056967 changes the semantics of Template.safe_substitute() so that
+  no ValueError is raised on an 'invalid' match group.  Now the delimiter is
+  returned.
+
+- Bug #1052503 pdb.runcall() was not passing along keyword arguments.
+
+- Bug #902037: XML.sax.saxutils.prepare_input_source() now combines relative
+  paths with a base path before checking os.path.isfile().
+
+- The whichdb module can now be run from the command line.
+
+- Bug #1045381: time.strptime() can now infer the date using %U or %W (week of
+  the year) when the day of the week and year are also specified.
+
+- Bug #1048816: fix bug in Ctrl-K at start of line in curses.textpad.Textbox
+
+- Bug #1017553: fix bug in tarfile.filemode()
+
+- Patch #737473: fix bug that old source code is shown in tracebacks even if
+  the source code is updated and reloaded.
+
+Build
+-----
+
+- Patch #1044395: --enable-shared is allowed in FreeBSD also.
+
+What's New in Python 2.4 beta 1?
+================================
+
+*Release date: 15-OCT-2004*
+
+Core and builtins
+-----------------
+
+- Patch #975056: Restartable signals were not correctly disabled on
+  BSD systems. Consistently use PyOS_setsig() instead of signal().
+
+- The internal portable implementation of thread-local storage (TLS), used
+  by the ``PyGILState_Ensure()``/``PyGILState_Release()`` API, was not
+  thread-correct.  This could lead to a variety of problems, up to and
+  including segfaults.  See bug 1041645 for an example.
+
+- Added a command line option, -m module, which searches sys.path for the
+  module and then runs it.  (Contributed by Nick Coghlan.)
+
+- The bytecode optimizer now folds tuples of constants into a single
+  constant.
+
+- SF bug #513866:  Float/long comparison anomaly.  Prior to 2.4b1, when
+  an integer was compared to a float, the integer was coerced to a float.
+  That could yield spurious overflow errors (if the integer was very
+  large), and to anomalies such as
+  ``long(1e200)+1 == 1e200 == long(1e200)-1``.  Coercion to float is no
+  longer performed, and cases like ``long(1e200)-1 < 1e200``,
+  ``long(1e200)+1 > 1e200`` and ``(1 << 20000) > 1e200`` are computed
+  correctly now.
+
+Extension modules
+-----------------
+
+- ``collections.deque`` objects didn't play quite right with garbage
+  collection, which could lead to a segfault in a release build, or
+  an assert failure in a debug build.  Also, added overflow checks,
+  better detection of mutation during iteration, and shielded deque
+  comparisons from unusual subclass overrides of the __iter__() method.
+
+Library
+-------
+
+- Patch 1046644: distutils build_ext grew two new options - --swig for
+  specifying the swig executable to use, and --swig-opts to specify
+  options to pass to swig. --swig-opts="-c++" is the new way to spell
+  --swig-cpp.
+
+- Patch 983206: distutils now obeys environment variable LDSHARED, if
+  it is set.
+
+- Added Peter Astrand's subprocess.py module.  See PEP 324 for details.
+
+- time.strptime() now properly escapes timezones and all other locale-specific
+  strings for regex-specific symbols.  Was breaking under Japanese Windows when
+  the timezone was specified as "Tokyo (standard time)".
+  Closes bug #1039270.
+
+- Updates for the email package:
+
+  + email.Utils.formatdate() grew a 'usegmt' argument for HTTP support.
+  + All deprecated APIs that in email 2.x issued warnings have been removed:
+    _encoder argument to the MIMEText constructor, Message.add_payload(),
+    Utils.dump_address_pair(), Utils.decode(), Utils.encode()
+  + New deprecations: Generator.__call__(), Message.get_type(),
+    Message.get_main_type(), Message.get_subtype(), the 'strict' argument to
+    the Parser constructor.  These will be removed in email 3.1.
+  + Support for Python earlier than 2.3 has been removed (see PEP 291).
+  + All defect classes have been renamed to end in 'Defect'.
+  + Some FeedParser fixes; also a MultipartInvariantViolationDefect will be
+    added to messages that claim to be multipart but really aren't.
+  + Updates to documentation.
+
+- re's findall() and finditer() functions now take an optional flags argument
+  just like the compile(), search(), and match() functions.  Also, documented
+  the previously existing start and stop parameters for the findall() and
+  finditer() methods of regular expression objects.
+
+- rfc822 Messages now support iterating over the headers.
+
+- The (undocumented) tarfile.Tarfile.membernames has been removed;
+  applications should use the getmember function.
+
+- httplib now offers symbolic constants for the HTTP status codes.
+
+- SF bug #1028306:  Trying to compare a ``datetime.date`` to a
+  ``datetime.datetime`` mistakenly compared only the year, month and day.
+  Now it acts like a mixed-type comparison:  ``False`` for ``==``,
+  ``True`` for ``!=``, and raises ``TypeError`` for other comparison
+  operators.  Because datetime is a subclass of date, comparing only the
+  base class (date) members can still be done, if that's desired, by
+  forcing using of the approprate date method; e.g.,
+  ``a_date.__eq__(a_datetime)`` is true if and only if the year, month
+  and day members of ``a_date`` and ``a_datetime`` are equal.
+
+- bdist_rpm now supports command line options --force-arch,
+  {pre,post}-install,  {pre,post}-uninstall, and
+  {prep,build,install,clean,verify}-script.
+
+- SF patch #998993: The UTF-8 and the UTF-16 stateful decoders now support
+  decoding incomplete input (when the input stream is temporarily exhausted).
+  ``codecs.StreamReader`` now implements buffering, which enables proper
+  readline support for the UTF-16 decoders. ``codecs.StreamReader.read()``
+  has a new argument ``chars`` which specifies the number of characters to
+  return. ``codecs.StreamReader.readline()`` and
+  ``codecs.StreamReader.readlines()`` have a new argument ``keepends``.
+  Trailing "\n"s will be stripped from the lines if ``keepends`` is false.
+
+- The documentation for doctest is greatly expanded, and now covers all
+  the new public features (of which there are many).
+
+- ``doctest.master`` was put back in, and ``doctest.testmod()`` once again
+  updates it.  This isn't good, because every ``testmod()`` call
+  contributes to bloating the "hidden" state of ``doctest.master``, but
+  some old code apparently relies on it.  For now, all we can do is
+  encourage people to stitch doctests together via doctest's unittest
+  integration features instead.
+
+- httplib now handles ipv6 address/port pairs.
+
+- SF bug #1017864: ConfigParser now correctly handles default keys,
+  processing them with ``ConfigParser.optionxform`` when supplied,
+  consistent with the handling of config file entries and runtime-set
+  options.
+
+- SF bug #997050: Document, test, & check for non-string values in
+  ConfigParser.  Moved the new string-only restriction added in
+  rev. 1.65 to the SafeConfigParser class, leaving existing
+  ConfigParser & RawConfigParser behavior alone, and documented the
+  conditions under which non-string values work.
+
+Build
+-----
+
+- Building on darwin now includes /opt/local/include and /opt/local/lib for
+  building extension modules.  This is so as to include software installed as
+  a DarwinPorts port <http://darwinports.opendarwin.org/>
+
+- pyport.h now defines a Py_IS_NAN macro.  It works as-is when the
+  platform C computes true for ``x != x`` if and only if X is a NaN.
+  Other platforms can override the default definition with a platform-
+  specific spelling in that platform's pyconfig.h.  You can also override
+  pyport.h's default Py_IS_INFINITY definition now.
+
+C API
+-----
+
+- SF patch 1044089:  New function ``PyEval_ThreadsInitialized()`` returns
+  non-zero if PyEval_InitThreads() has been called.
+
+- The undocumented and unused extern int ``_PyThread_Started`` was removed.
+
+- The C API calls ``PyInterpreterState_New()`` and ``PyThreadState_New()``
+  are two of the very few advertised as being safe to call without holding
+  the GIL.  However, this wasn't true in a debug build, as bug 1041645
+  demonstrated.  In a debug build, Python redirects the ``PyMem`` family
+  of calls to Python's small-object allocator, to get the benefit of
+  its extra debugging capabilities.  But Python's small-object allocator
+  isn't threadsafe, relying on the GIL to avoid the expense of doing its
+  own locking.  ``PyInterpreterState_New()`` and ``PyThreadState_New()``
+  call the platform ``malloc()`` directly now, regardless of build type.
+
+- PyLong_AsUnsignedLong[Mask] now support int objects as well.
+
+- SF patch #998993: ``PyUnicode_DecodeUTF8Stateful`` and
+  ``PyUnicode_DecodeUTF16Stateful`` have been added, which implement stateful
+  decoding.
+
+Tests
+-----
+
+- test__locale ported to unittest
+
+Mac
+---
+
+- ``plistlib`` now supports non-dict root objects.  There is also a new
+  interface for reading and writing plist files: ``readPlist(pathOrFile)``
+  and ``writePlist(rootObject, pathOrFile)``
+
+Tools/Demos
+-----------
+
+- The text file comparison scripts ``ndiff.py`` and ``diff.py`` now
+  read the input files in universal-newline mode.  This spares them
+  from consuming a great deal of time to deduce the useless result that,
+  e.g., a file with Windows line ends and a file with Linux line ends
+  have no lines in common.
+
+
+What's New in Python 2.4 alpha 3?
+=================================
+
+*Release date: 02-SEP-2004*
+
+Core and builtins
+-----------------
+
+- SF patch #1007189: ``from ... import ...`` statements now allow the name
+  list to be surrounded by parentheses.
+
+- Some speedups for long arithmetic, thanks to Trevor Perrin.  Gradeschool
+  multiplication was sped a little by optimizing the C code.  Gradeschool
+  squaring was sped by about a factor of 2, by exploiting that about half
+  the digit products are duplicates in a square.  Because exponentiation
+  uses squaring often, this also speeds long power.  For example, the time
+  to compute 17**1000000 dropped from about 14 seconds to 9 on my box due
+  to this much.  The cutoff for Karatsuba multiplication was raised,
+  since gradeschool multiplication got quicker, and the cutoff was
+  aggressively small regardless.  The exponentiation algorithm was switched
+  from right-to-left to left-to-right, which is more efficient for small
+  bases.  In addition, if the exponent is large, the algorithm now does
+  5 bits (instead of 1 bit) at a time.  That cut the time to compute
+  17**1000000 on my box in half again, down to about 4.5 seconds.
+
+- OverflowWarning is no longer generated.  PEP 237 scheduled this to
+  occur in Python 2.3, but since OverflowWarning was disabled by default,
+  nobody realized it was still being generated.  On the chance that user
+  code is still using them, the Python builtin OverflowWarning, and
+  corresponding C API PyExc_OverflowWarning, will exist until Python 2.5.
+
+- Py_InitializeEx has been added.
+
+- Fix the order of application of decorators.  The proper order is bottom-up;
+  the first decorator listed is the last one called.
+
+- SF patch #1005778.  Fix a seg fault if the list size changed while
+  calling list.index().  This could happen if a rich comparison function
+  modified the list.
+
+- The ``func_name`` (a.k.a. ``__name__``) attribute of user-defined
+  functions is now writable.
+
+- code_new (a.k.a new.code()) now checks its arguments sufficiently
+  carefully that passing them on to PyCode_New() won't trigger calls
+  to Py_FatalError() or PyErr_BadInternalCall().  It is still the case
+  that the returned code object might be entirely insane.
+
+- Subclasses of string can no longer be interned.  The semantics of
+  interning were not clear here -- a subclass could be mutable, for
+  example -- and had bugs.  Explicitly interning a subclass of string
+  via intern() will raise a TypeError.  Internal operations that attempt
+  to intern a string subclass will have no effect.
+
+- Bug 1003935:  xrange() could report bogus OverflowErrors.  Documented
+  what xrange() intends, and repaired tests accordingly.
+
+Extension modules
+-----------------
+
+- difflib now supports HTML side-by-side diff.
+
+- os.urandom has been added for systems that support sources of random
+  data.
+
+- Patch 1012740:  truncate() on a writeable cStringIO now resets the
+  position to the end of the stream.  This is consistent with the original
+  StringIO module and avoids inadvertently resurrecting data that was
+  supposed to have been truncated away.
+
+- Added socket.socketpair().
+
+- Added CurrentByteIndex, CurrentColumnNumber, CurrentLineNumber
+  members to xml.parsers.expat.XMLParser object.
+
+- The mpz, rotor, and xreadlines modules, all deprecated in earlier
+  versions of Python, have now been removed.
+
+Library
+-------
+
+- Patch #934356: if a module defines __all__, believe that rather than using
+  heuristics for filtering out imported names.
+
+- Patch #941486: added os.path.lexists(), which returns True for broken
+  symlinks, unlike os.path.exists().
+
+- the random module now uses os.urandom() for seeding if it is available.
+  Added a new generator based on os.urandom().
+
+- difflib and diff.py can now generate HTML.
+
+- bdist_rpm now includes version and release in the BuildRoot, and
+  replaces - by ``_`` in version and release.
+
+- distutils build/build_scripts now has an -e option to specify the
+  path to the Python interpreter for installed scripts.
+
+- PEP 292 classes Template and SafeTemplate are added to the string module.
+
+- tarfile now generates GNU tar files by default.
+
+- HTTPResponse has now a getheaders method.
+
+- Patch #1006219: let inspect.getsource handle '@' decorators. Thanks Simon
+  Percivall.
+
+- logging.handlers.SMTPHandler.date_time has been removed;
+  the class now uses email.Utils.formatdate to generate the time stamp.
+
+- A new function tkFont.nametofont was added to return an existing
+  font. The Font class constructor now has an additional exists argument
+  which, if True, requests to return/configure an existing font, rather
+  than creating a new one.
+
+- Updated the decimal package's min() and max() methods to match the
+  latest revision of the General Decimal Arithmetic Specification.
+  Quiet NaNs are ignored and equal values are sorted based on sign
+  and exponent.
+
+- The decimal package's Context.copy() method now returns deep copies.
+
+- Deprecated sys.exitfunc in favor of the atexit module.  The sys.exitfunc
+  attribute will be kept around for backwards compatibility and atexit
+  will just become the one preferred way to do it.
+
+- patch #675551: Add get_history_item and replace_history_item functions
+  to the readline module.
+
+- bug #989672: pdb.doc and the help messages for the help_d and help_u methods
+  of the pdb.Pdb class gives have been corrected. d(own) goes to a newer
+  frame, u(p) to an older frame, not the other way around.
+
+- bug #990669: os.path.realpath() will resolve symlinks before normalizing the
+  path, as normalizing the path may alter the meaning of the path if it
+  contains symlinks.
+
+- bug #851123: shutil.copyfile will raise an exception when trying to copy a
+  file onto a link to itself. Thanks Gregory Ball.
+
+- bug #570300: Fix inspect to resolve file locations using os.path.realpath()
+  so as to properly list all functions in a module when the module itself is
+  reached through a symlink.  Thanks Johannes Gijsbers.
+
+- doctest refactoring continued.  See the docs for details.  As part of
+  this effort, some old and little- (never?) used features are now
+  deprecated:  the Tester class, the module is_private() function, and the
+  isprivate argument to testmod().  The Tester class supplied a feeble
+  "by hand" way to combine multiple doctests, if you knew exactly what
+  you were doing.  The newer doctest features for unittest integration
+  already did a better job of that, are stronger now than ever, and the
+  new DocTestRunner class is a saner foundation if you want to do it by
+  hand.  The "private name" filtering gimmick was a mistake from the
+  start, and testmod() changed long ago to ignore it by default.  If
+  you want to filter out tests, the new DocTestFinder class can be used
+  to return a list of all doctests, and you can filter that list by
+  any computable criteria before passing it to a DocTestRunner instance.
+
+- Bug #891637, patch #1005466: fix inspect.getargs() crash on def foo((bar)).
+
+Tools/Demos
+-----------
+
+- IDLE's shortcut keys for windows are now case insensitive so that
+  Control-V works the same as Control-v.
+
+- pygettext.py: Generate POT-Creation-Date header in ISO format.
+
+Build
+-----
+
+- Backward incompatibility:  longintrepr.h now triggers a compile-time
+  error if SHIFT (the number of bits in a Python long "digit") isn't
+  divisible by 5.  This new requirement allows simple code for the new
+  5-bits-at-a-time long_pow() implementation.  If necessary, the
+  restriction could be removed (by complicating long_pow(), or by
+  falling back to the 1-bit-at-a-time algorithm), but there are no
+  plans to do so.
+
+- bug #991962: When building with --disable-toolbox-glue on Darwin no
+  attempt to build Mac-specific modules occurs.
+
+- The --with-tsc flag to configure to enable VM profiling with the
+  processor's timestamp counter now works on PPC platforms.
+
+- patch #1006629: Define _XOPEN_SOURCE to 500 on Solaris 8/9 to match
+  GCC's definition and avoid redefinition warnings.
+
+- Detect pthreads support (provided by gnu pth pthread emulation) on
+  GNU/k*BSD systems.
+
+- bug #1005737, #1007249: Fixed several build problems and warnings
+  found on old/legacy C compilers of HP-UX, IRIX and Tru64.
+
+C API
+-----
+
+..
+
+Documentation
+-------------
+
+- patch #1005936, bug #1009373: fix index entries which contain
+  an underscore when viewed with Acrobat.
+
+- bug #990669: os.path.normpath may alter the meaning of a path if
+  it contains symbolic links. This has been documented in a comment
+  since 1992, but is now in the library reference as well.
+
+New platforms
+-------------
+
+- FreeBSD 6 is now supported.
+
+Tests
+-----
+
+..
+
+Windows
+-------
+
+- Boosted the stack reservation for python.exe and pythonw.exe from
+  the default 1MB to 2MB.  Stack frames under VC 7.1 for 2.4 are enough
+  bigger than under VC 6.0 for 2.3.4 that deeply recursive progams
+  within the default sys.getrecursionlimit() default value of 1000 were
+  able to suffer undetected C stack overflows.  The standard test program
+  test_compiler was one such program.  If a Python process on Windows
+  "just vanishes" without a trace, and without an error message of any
+  kind, but with an exit code of 128, undetected stack overflow may be
+  the problem.
+
+Mac
+---
+
+..
+
+
+What's New in Python 2.4 alpha 2?
+=================================
+
+*Release date: 05-AUG-2004*
+
+Core and builtins
+-----------------
+
+- Patch #980695:  Implements efficient string concatenation for statements
+  of the form s=s+t and s+=t.  This will vary across implementations.
+  Accordingly, the str.join() method is strongly preferred for performance
+  sensitive code.
+
+- PEP-0318, Function Decorators have been added to the language. These are
+  implemented using the Java-style @decorator syntax, like so::
+
+     @staticmethod
+     def foo(bar):
+
+  (The PEP needs to be updated to reflect the current state)
+
+- When importing a module M raises an exception, Python no longer leaves M
+  in sys.modules.  Before 2.4a2 it did, and a subsequent import of M would
+  succeed, picking up a module object from sys.modules reflecting as much
+  of the initialization of M as completed before the exception was raised.
+  Subsequent imports got no indication that M was in a partially-
+  initialized state, and the importers could get into arbitrarily bad
+  trouble as a result (the M they got was in an unintended state,
+  arbitrarily far removed from M's author's intent).  Now subsequent
+  imports of M will continue raising exceptions (but if, for example, the
+  source code for M is edited between import attempts, then perhaps later
+  attempts will succeed, or raise a different exception).
+
+  This can break existing code, but in such cases the code was probably
+  working before by accident.  In the Python source, the only case of
+  breakage discovered was in a test accidentally relying on a damaged
+  module remaining in sys.modules.  Cases are also known where tests
+  deliberately provoking import errors remove damaged modules from
+  sys.modules themselves, and such tests will break now if they do an
+  unconditional del sys.modules[M].
+
+- u'%s' % obj will now try obj.__unicode__() first and fallback to
+  obj.__str__() if no __unicode__ method can be found.
+
+- Patch #550732: Add PyArg_VaParseTupleAndKeywords().  Analogous to
+  PyArg_VaParse().  Both are now documented.  Thanks Greg Chapman.
+
+- Allow string and unicode return types from .encode()/.decode()
+  methods on string and unicode objects.  Added unicode.decode()
+  which was missing for no apparent reason.
+
+- An attempt to fix the mess that is Python's behaviour with
+  signal handlers and threads, complicated by readline's behaviour.
+  It's quite possible that there are still bugs here.
+
+- Added C macros Py_CLEAR and Py_VISIT to ease the implementation of
+  types that support garbage collection.
+
+- Compiler now treats None as a constant.
+
+- The type of values returned by __int__, __float__, __long__,
+  __oct__, and __hex__ are now checked.  Returning an invalid type
+  will cause a TypeError to be raised.  This matches the behavior of
+  Jython.
+
+- Implemented bind_textdomain_codeset() in locale module.
+
+- Added a workaround for proper string operations in BSDs.  str.split
+  and str.is* methods can now work correctly with UTF-8 locales.
+
+- Bug #989185: unicode.iswide() and unicode.width() is dropped and
+  the East Asian Width support is moved to unicodedata extension
+  module.
+
+- Patch #941229: The source code encoding in interactive mode
+  now refers sys.stdin.encoding not just ISO-8859-1 anymore.  This
+  allows for non-latin-1 users to write unicode strings directly.
+
+Extension modules
+-----------------
+
+- cpickle now supports the same keyword arguments as pickle.
+
+Library
+-------
+
+- Added new codecs and aliases for ISO_8859-11, ISO_8859-16 and
+  TIS-620
+
+- Thanks to Edward Loper, doctest has been massively refactored, and
+  many new features were added.  Full docs will appear later.  For now
+  the doctest module comments and new test cases give good coverage.
+  The refactoring provides many hook points for customizing behavior
+  (such as how to report errors, and how to compare expected to actual
+  output).  New features include a <BLANKLINE> marker for expected
+  output containing blank lines, options to produce unified or context
+  diffs when actual output doesn't match expectations, an option to
+  normalize whitespace before comparing, and an option to use an
+  ellipsis to signify "don't care" regions of output.
+
+- Tkinter now supports the wish -sync and -use options.
+
+- The following methods in time support passing of None: ctime(), gmtime(),
+  and localtime().  If None is provided, the current time is used (the
+  same as when the argument is omitted).
+  [SF bug 658254, patch 663482]
+
+- nntplib does now allow to ignore a .netrc file.
+
+- urllib2 now recognizes Basic authentication even if other authentication
+  schemes are offered.
+
+- Bug #1001053.  wave.open() now accepts unicode filenames.
+
+- gzip.GzipFile has a new fileno() method, to retrieve the handle of the
+  underlying file object (provided it has a fileno() method).  This is
+  needed if you want to use os.fsync() on a GzipFile.
+
+- imaplib has two new methods: deleteacl and myrights.
+
+- nntplib has two new methods: description and descriptions. They
+  use a more RFC-compliant way of getting a newsgroup description.
+
+- Bug #993394.  Fix a possible red herring of KeyError in 'threading' being
+  raised during interpreter shutdown from a registered function with atexit
+  when dummy_threading is being used.
+
+- Bug #857297/Patch #916874.  Fix an error when extracting a hard link
+  from a tarfile.
+
+- Patch #846659.  Fix an error in tarfile.py when using
+  GNU longname/longlink creation.
+
+- The obsolete FCNTL.py has been deleted.  The builtin fcntl module
+  has been available (on platforms that support fcntl) since Python
+  1.5a3, and all FCNTL.py did is export fcntl's names, after generating
+  a deprecation warning telling you to use fcntl directly.
+
+- Several new unicode codecs are added: big5hkscs, euc_jis_2004,
+  iso2022_jp_2004, shift_jis_2004.
+
+- Bug #788520.  Queue.{get, get_nowait, put, put_nowait} have new
+  implementations, exploiting Conditions (which didn't exist at the time
+  Queue was introduced).  A minor semantic change is that the Full and
+  Empty exceptions raised by non-blocking calls now occur only if the
+  queue truly was full or empty at the instant the queue was checked (of
+  course the Queue may no longer be full or empty by the time a calling
+  thread sees those exceptions, though).  Before, the exceptions could
+  also be raised if it was "merely inconvenient" for the implementation
+  to determine the true state of the Queue (because the Queue was locked
+  by some other method in progress).
+
+- Bugs #979794 and #980117: difflib.get_grouped_opcodes() now handles the
+  case of comparing two empty lists.  This affected both context_diff() and
+  unified_diff(),
+
+- Bug #980938: smtplib now prints debug output to sys.stderr.
+
+- Bug #930024: posixpath.realpath() now handles infinite loops in symlinks by
+  returning the last point in the path that was not part of any loop.  Thanks
+  AM Kuchling.
+
+- Bug #980327: ntpath not handles compressing erroneous slashes between the
+  drive letter and the rest of the path.  Also clearly handles UNC addresses now
+  as well.  Thanks Paul Moore.
+
+- bug #679953: zipfile.py should now work for files over 2 GB.  The packed data
+  for file sizes (compressed and uncompressed) was being stored as signed
+  instead of unsigned.
+
+- decimal.py now only uses signals in the IBM spec.  The other conditions are
+  no longer part of the public API.
+
+- codecs module now has two new generic APIs: encode() and decode()
+  which don't restrict the return types (unlike the unicode and
+  string methods of the same name).
+
+- Non-blocking SSL sockets work again; they were broken in Python 2.3.
+  SF patch 945642.
+
+- doctest unittest integration improvements:
+
+  o Improved the unitest test output for doctest-based unit tests
+
+  o Can now pass setUp and tearDown functions when creating
+    DocTestSuites.
+
+- The threading module has a new class, local, for creating objects
+  that provide thread-local data.
+
+- Bug #990307: when keep_empty_values is True, cgi.parse_qsl()
+  no longer returns spurious empty fields.
+
+- Implemented bind_textdomain_codeset() in gettext module.
+
+- Introduced in gettext module the l*gettext() family of functions,
+  which return translation strings encoded in the preferred encoding,
+  as informed by locale module's getpreferredencoding().
+
+- optparse module (and tests) upgraded to Optik 1.5a1.  Changes:
+
+  - Add expansion of default values in help text: the string
+    "%default" in an option's help string is expanded to str() of
+    that option's default value, or "none" if no default value.
+
+  - Bug #955889: option default values that happen to be strings are
+    now processed in the same way as values from the command line; this
+    allows generation of nicer help when using custom types.  Can
+    be disabled with parser.set_process_default_values(False).
+
+  - Bug #960515: don't crash when generating help for callback
+    options that specify 'type', but not 'dest' or 'metavar'.
+
+  - Feature #815264: change the default help format for short options
+    that take an argument from e.g. "-oARG" to "-o ARG"; add
+    set_short_opt_delimiter() and set_long_opt_delimiter() methods to
+    HelpFormatter to allow (slight) customization of the formatting.
+
+  - Patch #736940: internationalize Optik: all built-in user-
+    targeted literal strings are passed through gettext.gettext().  (If
+    you want translations (.po files), they're not included with Python
+    -- you'll find them in the Optik source distribution from
+    http://optik.sourceforge.net/ .)
+
+  - Bug #878453: respect $COLUMNS environment variable for
+    wrapping help output.
+
+  - Feature #988122: expand "%prog" in the 'description' passed
+    to OptionParser, just like in the 'usage' and 'version' strings.
+    (This is *not* done in the 'description' passed to OptionGroup.)
+
+C API
+-----
+
+- PyImport_ExecCodeModule() and PyImport_ExecCodeModuleEx():  if an
+  error occurs while loading the module, these now delete the module's
+  entry from sys.modules.  All ways of loading modules eventually call
+  one of these, so this is an error-case change in semantics for all
+  ways of loading modules.  In rare cases, a module loader may wish
+  to keep a module object in sys.modules despite that the module's
+  code cannot be executed.  In such cases, the module loader must
+  arrange to reinsert the name and module object in sys.modules.
+  PyImport_ReloadModule() has been changed to reinsert the original
+  module object into sys.modules if the module reload fails, so that
+  its visible semantics have not changed.
+
+- A large pile of datetime field-extraction macros is now documented,
+  thanks to Anthony Tuininga (patch #986010).
+
+Documentation
+-------------
+
+- Improved the tutorial on creating types in C.
+
+  - point out the importance of reassigning data members before
+    assigning their values
+
+  - correct my misconception about return values from visitprocs. Sigh.
+
+  - mention the labor saving Py_VISIT and Py_CLEAR macros.
+
+- Major rewrite of the math module docs, to address common confusions.
+
+Tests
+-----
+
+- The test data files for the decimal test suite are now installed on
+  platforms that use the Makefile.
+
+- SF patch 995225:  The test file testtar.tar accidentally contained
+  CVS keywords (like $Id$), which could cause spurious failures in
+  test_tarfile.py depending on how the test file was checked out.
+
+
+What's New in Python 2.4 alpha 1?
+=================================
+
+*Release date: 08-JUL-2004*
+
+Core and builtins
+-----------------
+
+- weakref.ref is now the type object also known as
+  weakref.ReferenceType; it can be subclassed like any other new-style
+  class.  There's less per-entry overhead in WeakValueDictionary
+  objects now (one object instead of three).
+
+- Bug #951851: Python crashed when reading import table of certain
+  Windows DLLs.
+
+- Bug #215126.  The locals argument to eval(), execfile(), and exec now
+  accept any mapping type.
+
+- marshal now shares interned strings. This change introduces
+  a new .pyc magic.
+
+- Bug #966623. classes created with type() in an exec(, {}) don't
+  have a __module__, but code in typeobject assumed it would always
+  be there.
+
+- Python no longer relies on the LC_NUMERIC locale setting to be
+  the "C" locale; as a result, it no longer tries to prevent changing
+  the LC_NUMERIC category.
+
+- Bug #952807:  Unpickling pickled instances of subclasses of
+  datetime.date, datetime.datetime and datetime.time could yield insane
+  objects.  Thanks to Jiwon Seo for a fix.
+
+- Bug #845802: Python crashes when __init__.py is a directory.
+
+- Unicode objects received two new methods: iswide() and width().
+  These query East Asian width information, as specified in Unicode
+  TR11.
+
+- Improved the tuple hashing algorithm to give fewer collisions in
+  common cases.  Fixes bug  #942952.
+
+- Implemented generator expressions (PEP 289).  Coded by Jiwon Seo.
+
+- Enabled the profiling of C extension functions (and builtins) - check
+  new documentation and modified profile and bdb modules for more details
+
+- Set file.name to the object passed to open (instead of a new string)
+
+- Moved tracebackobject into traceback.h and renamed to PyTracebackObject
+
+- Optimized the byte coding for multiple assignments like "a,b=b,a" and
+  "a,b,c=1,2,3".  Improves their speed by 25% to 30%.
+
+- Limit the nested depth of a tuple for the second argument to isinstance()
+  and issubclass() to the recursion limit of the interpreter.
+  Fixes bug  #858016 .
+
+- Optimized dict iterators, creating separate types for each
+  and having them reveal their length.  Also optimized the
+  methods:  keys(), values(), and items().
+
+- Implemented a newcode opcode, LIST_APPEND, that simplifies
+  the generated bytecode for list comprehensions and further
+  improves their performance (about 35%).
+
+- Implemented rich comparisons for floats, which seems to make
+  comparisons involving NaNs somewhat less surprising when the
+  underlying C compiler actually implements C99 semantics.
+
+- Optimized list.extend() to save memory and no longer create
+  intermediate sequences.  Also, extend() now pre-allocates the
+  needed memory whenever the length of the iterable is known in
+  advance -- this halves the time to extend the list.
+
+- Optimized list resize operations to make fewer calls to the system
+  realloc().  Significantly speeds up list appends, list pops,
+  list comprehensions, and the list constructor (when the input iterable
+  length is not known).
+
+- Changed the internal list over-allocation scheme.  For larger lists,
+  overallocation ranged between 3% and 25%.  Now, it is a constant 12%.
+  For smaller lists (n<8), overallocation was upto eight elements.  Now,
+  the overallocation is no more than three elements -- this improves space
+  utilization for applications that have large numbers of small lists.
+
+- Most list bodies now get re-used rather than freed.  Speeds up list
+  instantiation and deletion by saving calls to malloc() and free().
+
+- The dict.update() method now accepts all the same argument forms
+  as the dict() constructor.  This now includes item lists and/or
+  keyword arguments.
+
+- Support for arbitrary objects supporting the read-only buffer
+  interface as the co_code field of code objects (something that was
+  only possible to create from C code) has been removed.
+
+- Made omitted callback and None equivalent for weakref.ref() and
+  weakref.proxy(); the None case wasn't handled correctly in all
+  cases.
+
+- Fixed problem where PyWeakref_NewRef() and PyWeakref_NewProxy()
+  assumed that initial existing entries in an object's weakref list
+  would not be removed while allocating a new weakref object.  Since
+  GC could be invoked at that time, however, that assumption was
+  invalid.  In a truly obscure case of GC being triggered during
+  creation for a new weakref object for an referent which already
+  has a weakref without a callback which is only referenced from
+  cyclic trash, a memory error can occur.  This consistently created a
+  segfault in a debug build, but provided less predictable behavior in
+  a release build.
+
+- input() builtin function now respects compiler flags such as
+  __future__ statements.  SF patch 876178.
+
+- Removed PendingDeprecationWarning from apply().  apply() remains
+  deprecated, but the nuisance warning will not be issued.
+
+- At Python shutdown time (Py_Finalize()), 2.3 called cyclic garbage
+  collection twice, both before and after tearing down modules.  The
+  call after tearing down modules has been disabled, because too much
+  of Python has been torn down then for __del__ methods and weakref
+  callbacks to execute sanely.  The most common symptom was a sequence
+  of uninformative messages on stderr when Python shut down, produced
+  by threads trying to raise exceptions, but unable to report the nature
+  of their problems because too much of the sys module had already been
+  destroyed.
+
+- Removed FutureWarnings related to hex/oct literals and conversions
+  and left shifts.  (Thanks to Kalle Svensson for SF patch 849227.)
+  This addresses most of the remaining semantic changes promised by
+  PEP 237, except for repr() of a long, which still shows the trailing
+  'L'.  The PEP appears to promise warnings for operations that
+  changed semantics compared to Python 2.3, but this is not
+  implemented; we've suffered through enough warnings related to
+  hex/oct literals and I think it's best to be silent now.
+
+- For str and unicode objects, the ljust(), center(), and rjust()
+  methods now accept an optional argument specifying a fill
+  character other than a space.
+
+- When method objects have an attribute that can be satisfied either
+  by the function object or by the method object, the function
+  object's attribute usually wins.  Christian Tismer pointed out that
+  that this is really a mistake, because this only happens for special
+  methods (like __reduce__) where the method object's version is
+  really more appropriate than the function's attribute.  So from now
+  on, all method attributes will have precedence over function
+  attributes with the same name.
+
+- Critical bugfix, for SF bug 839548:  if a weakref with a callback,
+  its callback, and its weakly referenced object, all became part of
+  cyclic garbage during a single run of garbage collection, the order
+  in which they were torn down was unpredictable.  It was possible for
+  the callback to see partially-torn-down objects, leading to immediate
+  segfaults, or, if the callback resurrected garbage objects, to
+  resurrect insane objects that caused segfaults (or other surprises)
+  later.  In one sense this wasn't surprising, because Python's cyclic gc
+  had no knowledge of Python's weakref objects.  It does now.  When
+  weakrefs with callbacks become part of cyclic garbage now, those
+  weakrefs are cleared first.  The callbacks don't trigger then,
+  preventing the problems.  If you need callbacks to trigger, then just
+  as when cyclic gc is not involved, you need to write your code so
+  that weakref objects outlive the objects they weakly reference.
+
+- Critical bugfix, for SF bug 840829:  if cyclic garbage collection
+  happened to occur during a weakref callback for a new-style class
+  instance, subtle memory corruption was the result (in a release build;
+  in a debug build, a segfault occurred reliably very soon after).
+  This has been repaired.
+
+- Compiler flags set in PYTHONSTARTUP are now active in __main__.
+
+- Added two builtin types, set() and frozenset().
+
+- Added a reversed() builtin function that returns a reverse iterator
+  over a sequence.
+
+- Added a sorted() builtin function that returns a new sorted list
+  from any iterable.
+
+- CObjects are now mutable (on the C level) through PyCObject_SetVoidPtr.
+
+- list.sort() now supports three keyword arguments:  cmp, key, and reverse.
+  The key argument can be a function of one argument that extracts a
+  comparison key from the original record:  mylist.sort(key=str.lower).
+  The reverse argument is a boolean value and if True will change the
+  sort order as if the comparison arguments were reversed.  In addition,
+  the documentation has been amended to provide a guarantee that all sorts
+  starting with Py2.3 are guaranteed to be stable (the relative order of
+  records with equal keys is unchanged).
+
+- Added test whether wchar_t is signed or not. A signed wchar_t is not
+  usable as internal unicode type base for Py_UNICODE since the
+  unicode implementation assumes an unsigned type.
+
+- Fixed a bug in the cache of length-one Unicode strings that could
+  lead to a seg fault.  The specific problem occurred when an earlier,
+  non-fatal error left an uninitialized Unicode object in the
+  freelist.
+
+- The % formatting operator now supports '%F' which is equivalent to
+  '%f'.  This has always been documented but never implemented.
+
+- complex(obj) could leak a little memory if obj wasn't a string or
+  number.
+
+- zip() with no arguments now returns an empty list instead of raising
+  a TypeError exception.
+
+- obj.__contains__() now returns True/False instead of 1/0.  SF patch
+  820195.
+
+- Python no longer tries to be smart about recursive comparisons.
+  When comparing containers with cyclic references to themselves it
+  will now just hit the recursion limit.  See SF patch 825639.
+
+- str and unicode builtin types now have an rsplit() method that is
+  same as split() except that it scans the string from the end
+  working towards the beginning.  See SF feature request 801847.
+
+- Fixed a bug in object.__reduce_ex__ when using protocol 2.  Failure
+  to clear the error when attempts to get the __getstate__ attribute
+  fail caused intermittent errors and odd behavior.
+
+- buffer objects based on other objects no longer cache a pointer to
+  the data and the data length.  Instead, the appropriate tp_as_buffer
+  method is called as necessary.
+
+- fixed: if a file is opened with an explicit buffer size >= 1, repeated
+  close() calls would attempt to free() the buffer already free()ed on
+  the first call.
+
+
+Extension modules
+-----------------
+
+- Added socket.getservbyport(), and make the second argument in
+  getservbyname() and getservbyport() optional.
+
+- time module code that deals with input POSIX timestamps will now raise
+  ValueError if more than a second is lost in precision when the
+  timestamp is cast to the platform C time_t type.  There's no chance
+  that the platform will do anything sensible with the result in such
+  cases.  This includes ctime(), localtime() and gmtime().  Assorted
+  fromtimestamp() and utcfromtimestamp() methods in the datetime module
+  were also protected.  Closes bugs #919012 and 975996.
+
+- fcntl.ioctl now warns if the mutate flag is not specified.
+
+- nt now properly allows to refer to UNC roots, e.g. in nt.stat().
+
+- the weakref module now supports additional objects:  array.array,
+  sre.pattern_objects, file objects, and sockets.
+
+- operator.isMappingType() and operator.isSequenceType() now give
+  fewer false positives.
+
+- socket.sslerror is now a subclass of socket.error .  Also added
+  socket.error to the socket module's C API.
+
+- Bug #920575: A problem where the _locale module segfaults on
+  nl_langinfo(ERA) caused by GNU libc's illegal NULL return is fixed.
+
+- array objects now support the copy module.  Also, their resizing
+  scheme has been updated to match that used for list objects.  This improves
+  the performance (speed and memory usage) of append() operations.
+  Also, array.array() and array.extend() now accept any iterable argument
+  for repeated appends without needing to create another temporary array.
+
+- cStringIO.writelines() now accepts any iterable argument and writes
+  the lines one at a time rather than joining them and writing once.
+  Made a parallel change to StringIO.writelines().  Saves memory and
+  makes suitable for use with generator expressions.
+
+- time.strftime() now checks that the values in its time tuple argument
+  are within the proper boundaries to prevent possible crashes from the
+  platform's C library implementation of strftime().  Can possibly
+  break code that uses values outside the range that didn't cause
+  problems previously (such as sitting day of year to 0).  Fixes bug
+  #897625.
+
+- The socket module now supports Bluetooth sockets, if the
+  system has <bluetooth/bluetooth.h>
+
+- Added a collections module containing a new datatype, deque(),
+  offering high-performance, thread-safe, memory friendly appends
+  and pops on either side of the deque.
+
+- Several modules now take advantage of collections.deque() for
+  improved performance:  Queue, mutex, shlex, threading, and pydoc.
+
+- The operator module has two new functions, attrgetter() and
+  itemgetter() which are useful for creating fast data extractor
+  functions for map(), list.sort(), itertools.groupby(), and
+  other functions that expect a function argument.
+
+- socket.SHUT_{RD,WR,RDWR} was added.
+
+- os.getsid was added.
+
+- The pwd module incorrectly advertised its struct type as
+  struct_pwent; this has been renamed to struct_passwd.  (The old name
+  is still supported for backwards compatibility.)
+
+- The xml.parsers.expat module now provides Expat 1.95.7.
+
+- socket.IPPROTO_IPV6 was added.
+
+- readline.clear_history was added.
+
+- select.select() now accepts sequences for its first three arguments.
+
+- cStringIO now supports the f.closed attribute.
+
+- The signal module now exposes SIGRTMIN and SIGRTMAX (if available).
+
+- curses module now supports use_default_colors().  [patch #739124]
+
+- Bug #811028: ncurses.h breakage on FreeBSD/MacOS X
+
+- Bug #814613: INET_ADDRSTRLEN fix needed for all compilers on SGI
+
+- Implemented non-recursive SRE matching scheme (#757624).
+
+- Implemented (?(id/name)yes|no) support in SRE (#572936).
+
+- random.seed() with no arguments or None uses time.time() as a default
+  seed.  Modified to match Py2.2 behavior and use fractional seconds so
+  that successive runs are more likely to produce different sequences.
+
+- random.Random has a new method, getrandbits(k), which returns an int
+  with k random bits.  This method is now an optional part of the API
+  for user defined generators.  Any generator that defines genrandbits()
+  can now use randrange() for ranges with a length >= 2**53.  Formerly,
+  randrange would return only even numbers for ranges that large (see
+  SF bug #812202).  Generators that do not define genrandbits() now
+  issue a warning when randrange() is called with a range that large.
+
+- itertools has a new function, groupby() for aggregating iterables
+  into groups sharing the same key (as determined by a key function).
+  It offers some of functionality of SQL's groupby keyword and of
+  the Unix uniq filter.
+
+- itertools now has a new tee() function which produces two independent
+  iterators from a single iterable.
+
+- itertools.izip() with no arguments now returns an empty iterator instead
+  of raising a TypeError exception.
+
+- Fixed #853061: allow BZ2Compressor.compress() to receive an empty string
+  as parameter.
+
+Library
+-------
+
+- Added a new module: cProfile, a C profiler with the same interface as the
+  profile module.  cProfile avoids some of the drawbacks of the hotshot
+  profiler and provides a bit more information than the other two profilers.
+  Based on "lsprof" (patch #1212837).
+
+- Bug #1266283: The new function "lexists" is now in os.path.__all__.
+
+- Bug #981530: Fix UnboundLocalError in shutil.rmtree().  This affects
+  the documented behavior: the function passed to the onerror()
+  handler can now also be os.listdir.
+
+- Bug #754449: threading.Thread objects no longer mask exceptions raised during
+  interpreter shutdown with another exception from attempting to handle the
+  original exception.
+
+- Added decimal.py per PEP 327.
+
+- Bug #981299: rsync is now a recognized protocol in urlparse that uses a
+  "netloc" portion of a URL.
+
+- Bug #919012: shutil.move() will not try to move a directory into itself.
+  Thanks Johannes Gijsbers.
+
+- Bug #934282: pydoc.stripid() is now case-insensitive.  Thanks Robin Becker.
+
+- Bug #823209:  cmath.log() now takes an optional base argument so that its
+  API matches math.log().
+
+- Bug #957381: distutils bdist_rpm no longer fails on recent RPM versions
+  that generate a -debuginfo.rpm
+
+- os.path.devnull has been added for all supported platforms.
+
+- Fixed #877165: distutils now picks the right C++ compiler command
+  on cygwin and mingw32.
+
+- urllib.urlopen().readline() now handles HTTP/0.9 correctly.
+
+- refactored site.py into functions.  Also wrote regression tests for the
+  module.
+
+- The distutils install command now supports the --home option and
+  installation scheme for all platforms.
+
+- asyncore.loop now has a repeat count parameter that defaults to
+  looping forever.
+
+- The distutils sdist command now ignores all .svn directories, in
+  addition to CVS and RCS directories.  .svn directories hold
+  administrative files for the Subversion source control system.
+
+- Added a new module: cookielib.  Automatic cookie handling for HTTP
+  clients.  Also, support for cookielib has been added to urllib2, so
+  urllib2.urlopen() can transparently handle cookies.
+
+- stringprep.py now uses built-in set() instead of sets.Set().
+
+- Bug #876278: Unbounded recursion in modulefinder
+
+- Bug #780300: Swap public and system ID in LexicalHandler.startDTD.
+  Applications relying on the wrong order need to be corrected.
+
+- Bug #926075: Fixed a bug that returns a wrong pattern object
+  for a string or unicode object in sre.compile() when a different
+  type pattern with the same value exists.
+
+- Added countcallers arg to trace.Trace class (--trackcalls command line arg
+  when run from the command prompt).
+
+- Fixed a caching bug in platform.platform() where the argument of 'terse' was
+  not taken into consideration when caching value.
+
+- Added two new command-line arguments for profile (output file and
+  default sort).
+
+- Added global runctx function to profile module
+
+- Add hlist missing entryconfigure and entrycget methods.
+
+- The ptcp154 codec was added for Kazakh character set support.
+
+- Support non-anonymous ftp URLs in urllib2.
+
+- The encodings package will now apply codec name aliases
+  first before starting to try the import of the codec module.
+  This simplifies overriding built-in codecs with external
+  packages, e.g. the included CJK codecs with the JapaneseCodecs
+  package, by adjusting the aliases dictionary in encodings.aliases
+  accordingly.
+
+- base64 now supports RFC 3548 Base16, Base32, and Base64 encoding and
+  decoding standards.
+
+- urllib2 now supports processors.  A processor is a handler that
+  implements an xxx_request or xxx_response method.  These methods are
+  called for all requests.
+
+- distutils compilers now compile source files in the same order as
+  they are passed to the compiler.
+
+- pprint.pprint() and pprint.pformat() now have additional parameters
+  indent, width and depth.
+
+- Patch #750542: pprint now will pretty print subclasses of list, tuple
+  and dict too, as long as they don't overwrite __repr__().
+
+- Bug #848614: distutils' msvccompiler fails to find the MSVC6
+  compiler because of incomplete registry entries.
+
+- httplib.HTTP.putrequest now offers to omit the implicit Accept-Encoding.
+
+- Patch #841977: modulefinder didn't find extension modules in packages
+
+- imaplib.IMAP4.thread was added.
+
+- Plugged a minor hole in tempfile.mktemp() due to the use of
+  os.path.exists(), switched to using os.lstat() directly if possible.
+
+- bisect.py and heapq.py now have underlying C implementations
+  for better performance.
+
+- heapq.py has two new functions, nsmallest() and nlargest().
+
+- traceback.format_exc has been added (similar to print_exc but it returns
+  a string).
+
+- xmlrpclib.MultiCall has been added.
+
+- poplib.POP3_SSL has been added.
+
+- tmpfile.mkstemp now returns an absolute path even if dir is relative.
+
+- urlparse is RFC 2396 compliant.
+
+- The fieldnames argument to the csv module's DictReader constructor is now
+  optional.  If omitted, the first row of the file will be used as the
+  list of fieldnames.
+
+- encodings.bz2_codec was added for access to bz2 compression
+  using "a long string".encode('bz2')
+
+- Various improvements to unittest.py, realigned with PyUnit CVS.
+
+- dircache now passes exceptions to the caller, instead of returning
+  empty lists.
+
+- The bsddb module and dbhash module now support the iterator and
+  mapping protocols which make them more substitutable for dictionaries
+  and shelves.
+
+- The csv module's DictReader and DictWriter classes now accept keyword
+  arguments.  This was an omission in the initial implementation.
+
+- The email package handles some RFC 2231 parameters with missing
+  CHARSET fields better.  It also includes a patch to parameter
+  parsing when semicolons appear inside quotes.
+
+- sets.py now runs under Py2.2.  In addition, the argument restrictions
+  for most set methods (but not the operators) have been relaxed to
+  allow any iterable.
+
+- _strptime.py now has a behind-the-scenes caching mechanism for the most
+  recent TimeRE instance used along with the last five unique directive
+  patterns.  The overall module was also made more thread-safe.
+
+- random.cunifvariate() and random.stdgamma() were deprecated in Py2.3
+  and removed in Py2.4.
+
+- Bug #823328: urllib2.py's HTTP Digest Auth support works again.
+
+- Patch #873597: CJK codecs are imported into rank of default codecs.
+
+Tools/Demos
+-----------
+
+- A hotshotmain script was added to the Tools/scripts directory that
+  makes it easy to run a script under control of the hotshot profiler.
+
+- The db2pickle and pickle2db scripts can now dump/load gdbm files.
+
+- The file order on the command line of the pickle2db script was reversed.
+  It is now [ picklefile ] dbfile.  This provides better symmetry with
+  db2pickle.  The file arguments to both scripts are now source followed by
+  destination in situations where both files are given.
+
+- The pydoc script will display a link to the module documentation for
+  modules determined to be part of the core distribution.  The documentation
+  base directory defaults to http://www.python.org/doc/current/lib/ but can
+  be changed by setting the PYTHONDOCS environment variable.
+
+- texcheck.py now detects double word errors.
+
+- md5sum.py mistakenly opened input files in text mode by default, a
+  silent and dangerous change from previous releases.  It once again
+  opens input files in binary mode by default.  The -t and -b flags
+  remain for compatibility with the 2.3 release, but -b is the default
+  now.
+
+- py-electric-colon now works when pending-delete/delete-selection mode is
+  in effect
+
+- py-help-at-point is no longer bound to the F1 key - it's still bound to
+  C-c C-h
+
+- Pynche was fixed to not crash when there is no ~/.pynche file and no
+  -d option was given.
+
+Build
+-----
+
+- Bug #978645: Modules/getpath.c now builds properly in --disable-framework
+  build under OS X.
+
+- Profiling using gprof is now available if Python is configured with
+  --enable-profiling.
+
+- Profiling the VM using the Pentium TSC is now possible if Python
+  is configured --with-tsc.
+
+- In order to find libraries, setup.py now also looks in /lib64, for use
+  on AMD64.
+
+- Bug #934635: Fixed a bug where the configure script couldn't detect
+  getaddrinfo() properly if the KAME stack had SCTP support.
+
+- Support for missing ANSI C header files (limits.h, stddef.h, etc) was
+  removed.
+
+- Systems requiring the D4, D6 or D7 variants of pthreads are no longer
+  supported (see PEP 11).
+
+- Universal newline support can no longer be disabled (see PEP 11).
+
+- Support for DGUX, SunOS 4, IRIX 4 and Minix was removed (see PEP 11).
+
+- Support for systems requiring --with-dl-dld or --with-sgi-dl was removed
+  (see PEP 11).
+
+- Tests for sizeof(char) were removed since ANSI C mandates that
+  sizeof(char) must be 1.
+
+C API
+-----
+
+- Thanks to Anthony Tuininga, the datetime module now supplies a C API
+  containing type-check macros and constructors.  See new docs in the
+  Python/C API Reference Manual for details.
+
+- Private function _PyTime_DoubleToTimet added, to convert a Python
+  timestamp (C double) to platform time_t with some out-of-bounds
+  checking.  Declared in new header file timefuncs.h.  It would be
+  good to expose some other internal timemodule.c functions there.
+
+- New public functions PyEval_EvaluateFrame and PyGen_New to expose
+  generator objects.
+
+- New public functions Py_IncRef() and Py_DecRef(), exposing the
+  functionality of the Py_XINCREF() and Py_XDECREF macros. Useful for
+  runtime dynamic embedding of Python.  See patch #938302, by Bob
+  Ippolito.
+
+- Added a new macro, PySequence_Fast_ITEMS, which retrieves a fast sequence's
+  underlying array of PyObject pointers.  Useful for high speed looping.
+
+- Created a new method flag, METH_COEXIST, which causes a method to be loaded
+  even if already defined by a slot wrapper.  This allows a __contains__
+  method, for example, to co-exist with a defined sq_contains slot.  This
+  is helpful because the PyCFunction can take advantage of optimized calls
+  whenever METH_O or METH_NOARGS flags are defined.
+
+- Added a new function, PyDict_Contains(d, k) which is like
+  PySequence_Contains() but is specific to dictionaries and executes
+  about 10% faster.
+
+- Added three new macros: Py_RETURN_NONE, Py_RETURN_TRUE, and Py_RETURN_FALSE.
+  Each return the singleton they mention after Py_INCREF()ing them.
+
+- Added a new function, PyTuple_Pack(n, ...) for constructing tuples from a
+  variable length argument list of Python objects without having to invoke
+  the more complex machinery of Py_BuildValue().  PyTuple_Pack(3, a, b, c)
+  is equivalent to Py_BuildValue("(OOO)", a, b, c).
+
+Windows
+-------
+
+- The _winreg module could segfault when reading very large registry
+  values, due to unchecked alloca() calls (SF bug 851056).  The fix is
+  uses either PyMem_Malloc(n) or PyString_FromStringAndSize(NULL, n),
+  as appropriate, followed by a size check.
+
+- file.truncate() could misbehave if the file was open for update
+  (modes r+, rb+, w+, wb+), and the most recent file operation before
+  the truncate() call was an input operation.  SF bug 801631.
+
+
+What's New in Python 2.3 final?
+===============================
+
+*Release date: 29-Jul-2003*
+
+IDLE
+----
+
+- Bug 778400:  IDLE hangs when selecting "Edit with IDLE" from explorer.
+  This was unique to Windows, and was fixed by adding an -n switch to
+  the command the Windows installer creates to execute "Edit with IDLE"
+  context-menu actions.
+
+- IDLE displays a new message upon startup:  some "personal firewall"
+  kinds of programs (for example, ZoneAlarm) open a dialog of their
+  own when any program opens a socket.  IDLE does use sockets, talking
+  on the computer's internal loopback interface.  This connection is not
+  visible on any external interface and no data is sent to or received
+  from the Internet.  So, if you get such a dialog when opening IDLE,
+  asking whether to let pythonw.exe talk to address 127.0.0.1, say yes,
+  and rest assured no communication external to your machine is taking
+  place.  If you don't allow it, IDLE won't be able to start.
+
+
+What's New in Python 2.3 release candidate 2?
+=============================================
+
+*Release date: 24-Jul-2003*
+
+Core and builtins
+-----------------
+
+- It is now possible to import from zipfiles containing additional
+  data bytes before the zip compatible archive.  Zipfiles containing a
+  comment at the end are still unsupported.
+
+Extension modules
+-----------------
+
+- A longstanding bug in the parser module's initialization could cause
+  fatal internal refcount confusion when the module got initialized more
+  than once.  This has been fixed.
+
+- Fixed memory leak in pyexpat; using the parser's ParseFile() method
+  with open files that aren't instances of the standard file type
+  caused an instance of the bound .read() method to be leaked on every
+  call.
+
+- Fixed some leaks in the locale module.
+
+Library
+-------
+
+- Lib/encodings/rot_13.py when used as a script, now more properly
+  uses the first Python interpreter on your path.
+
+- Removed caching of TimeRE (and thus LocaleTime) in _strptime.py to
+  fix a locale related bug in the test suite.  Although another patch
+  was needed to actually fix the problem, the cache code was not
+  restored.
+
+IDLE
+----
+
+- Calltips patches.
+
+Build
+-----
+
+- For MacOSX, added -mno-fused-madd to BASECFLAGS to fix test_coercion
+  on Panther (OSX 10.3).
+
+C API
+-----
+
+Windows
+-------
+
+- The tempfile module could do insane imports on Windows if PYTHONCASEOK
+  was set, making temp file creation impossible.  Repaired.
+
+- Add a patch to workaround pthread_sigmask() bugs in Cygwin.
+
+Mac
+---
+
+- Various fixes to pimp.
+
+- Scripts runs with pythonw no longer had full window manager access.
+
+- Don't force boot-disk-only install, for reasons unknown it causes
+  more problems than it solves.
+
+
+What's New in Python 2.3 release candidate 1?
+=============================================
+
+*Release date: 18-Jul-2003*
+
+Core and builtins
+-----------------
+
+- The new function sys.getcheckinterval() returns the last value set
+  by sys.setcheckinterval().
+
+- Several bugs in the symbol table phase of the compiler have been
+  fixed.  Errors could be lost and compilation could fail without
+  reporting an error.  SF patch 763201.
+
+- The interpreter is now more robust about importing the warnings
+  module.  In an executable generated by freeze or similar programs,
+  earlier versions of 2.3 would fail if the warnings module could
+  not be found on the file system.  Fixes SF bug 771097.
+
+- A warning about assignments to module attributes that shadow
+  builtins, present in earlier releases of 2.3, has been removed.
+
+- It is not possible to create subclasses of builtin types like str
+  and tuple that define an itemsize.  Earlier releases of Python 2.3
+  allowed this by mistake, leading to crashes and other problems.
+
+- The thread_id is now initialized to 0 in a non-thread build.  SF bug
+  770247.
+
+- SF bug 762891: "del p[key]" on proxy object no longer raises SystemError.
+
+Extension modules
+-----------------
+
+- weakref.proxy() can now handle "del obj[i]" for proxy objects
+  defining __delitem__.  Formerly, it generated a SystemError.
+
+- SSL no longer crashes the interpreter when the remote side disconnects.
+
+- On Unix the mmap module can again be used to map device files.
+
+- time.strptime now exclusively uses the Python implementation
+  contained within the _strptime module.
+
+- The print slot of weakref proxy objects was removed, because it was
+  not consistent with the object's repr slot.
+
+- The mmap module only checks file size for regular files, not
+  character or block devices.  SF patch 708374.
+
+- The cPickle Pickler garbage collection support was fixed to traverse
+  the find_class attribute, if present.
+
+- There are several fixes for the bsddb3 wrapper module.
+
+  bsddb3 no longer crashes if an environment is closed before a cursor
+  (SF bug 763298).
+
+  The DB and DBEnv set_get_returns_none function was extended to take
+  a level instead of a boolean flag.  The new level 2 means that in
+  addition, cursor.set()/.get() methods return None instead of raising
+  an exception.
+
+  A typo was fixed in DBCursor.join_item(), preventing a crash.
+
+Library
+-------
+
+- distutils now supports MSVC 7.1
+
+- doctest now examines all docstrings by default.  Previously, it would
+  skip over functions with private names (as indicated by the underscore
+  naming convention).  The old default created too much of a risk that
+  user tests were being skipped inadvertently.  Note, this change could
+  break code in the unlikely case that someone had intentionally put
+  failing tests in the docstrings of private functions.  The breakage
+  is easily fixable by specifying the old behavior when calling testmod()
+  or Tester().
+
+- There were several fixes to the way dumbdbms are closed.  It's vital
+  that a dumbdbm database be closed properly, else the on-disk data
+  and directory files can be left in mutually inconsistent states.
+  dumbdbm.py's _Database.__del__() method attempted to close the
+  database properly, but a shutdown race in _Database._commit() could
+  prevent this from working, so that a program trusting __del__() to
+  get the on-disk files in synch could be badly surprised.  The race
+  has been repaired.  A sync() method was also added so that shelve
+  can guarantee data is written to disk.
+
+  The close() method can now be called more than once without complaint.
+
+- The classes in threading.py are now new-style classes.  That they
+  weren't before was an oversight.
+
+- The urllib2 digest authentication handlers now define the correct
+  auth_header.  The earlier versions would fail at runtime.
+
+- SF bug 763023: fix uncaught ZeroDivisionError in difflib ratio methods
+  when there are no lines.
+
+- SF bug 763637: fix exception in Tkinter with after_cancel
+  which could occur with Tk 8.4
+
+- SF bug 770601: CGIHTTPServer.py now passes the entire environment
+  to child processes.
+
+- SF bug 765238: add filter to fnmatch's __all__.
+
+- SF bug 748201: make time.strptime() error messages more helpful.
+
+- SF patch 764470: Do not dump the args attribute of a Fault object in
+  xmlrpclib.
+
+- SF patch 549151: urllib and urllib2 now redirect POSTs on 301
+  responses.
+
+- SF patch 766650: The whichdb module was fixed to recognize dbm files
+  generated by gdbm on OS/2 EMX.
+
+- SF bugs 763047 and 763052: fixes bug of timezone value being left as
+  -1 when ``time.tzname[0] == time.tzname[1] and not time.daylight``
+  is true when it should only when time.daylight is true.
+
+- SF bug 764548: re now allows subclasses of str and unicode to be
+  used as patterns.
+
+- SF bug 763637: In Tkinter, change after_cancel() to handle tuples
+  of varying sizes.  Tk 8.4 returns a different number of values
+  than Tk 8.3.
+
+- SF bug 763023: difflib.ratio() did not catch zero division.
+
+- The Queue module now has an __all__ attribute.
+
+Tools/Demos
+-----------
+
+- See Lib/idlelib/NEWS.txt for IDLE news.
+
+- SF bug 753592: webchecker/wsgui now handles user supplied directories.
+
+- The trace.py script has been removed.  It is now in the standard library.
+
+Build
+-----
+
+- Python now compiles with -fno-strict-aliasing if possible (SF bug 766696).
+
+- The socket module compiles on IRIX 6.5.10.
+
+- An irix64 system is treated the same way as an irix6 system (SF
+  patch 764560).
+
+- Several definitions were missing on FreeBSD 5.x unless the
+  __BSD_VISIBLE symbol was defined.  configure now defines it as
+  needed.
+
+C API
+-----
+
+- Unicode objects now support mbcs as a built-in encoding, so the C
+  API can use it without deferring to the encodings package.
+
+Windows
+-------
+
+- The Windows implementation of PyThread_start_new_thread() never
+  checked error returns from Windows functions correctly.  As a result,
+  it could claim to start a new thread even when the Microsoft
+  _beginthread() function failed (due to "too many threads" -- this is
+  on the order of thousands when it happens).  In these cases, the
+  Python exception ::
+
+      thread.error: can't start new thread
+
+  is raised now.
+
+- SF bug 766669: Prevent a GPF on interpreter exit when sockets are in
+  use.  The interpreter now calls WSACleanup() from Py_Finalize()
+  instead of from DLL teardown.
+
+Mac
+---
+
+- Bundlebuilder now inherits default values in the right way.  It was
+  previously possible for app bundles to get a type of "BNDL" instead
+  of "APPL."  Other improvements include, a --build-id option to
+  specify the CFBundleIdentifier and using the --python option to set
+  the executable in the bundle.
+
+- Fixed two bugs in MacOSX framework handling.
+
+- pythonw did not allow user interaction in 2.3rc1, this has been fixed.
+
+- Python is now compiled with -mno-fused-madd, making all tests pass
+  on Panther.
+
+What's New in Python 2.3 beta 2?
+================================
+
+*Release date: 29-Jun-2003*
+
+Core and builtins
+-----------------
+
+- A program can now set the environment variable PYTHONINSPECT to some
+  string value in Python, and cause the interpreter to enter the
+  interactive prompt at program exit, as if Python had been invoked
+  with the -i option.
+
+- list.index() now accepts optional start and stop arguments.  Similar
+  changes were made to UserList.index(). SF feature request 754014.
+
+- SF patch 751998 fixes an unwanted side effect of the previous fix
+  for SF bug 742860 (the next item).
+
+- SF bug 742860: "WeakKeyDictionary __delitem__ uses iterkeys".  This
+  wasn't threadsafe, was very inefficient (expected time O(len(dict))
+  instead of O(1)), and could raise a spurious RuntimeError if another
+  thread mutated the dict during __delitem__, or if a comparison function
+  mutated it.  It also neglected to raise KeyError when the key wasn't
+  present; didn't raise TypeError when the key wasn't of a weakly
+  referencable type; and broke various more-or-less obscure dict
+  invariants by using a sequence of equality comparisons over the whole
+  set of dict keys instead of computing the key's hash code to narrow
+  the search to those keys with the same hash code.  All of these are
+  considered to be bugs.  A new implementation of __delitem__ repairs all
+  that, but note that fixing these bugs may change visible behavior in
+  code relying (whether intentionally or accidentally) on old behavior.
+
+- SF bug 734869: Fixed a compiler bug that caused a fatal error when
+  compiling a list comprehension that contained another list comprehension
+  embedded in a lambda expression.
+
+- SF bug 705231:  builtin pow() no longer lets the platform C pow()
+  raise -1.0 to integer powers, because (at least) glibc gets it wrong
+  in some cases.  The result should be -1.0 if the power is odd and 1.0
+  if the power is even, and any float with a sufficiently large exponent
+  is (mathematically) an exact even integer.
+
+- SF bug 759227: A new-style class that implements __nonzero__() must
+  return a bool or int (but not an int subclass) from that method.  This
+  matches the restriction on classic classes.
+
+- The encoding attribute has been added for file objects, and set to
+  the terminal encoding on Unix and Windows.
+
+- The softspace attribute of file objects became read-only by oversight.
+  It's writable again.
+
+- Reverted a 2.3 beta 1 change to iterators for subclasses of list and
+  tuple.  By default, the iterators now access data elements directly
+  instead of going through __getitem__.  If __getitem__ access is
+  preferred, then __iter__ can be overridden.
+
+- SF bug 735247: The staticmethod and super types participate in
+  garbage collection. Before this change, it was possible for leaks to
+  occur in functions with non-global free variables that used these types.
+
+Extension modules
+-----------------
+
+- the socket module has a new exception, socket.timeout, to allow
+  timeouts to be handled separately from other socket errors.
+
+- SF bug 751276: cPickle has fixed to propagate exceptions raised in
+  user code.  In earlier versions, cPickle caught and ignored any
+  exception when it performed operations that it expected to raise
+  specific exceptions like AttributeError.
+
+- cPickle Pickler and Unpickler objects now participate in garbage
+  collection.
+
+- mimetools.choose_boundary() could return duplicate strings at times,
+  especially likely on Windows.  The strings returned are now guaranteed
+  unique within a single program run.
+
+- thread.interrupt_main() raises KeyboardInterrupt in the main thread.
+  dummy_thread has also been modified to try to simulate the behavior.
+
+- array.array.insert() now treats negative indices as being relative
+  to the end of the array, just like list.insert() does. (SF bug #739313)
+
+- The datetime module classes datetime, time, and timedelta are now
+  properly subclassable.
+
+- _tkinter.{get|set}busywaitinterval was added.
+
+- itertools.islice() now accepts stop=None as documented.
+  Fixes SF bug #730685.
+
+- the bsddb185 module is built in one restricted instance -
+  /usr/include/db.h exists and defines HASHVERSION to be 2.  This is true
+  for many BSD-derived systems.
+
+
+Library
+-------
+
+- Some happy doctest extensions from Jim Fulton have been added to
+  doctest.py.  These are already being used in Zope3.  The two
+  primary ones:
+
+  doctest.debug(module, name) extracts the doctests from the named object
+  in the given module, puts them in a temp file, and starts pdb running
+  on that file.  This is great when a doctest fails.
+
+  doctest.DocTestSuite(module=None) returns a synthesized unittest
+  TestSuite instance, to be run by the unittest framework, which
+  runs all the doctests in the module.  This allows writing tests in
+  doctest style (which can be clearer and shorter than writing tests
+  in unittest style), without losing unittest's powerful testing
+  framework features (which doctest lacks).
+
+- For compatibility with doctests created before 2.3, if an expected
+  output block consists solely of "1" and the actual output block
+  consists solely of "True", it's accepted as a match; similarly
+  for "0" and "False".  This is quite un-doctest-like, but is practical.
+  The behavior can be disabled by passing the new doctest module
+  constant DONT_ACCEPT_TRUE_FOR_1 to the new optionflags optional
+  argument.
+
+- ZipFile.testzip() now only traps BadZipfile exceptions.  Previously,
+  a bare except caught to much and reported all errors as a problem
+  in the archive.
+
+- The logging module now has a new function, makeLogRecord() making
+  LogHandler easier to interact with DatagramHandler and SocketHandler.
+
+- The cgitb module has been extended to support plain text display (SF patch
+  569574).
+
+- A brand new version of IDLE (from the IDLEfork project at
+  SourceForge) is now included as Lib/idlelib.  The old Tools/idle is
+  no more.
+
+- Added a new module: trace (documentation missing).  This module used
+  to be distributed in Tools/scripts.  It uses sys.settrace() to trace
+  code execution -- either function calls or individual lines.  It can
+  generate tracing output during execution or a post-mortem report of
+  code coverage.
+
+- The threading module has new functions settrace() and setprofile()
+  that cooperate with the functions of the same name in the sys
+  module.  A function registered with the threading module will
+  be used for all threads it creates.  The new trace module uses this
+  to provide tracing for code running in threads.
+
+- copy.py: applied SF patch 707900, fixing bug 702858, by Steven
+  Taschuk.  Copying a new-style class that had a reference to itself
+  didn't work.  (The same thing worked fine for old-style classes.)
+  Builtin functions are now treated as atomic, fixing bug #746304.
+
+- difflib.py has two new functions:  context_diff() and unified_diff().
+
+- More fixes to urllib (SF 549151): (a) When redirecting, always use
+  GET.  This is common practice and more-or-less sanctioned by the
+  HTTP standard. (b) Add a handler for 307 redirection, which becomes
+  an error for POST, but a regular redirect for GET and HEAD
+
+- Added optional 'onerror' argument to os.walk(), to control error
+  handling.
+
+- inspect.is{method|data}descriptor was added, to allow pydoc display
+  __doc__ of data descriptors.
+
+- Fixed socket speed loss caused by use of the _socketobject wrapper class
+  in socket.py.
+
+- timeit.py now checks the current directory for imports.
+
+- urllib2.py now knows how to order proxy classes, so the user doesn't
+  have to insert it in front of other classes, nor do dirty tricks like
+  inserting a "dummy" HTTPHandler after a ProxyHandler when building an
+  opener with proxy support.
+
+- Iterators have been added for dbm keys.
+
+- random.Random objects can now be pickled.
+
+Tools/Demos
+-----------
+
+- pydoc now offers help on keywords and topics.
+
+- Tools/idle is gone; long live Lib/idlelib.
+
+- diff.py prints file diffs in context, unified, or ndiff formats,
+  providing a command line interface to difflib.py.
+
+- texcheck.py is a new script for making a rough validation of Python LaTeX
+  files.
+
+Build
+-----
+
+- Setting DESTDIR during 'make install' now allows specifying a
+  different root directory.
+
+C API
+-----
+
+- PyType_Ready():  If a type declares that it participates in gc
+  (Py_TPFLAGS_HAVE_GC), and its base class does not, and its base class's
+  tp_free slot is the default _PyObject_Del, and type does not define
+  a tp_free slot itself, _PyObject_GC_Del is assigned to type->tp_free.
+  Previously _PyObject_Del was inherited, which could at best lead to a
+  segfault.  In addition, if even after this magic the type's tp_free
+  slot is _PyObject_Del or NULL, and the type is a base type
+  (Py_TPFLAGS_BASETYPE), TypeError is raised:  since the type is a base
+  type, its dealloc function must call type->tp_free, and since the type
+  is gc'able, tp_free must not be NULL or _PyObject_Del.
+
+- PyThreadState_SetAsyncExc(): A new API (deliberately accessible only
+  from C) to interrupt a thread by sending it an exception.  It is
+  intentional that you have to write your own C extension to call it
+  from Python.
+
+
+New platforms
+-------------
+
+None this time.
+
+Tests
+-----
+
+- test_imp rewritten so that it doesn't raise RuntimeError if run as a
+  side effect of being imported ("import test.autotest").
+
+Windows
+-------
+
+- The Windows installer ships with Tcl/Tk 8.4.3 (upgraded from 8.4.1).
+
+- The installer always suggested that Python be installed on the C:
+  drive, due to a hardcoded "C:" generated by the Wise installation
+  wizard.  People with machines where C: is not the system drive
+  usually want Python installed on whichever drive is their system drive
+  instead.  We removed the hardcoded "C:", and two testers on machines
+  where C: is not the system drive report that the installer now
+  suggests their system drive.  Note that you can always select the
+  directory you want in the "Select Destination Directory" dialog --
+  that's what it's for.
+
+Mac
+---
+
+- There's a new module called "autoGIL", which offers a mechanism to
+  automatically release the Global Interpreter Lock when an event loop
+  goes to sleep, allowing other threads to run. It's currently only
+  supported on OSX, in the Mach-O version.
+- The OSA modules now allow direct access to properties of the
+  toplevel application class (in AppleScript terminology).
+- The Package Manager can now update itself.
+
+SourceForge Bugs and Patches Applied
+------------------------------------
+
+430160, 471893, 501716, 542562, 549151, 569574, 595837, 596434,
+598163, 604210, 604716, 610332, 612627, 614770, 620190, 621891,
+622042, 639139, 640236, 644345, 649742, 649742, 658233, 660022,
+661318, 661676, 662807, 662923, 666219, 672855, 678325, 682347,
+683486, 684981, 685773, 686254, 692776, 692959, 693094, 696777,
+697989, 700827, 703666, 708495, 708604, 708901, 710733, 711902,
+713722, 715782, 718286, 719359, 719367, 723136, 723831, 723962,
+724588, 724767, 724767, 725942, 726150, 726446, 726869, 727051,
+727719, 727719, 727805, 728277, 728563, 728656, 729096, 729103,
+729293, 729297, 729300, 729317, 729395, 729622, 729817, 730170,
+730296, 730594, 730685, 730826, 730963, 731209, 731403, 731504,
+731514, 731626, 731635, 731643, 731644, 731644, 731689, 732124,
+732143, 732234, 732284, 732284, 732479, 732761, 732783, 732951,
+733667, 733781, 734118, 734231, 734869, 735051, 735293, 735527,
+735613, 735694, 736962, 736962, 737970, 738066, 739313, 740055,
+740234, 740301, 741806, 742126, 742741, 742860, 742860, 742911,
+744041, 744104, 744238, 744687, 744877, 745055, 745478, 745525,
+745620, 746012, 746304, 746366, 746801, 746953, 747348, 747667,
+747954, 748846, 748849, 748973, 748975, 749191, 749210, 749759,
+749831, 749911, 750008, 750092, 750542, 750595, 751038, 751107,
+751276, 751451, 751916, 751941, 751956, 751998, 752671, 753451,
+753602, 753617, 753845, 753925, 754014, 754340, 754447, 755031,
+755087, 755147, 755245, 755683, 755987, 756032, 756996, 757058,
+757229, 757818, 757821, 757822, 758112, 758910, 759227, 759889,
+760257, 760703, 760792, 761104, 761337, 761519, 761830, 762455
+
+
+What's New in Python 2.3 beta 1?
+================================
+
+*Release date: 25-Apr-2003*
+
+Core and builtins
+-----------------
+
+- New format codes B, H, I, k and K have been implemented for
+  PyArg_ParseTuple and PyBuild_Value.
+
+- New builtin function sum(seq, start=0) returns the sum of all the
+  items in iterable object seq, plus start (items are normally numbers,
+  and cannot be strings).
+
+- bool() called without arguments now returns False rather than
+  raising an exception.  This is consistent with calling the
+  constructors for the other builtin types -- called without argument
+  they all return the false value of that type.  (SF patch #724135)
+
+- In support of PEP 269 (making the pgen parser generator accessible
+  from Python), some changes to the pgen code structure were made; a
+  few files that used to be linked only with pgen are now linked with
+  Python itself.
+
+- The repr() of a weakref object now shows the __name__ attribute of
+  the referenced object, if it has one.
+
+- super() no longer ignores data descriptors, except __class__.  See
+  the thread started at
+  http://mail.python.org/pipermail/python-dev/2003-April/034338.html
+
+- list.insert(i, x) now interprets negative i as it would be
+  interpreted by slicing, so negative values count from the end of the
+  list.  This was the only place where such an interpretation was not
+  placed on a list index.
+
+- range() now works even if the arguments are longs with magnitude
+  larger than sys.maxint, as long as the total length of the sequence
+  fits.  E.g., range(2**100, 2**101, 2**100) is the following list:
+  [1267650600228229401496703205376L].  (SF patch #707427.)
+
+- Some horridly obscure problems were fixed involving interaction
+  between garbage collection and old-style classes with "ambitious"
+  getattr hooks.  If an old-style instance didn't have a __del__ method,
+  but did have a __getattr__ hook, and the instance became reachable
+  only from an unreachable cycle, and the hook resurrected or deleted
+  unreachable objects when asked to resolve "__del__", anything up to
+  a segfault could happen.  That's been repaired.
+
+- dict.pop now takes an optional argument specifying a default
+  value to return if the key is not in the dict.  If a default is not
+  given and the key is not found, a KeyError will still be raised.
+  Parallel changes were made to UserDict.UserDict and UserDict.DictMixin.
+  [SF patch #693753] (contributed by Michael Stone.)
+
+- sys.getfilesystemencoding() was added to expose
+  Py_FileSystemDefaultEncoding.
+
+- New function sys.exc_clear() clears the current exception.  This is
+  rarely needed, but can sometimes be useful to release objects
+  referenced by the traceback held in sys.exc_info()[2].  (SF patch
+  #693195.)
+
+- On 64-bit systems, a dictionary could contain duplicate long/int keys
+  if the key value was larger than 2**32.  See SF bug #689659.
+
+- Fixed SF bug #663074. The codec system was using global static
+  variables to store internal data. As a result, any attempts to use the
+  unicode system with multiple active interpreters, or successive
+  interpreter executions, would fail.
+
+- "%c" % u"a" now returns a unicode string instead of raising a
+  TypeError. u"%c" % 0xffffffff now raises a OverflowError instead
+  of a ValueError to be consistent with "%c" % 256. See SF patch #710127.
+
+Extension modules
+-----------------
+
+- The socket module now provides the functions inet_pton and inet_ntop
+  for converting between string and packed representation of IP
+  addresses.  There is also a new module variable, has_ipv6, which is
+  True iff the current Python has IPv6 support.  See SF patch #658327.
+
+- Tkinter wrappers around Tcl variables now pass objects directly
+  to Tcl, instead of first converting them to strings.
+
+- The .*? pattern in the re module is now special-cased to avoid the
+  recursion limit.  (SF patch #720991 -- many thanks to Gary Herron
+  and Greg Chapman.)
+
+- New function sys.call_tracing() allows pdb to debug code
+  recursively.
+
+- New function gc.get_referents(obj) returns a list of objects
+  directly referenced by obj.  In effect, it exposes what the object's
+  tp_traverse slot does, and can be helpful when debugging memory
+  leaks.
+
+- The iconv module has been removed from this release.
+
+- The platform-independent routines for packing floats in IEEE formats
+  (struct.pack's <f, >f, <d, and >d codes; pickle and cPickle's protocol 1
+  pickling of floats) ignored that rounding can cause a carry to
+  propagate.  The worst consequence was that, in rare cases, <f and >f
+  could produce strings that, when unpacked again, were a factor of 2
+  away from the original float.  This has been fixed.  See SF bug
+  #705836.
+
+- New function time.tzset() provides access to the C library tzset()
+  function, if supported.  (SF patch #675422.)
+
+- Using createfilehandler, deletefilehandler, createtimerhandler functions
+  on Tkinter.tkinter (_tkinter module) no longer crashes the interpreter.
+  See SF bug #692416.
+
+- Modified the fcntl.ioctl() function to allow modification of a passed
+  mutable buffer (for details see the reference documentation).
+
+- Made user requested changes to the itertools module.
+  Subsumed the times() function into repeat().
+  Added chain() and cycle().
+
+- The rotor module is now deprecated; the encryption algorithm it uses
+  is not believed to be secure, and including crypto code with Python
+  has implications for exporting and importing it in various countries.
+
+- The socket module now always uses the _socketobject wrapper class, even on
+  platforms which have dup(2).  The makefile() method is built directly
+  on top of the socket without duplicating the file descriptor, allowing
+  timeouts to work properly.
+
+Library
+-------
+
+- New generator function os.walk() is an easy-to-use alternative to
+  os.path.walk().  See os module docs for details.  os.path.walk()
+  isn't deprecated at this time, but may become deprecated in a
+  future release.
+
+- Added new module "platform" which provides a wide range of tools
+  for querying platform dependent features.
+
+- netrc now allows ASCII punctuation characters in passwords.
+
+- shelve now supports the optional writeback argument, and exposes
+  pickle protocol versions.
+
+- Several methods of nntplib.NNTP have grown an optional file argument
+  which specifies a file where to divert the command's output
+  (already supported by the body() method).  (SF patch #720468)
+
+- The self-documenting XML server library DocXMLRPCServer was added.
+
+- Support for internationalized domain names has been added through
+  the 'idna' and 'punycode' encodings, the 'stringprep' module, the
+  'mkstringprep' tool, and enhancements to the socket and httplib
+  modules.
+
+- htmlentitydefs has two new dictionaries: name2codepoint maps
+  HTML entity names to Unicode codepoints (as integers).
+  codepoint2name is the reverse mapping. See SF patch #722017.
+
+- pdb has a new command, "debug", which lets you step through
+  arbitrary code from the debugger's (pdb) prompt.
+
+- unittest.failUnlessEqual and its equivalent unittest.assertEqual now
+  return 'not a == b' rather than 'a != b'.  This gives the desired
+  result for classes that define __eq__ without defining __ne__.
+
+- sgmllib now supports SGML marked sections, in particular the
+  MS Office extensions.
+
+- The urllib module now offers support for the iterator protocol.
+  SF patch 698520 contributed by Brett Cannon.
+
+- New module timeit provides a simple framework for timing the
+  execution speed of expressions and statements.
+
+- sets.Set objects now support mixed-type __eq__ and __ne__, instead
+  of raising TypeError.  If x is a Set object and y is a non-Set object,
+  x == y is False, and x != y is True.  This is akin to the change made
+  for mixed-type comparisons of datetime objects in 2.3a2; more info
+  about the rationale is in the NEWS entry for that.  See also SF bug
+  report <http://www.python.org/sf/693121>.
+
+- On Unix platforms, if os.listdir() is called with a Unicode argument,
+  it now returns Unicode strings.  (This behavior was added earlier
+  to the Windows NT/2k/XP version of os.listdir().)
+
+- Distutils: both 'py_modules' and 'packages' keywords can now be specified
+  in core.setup().  Previously you could supply one or the other, but
+  not both of them.  (SF patch #695090 from Bernhard Herzog)
+
+- New csv package makes it easy to read/write CSV files.
+
+- Module shlex has been extended to allow posix-like shell parsings,
+  including a split() function for easy spliting of quoted strings and
+  commands. An iterator interface was also implemented.
+
+Tools/Demos
+-----------
+
+- New script combinerefs.py helps analyze new PYTHONDUMPREFS output.
+  See the module docstring for details.
+
+Build
+-----
+
+- Fix problem building on OSF1 because the compiler only accepted
+  preprocessor directives that start in column 1.  (SF bug #691793.)
+
+C API
+-----
+
+- Added PyGC_Collect(), equivalent to calling gc.collect().
+
+- PyThreadState_GetDict() was changed not to raise an exception or
+  issue a fatal error when no current thread state is available.  This
+  makes it possible to print dictionaries when no thread is active.
+
+- LONG_LONG was renamed to PY_LONG_LONG.  Extensions that use this and
+  need compatibility with previous versions can use this:
+
+    #ifndef  PY_LONG_LONG
+    #define  PY_LONG_LONG  LONG_LONG
+    #endif
+
+- Added PyObject_SelfIter() to fill the tp_iter slot for the
+  typical case where the method returns its self argument.
+
+- The extended type structure used for heap types (new-style
+  classes defined by Python code using a class statement) is now
+  exported from object.h as PyHeapTypeObject.  (SF patch #696193.)
+
+New platforms
+-------------
+
+None this time.
+
+Tests
+-----
+
+- test_timeout now requires -u network to be passed to regrtest to run.
+  See SF bug #692988.
+
+Windows
+-------
+
+- os.fsync() now exists on Windows, and calls the Microsoft _commit()
+  function.
+
+- New function winsound.MessageBeep() wraps the Win32 API
+  MessageBeep().
+
+Mac
+---
+
+- os.listdir() now returns Unicode strings on MacOS X when called with
+  a Unicode argument. See the general news item under "Library".
+
+- A new method MacOS.WMAvailable() returns true if it is safe to access
+  the window manager, false otherwise.
+
+- EasyDialogs dialogs are now movable-modal, and if the application is
+  currently in the background they will ask to be moved to the foreground
+  before displaying.
+
+- OSA Scripting support has improved a lot, and gensuitemodule.py can now
+  be used by mere mortals. The documentation is now also more or less
+  complete.
+
+- The IDE (in a framework build) now includes introductory documentation
+  in Apple Help Viewer format.
+
+
+What's New in Python 2.3 alpha 2?
+=================================
+
+*Release date: 19-Feb-2003*
+
+Core and builtins
+-----------------
+
+- Negative positions returned from PEP 293 error callbacks are now
+  treated as being relative to the end of the input string. Positions
+  that are out of bounds raise an IndexError.
+
+- sys.path[0] (the directory from which the script is loaded) is now
+  turned into an absolute pathname, unless it is the empty string.
+  (SF patch #664376.)
+
+- Finally fixed the bug in compile() and exec where a string ending
+  with an indented code block but no newline would raise SyntaxError.
+  This would have been a four-line change in parsetok.c...  Except
+  codeop.py depends on this behavior, so a compilation flag had to be
+  invented that causes the tokenizer to revert to the old behavior;
+  this required extra changes to 2 .h files, 2 .c files, and 2 .py
+  files.  (Fixes SF bug #501622.)
+
+- If a new-style class defines neither __new__ nor __init__, its
+  constructor would ignore all arguments.  This is changed now: the
+  constructor refuses arguments in this case.  This might break code
+  that worked under Python 2.2.  The simplest fix is to add a no-op
+  __init__: ``def __init__(self, *args, **kw): pass``.
+
+- Through a bytecode optimizer bug (and I bet you didn't even know
+  Python *had* a bytecode optimizer :-), "unsigned" hex/oct constants
+  with a leading minus sign would come out with the wrong sign.
+  ("Unsigned" hex/oct constants are those with a face value in the
+  range sys.maxint+1 through sys.maxint*2+1, inclusive; these have
+  always been interpreted as negative numbers through sign folding.)
+  E.g. 0xffffffff is -1, and -(0xffffffff) is 1, but -0xffffffff would
+  come out as -4294967295.  This was the case in Python 2.2 through
+  2.2.2 and 2.3a1, and in Python 2.4 it will once again have that
+  value, but according to PEP 237 it really needs to be 1 now.  This
+  will be backported to Python 2.2.3 a well.  (SF #660455)
+
+- int(s, base) sometimes sign-folds hex and oct constants; it only
+  does this when base is 0 and s.strip() starts with a '0'.  When the
+  sign is actually folded, as in int("0xffffffff", 0) on a 32-bit
+  machine, which returns -1, a FutureWarning is now issued; in Python
+  2.4, this will return 4294967295L, as do int("+0xffffffff", 0) and
+  int("0xffffffff", 16) right now.  (PEP 347)
+
+- super(X, x): x may now be a proxy for an X instance, i.e.
+  issubclass(x.__class__, X) but not issubclass(type(x), X).
+
+- isinstance(x, X): if X is a new-style class, this is now equivalent
+  to issubclass(type(x), X) or issubclass(x.__class__, X).  Previously
+  only type(x) was tested.  (For classic classes this was already the
+  case.)
+
+- compile(), eval() and the exec statement now fully support source code
+  passed as unicode strings.
+
+- int subclasses can be initialized with longs if the value fits in an int.
+  See SF bug #683467.
+
+- long(string, base) takes time linear in len(string) when base is a power
+  of 2 now.  It used to take time quadratic in len(string).
+
+- filter returns now Unicode results for Unicode arguments.
+
+- raw_input can now return Unicode objects.
+
+- List objects' sort() method now accepts None as the comparison function.
+  Passing None is semantically identical to calling sort() with no
+  arguments.
+
+- Fixed crash when printing a subclass of str and __str__ returned self.
+  See SF bug #667147.
+
+- Fixed an invalid RuntimeWarning and an undetected error when trying
+  to convert a long integer into a float which couldn't fit.
+  See SF bug #676155.
+
+- Function objects now have a __module__ attribute that is bound to
+  the name of the module in which the function was defined.  This
+  applies for C functions and methods as well as functions and methods
+  defined in Python.  This attribute is used by pickle.whichmodule(),
+  which changes the behavior of whichmodule slightly.  In Python 2.2
+  whichmodule() returns "__main__" for functions that are not defined
+  at the top-level of a module (examples: methods, nested functions).
+  Now whichmodule() will return the proper module name.
+
+Extension modules
+-----------------
+
+- operator.isNumberType() now checks that the object has a nb_int or
+  nb_float slot, rather than simply checking whether it has a non-NULL
+  tp_as_number pointer.
+
+- The imp module now has ways to acquire and release the "import
+  lock": imp.acquire_lock() and imp.release_lock().  Note: this is a
+  reentrant lock, so releasing the lock only truly releases it when
+  this is the last release_lock() call.  You can check with
+  imp.lock_held().  (SF bug #580952 and patch #683257.)
+
+- Change to cPickle to match pickle.py (see below and PEP 307).
+
+- Fix some bugs in the parser module.  SF bug #678518.
+
+- Thanks to Scott David Daniels, a subtle bug in how the zlib
+  extension implemented flush() was fixed.  Scott also rewrote the
+  zlib test suite using the unittest module.  (SF bug #640230 and
+  patch #678531.)
+
+- Added an itertools module containing high speed, memory efficient
+  looping constructs inspired by tools from Haskell and SML.
+
+- The SSL module now handles sockets with a timeout set correctly (SF
+  patch #675750, fixing SF bug #675552).
+
+- os/posixmodule has grown the sysexits.h constants (EX_OK and friends).
+
+- Fixed broken threadstate swap in readline that could cause fatal
+  errors when a readline hook was being invoked while a background
+  thread was active.  (SF bugs #660476 and #513033.)
+
+- fcntl now exposes the strops.h I_* constants.
+
+- Fix a crash on Solaris that occurred when calling close() on
+  an mmap'ed file which was already closed.  (SF patch #665913)
+
+- Fixed several serious bugs in the zipimport implementation.
+
+- datetime changes:
+
+  The date class is now properly subclassable.  (SF bug #720908)
+
+  The datetime and datetimetz classes have been collapsed into a single
+  datetime class, and likewise the time and timetz classes into a single
+  time class.  Previously, a datetimetz object with tzinfo=None acted
+  exactly like a datetime object, and similarly for timetz.  This wasn't
+  enough of a difference to justify distinct classes, and life is simpler
+  now.
+
+  today() and now() now round system timestamps to the closest
+  microsecond <http://www.python.org/sf/661086>.  This repairs an
+  irritation most likely seen on Windows systems.
+
+  In dt.astimezone(tz), if tz.utcoffset(dt) returns a duration,
+  ValueError is raised if tz.dst(dt) returns None (2.3a1 treated it
+  as 0 instead, but a tzinfo subclass wishing to participate in
+  time zone conversion has to take a stand on whether it supports
+  DST; if you don't care about DST, then code dst() to return 0 minutes,
+  meaning that DST is never in effect).
+
+  The tzinfo methods utcoffset() and dst() must return a timedelta object
+  (or None) now.  In 2.3a1 they could also return an int or long, but that
+  was an unhelpfully redundant leftover from an earlier version wherein
+  they couldn't return a timedelta.  TOOWTDI.
+
+  The example tzinfo class for local time had a bug.  It was replaced
+  by a later example coded by Guido.
+
+  datetime.astimezone(tz) no longer raises an exception when the
+  input datetime has no UTC equivalent in tz.  For typical "hybrid" time
+  zones (a single tzinfo subclass modeling both standard and daylight
+  time), this case can arise one hour per year, at the hour daylight time
+  ends.  See new docs for details.  In short, the new behavior mimics
+  the local wall clock's behavior of repeating an hour in local time.
+
+  dt.astimezone() can no longer be used to convert between naive and aware
+  datetime objects.  If you merely want to attach, or remove, a tzinfo
+  object, without any conversion of date and time members, use
+  dt.replace(tzinfo=whatever) instead, where "whatever" is None or a
+  tzinfo subclass instance.
+
+  A new method tzinfo.fromutc(dt) can be overridden in tzinfo subclasses
+  to give complete control over how a UTC time is to be converted to
+  a local time.  The default astimezone() implementation calls fromutc()
+  as its last step, so a tzinfo subclass can affect that too by overriding
+  fromutc().  It's expected that the default fromutc() implementation will
+  be suitable as-is for "almost all" time zone subclasses, but the
+  creativity of political time zone fiddling appears unbounded -- fromutc()
+  allows the highly motivated to emulate any scheme expressible in Python.
+
+  datetime.now():  The optional tzinfo argument was undocumented (that's
+  repaired), and its name was changed to tz ("tzinfo" is overloaded enough
+  already).  With a tz argument, now(tz) used to return the local date
+  and time, and attach tz to it, without any conversion of date and time
+  members.  This was less than useful.  Now now(tz) returns the current
+  date and time as local time in tz's time zone, akin to ::
+
+      tz.fromutc(datetime.utcnow().replace(tzinfo=utc))
+
+  where "utc" is an instance of a tzinfo subclass modeling UTC.  Without
+  a tz argument, now() continues to return the current local date and time,
+  as a naive datetime object.
+
+  datetime.fromtimestamp():  Like datetime.now() above, this had less than
+  useful behavior when the optional tinzo argument was specified.  See
+  also SF bug report <http://www.python.org/sf/660872>.
+
+  date and datetime comparison:  In order to prevent comparison from
+  falling back to the default compare-object-addresses strategy, these
+  raised TypeError whenever they didn't understand the other object type.
+  They still do, except when the other object has a "timetuple" attribute,
+  in which case they return NotImplemented now.  This gives other
+  datetime objects (e.g., mxDateTime) a chance to intercept the
+  comparison.
+
+  date, time, datetime and timedelta comparison:  When the exception
+  for mixed-type comparisons in the last paragraph doesn't apply, if
+  the comparison is == then False is returned, and if the comparison is
+  != then True is returned.  Because dict lookup and the "in" operator
+  only invoke __eq__, this allows, for example, ::
+
+      if some_datetime in some_sequence:
+
+  and ::
+
+      some_dict[some_timedelta] = whatever
+
+  to work as expected, without raising TypeError just because the
+  sequence is heterogeneous, or the dict has mixed-type keys.  [This
+  seems like a good idea to implement for all mixed-type comparisons
+  that don't want to allow falling back to address comparison.]
+
+  The constructors building a datetime from a timestamp could raise
+  ValueError if the platform C localtime()/gmtime() inserted "leap
+  seconds".  Leap seconds are ignored now.  On such platforms, it's
+  possible to have timestamps that differ by a second, yet where
+  datetimes constructed from them are equal.
+
+  The pickle format of date, time and datetime objects has changed
+  completely.  The undocumented pickler and unpickler functions no
+  longer exist.  The undocumented __setstate__() and __getstate__()
+  methods no longer exist either.
+
+Library
+-------
+
+- The logging module was updated slightly; the WARN level was renamed
+  to WARNING, and the matching function/method warn() to warning().
+
+- The pickle and cPickle modules were updated with a new pickling
+  protocol (documented by pickletools.py, see below) and several
+  extensions to the pickle customization API (__reduce__, __setstate__
+  etc.).  The copy module now uses more of the pickle customization
+  API to copy objects that don't implement __copy__ or __deepcopy__.
+  See PEP 307 for details.
+
+- The distutils "register" command now uses http://www.python.org/pypi
+  as the default repository.  (See PEP 301.)
+
+- the platform dependent path related variables sep, altsep, extsep,
+  pathsep, curdir, pardir and defpath are now defined in the platform
+  dependent path modules (e.g. ntpath.py) rather than os.py, so these
+  variables are now available via os.path.  They continue to be
+  available from the os module.
+  (see <http://www.python.org/sf/680789>).
+
+- array.array was added to the types repr.py knows about (see
+  <http://www.python.org/sf/680789>).
+
+- The new pickletools.py contains lots of documentation about pickle
+  internals, and supplies some helpers for working with pickles, such as
+  a symbolic pickle disassembler.
+
+- Xmlrpclib.py now supports the builtin boolean type.
+
+- py_compile has a new 'doraise' flag and a new PyCompileError
+  exception.
+
+- SimpleXMLRPCServer now supports CGI through the CGIXMLRPCRequestHandler
+  class.
+
+- The sets module now raises TypeError in __cmp__, to clarify that
+  sets are not intended to be three-way-compared; the comparison
+  operators are overloaded as subset/superset tests.
+
+- Bastion.py and rexec.py are disabled.  These modules are not safe in
+  Python 2.2. or 2.3.
+
+- realpath is now exported when doing ``from poxixpath import *``.
+  It is also exported for ntpath, macpath, and os2emxpath.
+  See SF bug #659228.
+
+- New module tarfile from Lars Gustäbel provides a comprehensive interface
+  to tar archive files with transparent gzip and bzip2 compression.
+  See SF patch #651082.
+
+- urlparse can now parse imap:// URLs.  See SF feature request #618024.
+
+- Tkinter.Canvas.scan_dragto() provides an optional parameter to support
+  the gain value which is passed to Tk.  SF bug# 602259.
+
+- Fix logging.handlers.SysLogHandler protocol when using UNIX domain sockets.
+  See SF patch #642974.
+
+- The dospath module was deleted.  Use the ntpath module when manipulating
+  DOS paths from other platforms.
+
+Tools/Demos
+-----------
+
+- Two new scripts (db2pickle.py and pickle2db.py) were added to the
+  Tools/scripts directory to facilitate conversion from the old bsddb module
+  to the new one.  While the user-visible API of the new module is
+  compatible with the old one, it's likely that the version of the
+  underlying database library has changed.  To convert from the old library,
+  run the db2pickle.py script using the old version of Python to convert it
+  to a pickle file.  After upgrading Python, run the pickle2db.py script
+  using the new version of Python to reconstitute your database.  For
+  example:
+
+    % python2.2 db2pickle.py -h some.db > some.pickle
+    % python2.3 pickle2db.py -h some.db.new < some.pickle
+
+  Run the scripts without any args to get a usage message.
+
+
+Build
+-----
+
+- The audio driver tests (test_ossaudiodev.py and
+  test_linuxaudiodev.py) are no longer run by default.  This is
+  because they don't always work, depending on your hardware and
+  software.  To run these tests, you must use an invocation like ::
+
+    ./python Lib/test/regrtest.py -u audio test_ossaudiodev
+
+- On systems which build using the configure script, compiler flags which
+  used to be lumped together using the OPT flag have been split into two
+  groups, OPT and BASECFLAGS.  OPT is meant to carry just optimization- and
+  debug-related flags like "-g" and "-O3".  BASECFLAGS is meant to carry
+  compiler flags that are required to get a clean compile.  On some
+  platforms (many Linux flavors in particular) BASECFLAGS will be empty by
+  default.  On others, such as Mac OS X and SCO, it will contain required
+  flags.  This change allows people building Python to override OPT without
+  fear of clobbering compiler flags which are required to get a clean build.
+
+- On Darwin/Mac OS X platforms, /sw/lib and /sw/include are added to the
+  relevant search lists in setup.py.  This allows users building Python to
+  take advantage of the many packages available from the fink project
+  <http://fink.sf.net/>.
+
+- A new Makefile target, scriptsinstall, installs a number of useful scripts
+  from the Tools/scripts directory.
+
+C API
+-----
+
+- PyEval_GetFrame() is now declared to return a ``PyFrameObject *``
+  instead of a plain ``PyObject *``.  (SF patch #686601.)
+
+- PyNumber_Check() now checks that the object has a nb_int or nb_float
+  slot, rather than simply checking whether it has a non-NULL
+  tp_as_number pointer.
+
+- A C type that inherits from a base type that defines tp_as_buffer
+  will now inherit the tp_as_buffer pointer if it doesn't define one.
+  (SF #681367)
+
+- The PyArg_Parse functions now issue a DeprecationWarning if a float
+  argument is provided when an integer is specified (this affects the 'b',
+  'B', 'h', 'H', 'i', and 'l' codes).  Future versions of Python will
+  raise a TypeError.
+
+Tests
+-----
+
+- Several tests weren't being run from regrtest.py (test_timeout.py,
+  test_tarfile.py, test_netrc.py, test_multifile.py,
+  test_importhooks.py and test_imp.py).  Now they are.  (Note to
+  developers: please read Lib/test/README when creating a new test, to
+  make sure to do it right!  All tests need to use either unittest or
+  pydoc.)
+
+- Added test_posix.py, a test suite for the posix module.
+
+- Added test_hexoct.py, a test suite for hex/oct constant folding.
+
+Windows
+-------
+
+- The timeout code for socket connect() didn't work right; this has
+  now been fixed.  test_timeout.py should pass (at least most of the
+  time).
+
+- distutils' msvccompiler class now passes the preprocessor options to
+  the resource compiler.  See SF patch #669198.
+
+- The bsddb module now ships with Sleepycat's 4.1.25.NC, the latest
+  release without strong cryptography.
+
+- sys.path[0], if it contains a directory name, is now always an
+  absolute pathname. (SF patch #664376.)
+
+- The new logging package is now installed by the Windows installer.  It
+  wasn't in 2.3a1 due to oversight.
+
+Mac
+---
+
+- There are new dialogs EasyDialogs.AskFileForOpen, AskFileForSave
+  and AskFolder. The old macfs.StandardGetFile and friends are deprecated.
+
+- Most of the standard library now uses pathnames or FSRefs in preference
+  of FSSpecs, and use the underlying Carbon.File and Carbon.Folder modules
+  in stead of macfs. macfs will probably be deprecated in the future.
+
+- Type Carbon.File.FSCatalogInfo and supporting methods have been implemented.
+  This also makes macfs.FSSpec.SetDates() work again.
+
+- There is a new module pimp, the package install manager for Python, and
+  accompanying applet PackageManager. These allow you to easily download
+  and install pretested extension packages either in source or binary
+  form. Only in MacPython-OSX.
+
+- Applets are now built with bundlebuilder in MacPython-OSX, which should make
+  them more robust and also provides a path towards BuildApplication. The
+  downside of this change is that applets can no longer be run from the
+  Terminal window, this will hopefully be fixed in the 2.3b1.
+
+
+What's New in Python 2.3 alpha 1?
+=================================
+
+*Release date: 31-Dec-2002*
+
+Type/class unification and new-style classes
+--------------------------------------------
+
+- One can now assign to __bases__ and __name__ of new-style classes.
+
+- dict() now accepts keyword arguments so that dict(one=1, two=2)
+  is the equivalent of {"one": 1, "two": 2}.  Accordingly,
+  the existing (but undocumented) 'items' keyword argument has
+  been eliminated.  This means that dict(items=someMapping) now has
+  a different meaning than before.
+
+- int() now returns a long object if the argument is outside the
+  integer range, so int("4" * 1000), int(1e200) and int(1L<<1000) will
+  all return long objects instead of raising an OverflowError.
+
+- Assignment to __class__ is disallowed if either the old or the new
+  class is a statically allocated type object (such as defined by an
+  extension module).  This prevents anomalies like 2.__class__ = bool.
+
+- New-style object creation and deallocation have been sped up
+  significantly; they are now faster than classic instance creation
+  and deallocation.
+
+- The __slots__ variable can now mention "private" names, and the
+  right thing will happen (e.g. __slots__ = ["__foo"]).
+
+- The built-ins slice() and buffer() are now callable types.  The
+  types classobj (formerly class), code, function, instance, and
+  instancemethod (formerly instance-method), which have no built-in
+  names but are accessible through the types module, are now also
+  callable.  The type dict-proxy is renamed to dictproxy.
+
+- Cycles going through the __class__ link of a new-style instance are
+  now detected by the garbage collector.
+
+- Classes using __slots__ are now properly garbage collected.
+  [SF bug 519621]
+
+- Tightened the __slots__ rules: a slot name must be a valid Python
+  identifier.
+
+- The constructor for the module type now requires a name argument and
+  takes an optional docstring argument.  Previously, this constructor
+  ignored its arguments.  As a consequence, deriving a class from a
+  module (not from the module type) is now illegal; previously this
+  created an unnamed module, just like invoking the module type did.
+  [SF bug 563060]
+
+- A new type object, 'basestring', is added.  This is a common base type
+  for 'str' and 'unicode', and can be used instead of
+  types.StringTypes, e.g. to test whether something is "a string":
+  isinstance(x, basestring) is True for Unicode and 8-bit strings.  This
+  is an abstract base class and cannot be instantiated directly.
+
+- Changed new-style class instantiation so that when C's __new__
+  method returns something that's not a C instance, its __init__ is
+  not called.  [SF bug #537450]
+
+- Fixed super() to work correctly with class methods.  [SF bug #535444]
+
+- If you try to pickle an instance of a class that has __slots__ but
+  doesn't define or override __getstate__, a TypeError is now raised.
+  This is done by adding a bozo __getstate__ to the class that always
+  raises TypeError.  (Before, this would appear to be pickled, but the
+  state of the slots would be lost.)
+
+Core and builtins
+-----------------
+
+- Import from zipfiles is now supported.  The name of a zipfile placed
+  on sys.path causes the import statement to look for importable Python
+  modules (with .py, pyc and .pyo extensions) and packages inside the
+  zipfile.  The zipfile import follows the specification (though not
+  the sample implementation) of PEP 273.  The semantics of __path__ are
+  compatible with those that have been implemented in Jython since
+  Jython 2.1.
+
+- PEP 302 has been accepted.  Although it was initially developed to
+  support zipimport, it offers a new, general import hook mechanism.
+  Several new variables have been added to the sys module:
+  sys.meta_path, sys.path_hooks, and sys.path_importer_cache; these
+  make extending the import statement much more convenient than
+  overriding the __import__ built-in function.  For a description of
+  these, see PEP 302.
+
+- A frame object's f_lineno attribute can now be written to from a
+  trace function to change which line will execute next.  A command to
+  exploit this from pdb has been added.  [SF patch #643835]
+
+- The _codecs support module for codecs.py was turned into a builtin
+  module to assure that at least the builtin codecs are available
+  to the Python parser for source code decoding according to PEP 263.
+
+- issubclass now supports a tuple as the second argument, just like
+  isinstance does. ``issubclass(X, (A, B))`` is equivalent to
+  ``issubclass(X, A) or issubclass(X, B)``.
+
+- Thanks to Armin Rigo, the last known way to provoke a system crash
+  by cleverly arranging for a comparison function to mutate a list
+  during a list.sort() operation has been fixed.  The effect of
+  attempting to mutate a list, or even to inspect its contents or
+  length, while a sort is in progress, is not defined by the language.
+  The C implementation of Python 2.3 attempts to detect mutations,
+  and raise ValueError if one occurs, but there's no guarantee that
+  all mutations will be caught, or that any will be caught across
+  releases or implementations.
+
+- Unicode file name processing for Windows (PEP 277) is implemented.
+  All platforms now have an os.path.supports_unicode_filenames attribute,
+  which is set to True on Windows NT/2000/XP, and False elsewhere.
+
+- Codec error handling callbacks (PEP 293) are implemented.
+  Error handling in unicode.encode or str.decode can now be customized.
+
+- A subtle change to the semantics of the built-in function intern():
+  interned strings are no longer immortal.  You must keep a reference
+  to the return value intern() around to get the benefit.
+
+- Use of 'None' as a variable, argument or attribute name now
+  issues a SyntaxWarning.  In the future, None may become a keyword.
+
+- SET_LINENO is gone.  co_lnotab is now consulted to determine when to
+  call the trace function.  C code that accessed f_lineno should call
+  PyCode_Addr2Line instead (f_lineno is still there, but only kept up
+  to date when there is a trace function set).
+
+- There's a new warning category, FutureWarning.  This is used to warn
+  about a number of situations where the value or sign of an integer
+  result will change in Python 2.4 as a result of PEP 237 (integer
+  unification).  The warnings implement stage B0 mentioned in that
+  PEP.  The warnings are about the following situations:
+
+    - Octal and hex literals without 'L' prefix in the inclusive range
+      [0x80000000..0xffffffff]; these are currently negative ints, but
+      in Python 2.4 they will be positive longs with the same bit
+      pattern.
+
+    - Left shifts on integer values that cause the outcome to lose
+      bits or have a different sign than the left operand.  To be
+      precise: x<<n where this currently doesn't yield the same value
+      as long(x)<<n; in Python 2.4, the outcome will be long(x)<<n.
+
+    - Conversions from ints to string that show negative values as
+      unsigned ints in the inclusive range [0x80000000..0xffffffff];
+      this affects the functions hex() and oct(), and the string
+      formatting codes %u, %o, %x, and %X.  In Python 2.4, these will
+      show signed values (e.g. hex(-1) currently returns "0xffffffff";
+      in Python 2.4 it will return "-0x1").
+
+- The bits manipulated under the cover by sys.setcheckinterval() have
+  been changed.  Both the check interval and the ticker used to be
+  per-thread values.  They are now just a pair of global variables.
+  In addition, the default check interval was boosted from 10 to 100
+  bytecode instructions.  This may have some effect on systems that
+  relied on the old default value.  In particular, in multi-threaded
+  applications which try to be highly responsive, response time will
+  increase by some (perhaps imperceptible) amount.
+
+- When multiplying very large integers, a version of the so-called
+  Karatsuba algorithm is now used.  This is most effective if the
+  inputs have roughly the same size.  If they both have about N digits,
+  Karatsuba multiplication has O(N**1.58) runtime (the exponent is
+  log_base_2(3)) instead of the previous O(N**2).  Measured results may
+  be better or worse than that, depending on platform quirks.  Besides
+  the O() improvement in raw instruction count, the Karatsuba algorithm
+  appears to have much better cache behavior on extremely large integers
+  (starting in the ballpark of a million bits).  Note that this is a
+  simple implementation, and there's no intent here to compete with,
+  e.g., GMP.  It gives a very nice speedup when it applies, but a package
+  devoted to fast large-integer arithmetic should run circles around it.
+
+- u'%c' will now raise a ValueError in case the argument is an
+  integer outside the valid range of Unicode code point ordinals.
+
+- The tempfile module has been overhauled for enhanced security.  The
+  mktemp() function is now deprecated; new, safe replacements are
+  mkstemp() (for files) and mkdtemp() (for directories), and the
+  higher-level functions NamedTemporaryFile() and TemporaryFile().
+  Use of some global variables in this module is also deprecated; the
+  new functions have keyword arguments to provide the same
+  functionality.  All Lib, Tools and Demo modules that used the unsafe
+  interfaces have been updated to use the safe replacements.  Thanks
+  to Zack Weinberg!
+
+- When x is an object whose class implements __mul__ and __rmul__,
+  1.0*x would correctly invoke __rmul__, but 1*x would erroneously
+  invoke __mul__.  This was due to the sequence-repeat code in the int
+  type.  This has been fixed now.
+
+- Previously, "str1 in str2" required str1 to be a string of length 1.
+  This restriction has been relaxed to allow str1 to be a string of
+  any length.  Thus "'el' in 'hello world'" returns True now.
+
+- File objects are now their own iterators.  For a file f, iter(f) now
+  returns f (unless f is closed), and f.next() is similar to
+  f.readline() when EOF is not reached; however, f.next() uses a
+  readahead buffer that messes up the file position, so mixing
+  f.next() and f.readline() (or other methods) doesn't work right.
+  Calling f.seek() drops the readahead buffer, but other operations
+  don't.  It so happens that this gives a nice additional speed boost
+  to "for line in file:"; the xreadlines method and corresponding
+  module are now obsolete.  Thanks to Oren Tirosh!
+
+- Encoding declarations (PEP 263, phase 1) have been implemented.  A
+  comment of the form "# -*- coding: <encodingname> -*-" in the first
+  or second line of a Python source file indicates the encoding.
+
+- list.sort() has a new implementation.  While cross-platform results
+  may vary, and in data-dependent ways, this is much faster on many
+  kinds of partially ordered lists than the previous implementation,
+  and reported to be just as fast on randomly ordered lists on
+  several major platforms.  This sort is also stable (if A==B and A
+  precedes B in the list at the start, A precedes B after the sort too),
+  although the language definition does not guarantee stability.  A
+  potential drawback is that list.sort() may require temp space of
+  len(list)*2 bytes (``*4`` on a 64-bit machine).  It's therefore possible
+  for list.sort() to raise MemoryError now, even if a comparison function
+  does not.  See <http://www.python.org/sf/587076> for full details.
+
+- All standard iterators now ensure that, once StopIteration has been
+  raised, all future calls to next() on the same iterator will also
+  raise StopIteration.  There used to be various counterexamples to
+  this behavior, which could caused confusion or subtle program
+  breakage, without any benefits.  (Note that this is still an
+  iterator's responsibility; the iterator framework does not enforce
+  this.)
+
+- Ctrl+C handling on Windows has been made more consistent with
+  other platforms.  KeyboardInterrupt can now reliably be caught,
+  and Ctrl+C at an interactive prompt no longer terminates the
+  process under NT/2k/XP (it never did under Win9x).  Ctrl+C will
+  interrupt time.sleep() in the main thread, and any child processes
+  created via the popen family (on win2k; we can't make win9x work
+  reliably) are also interrupted (as generally happens on for Linux/Unix.)
+  [SF bugs 231273, 439992 and 581232]
+
+- sys.getwindowsversion() has been added on Windows.  This
+  returns a tuple with information about the version of Windows
+  currently running.
+
+- Slices and repetitions of buffer objects now consistently return
+  a string.  Formerly, strings would be returned most of the time,
+  but a buffer object would be returned when the repetition count
+  was one or when the slice range was all inclusive.
+
+- Unicode objects in sys.path are no longer ignored but treated
+  as directory names.
+
+- Fixed string.startswith and string.endswith builtin methods
+  so they accept negative indices.  [SF bug 493951]
+
+- Fixed a bug with a continue inside a try block and a yield in the
+  finally clause.  [SF bug 567538]
+
+- Most builtin sequences now support "extended slices", i.e. slices
+  with a third "stride" parameter.  For example, "hello world"[::-1]
+  gives "dlrow olleh".
+
+- A new warning PendingDeprecationWarning was added to provide
+  direction on features which are in the process of being deprecated.
+  The warning will not be printed by default.  To see the pending
+  deprecations, use -Walways::PendingDeprecationWarning::
+  as a command line option or warnings.filterwarnings() in code.
+
+- Deprecated features of xrange objects have been removed as
+  promised.  The start, stop, and step attributes and the tolist()
+  method no longer exist.  xrange repetition and slicing have been
+  removed.
+
+- New builtin function enumerate(x), from PEP 279.  Example:
+  enumerate("abc") is an iterator returning (0,"a"), (1,"b"), (2,"c").
+  The argument can be an arbitrary iterable object.
+
+- The assert statement no longer tests __debug__ at runtime.  This means
+  that assert statements cannot be disabled by assigning a false value
+  to __debug__.
+
+- A method zfill() was added to str and unicode, that fills a numeric
+  string to the left with zeros.  For example,
+  "+123".zfill(6) -> "+00123".
+
+- Complex numbers supported divmod() and the // and % operators, but
+  these make no sense.  Since this was documented, they're being
+  deprecated now.
+
+- String and unicode methods lstrip(), rstrip() and strip() now take
+  an optional argument that specifies the characters to strip.  For
+  example, "Foo!!!?!?!?".rstrip("?!") -> "Foo".
+
+- There's a new dictionary constructor (a class method of the dict
+  class), dict.fromkeys(iterable, value=None).  It constructs a
+  dictionary with keys taken from the iterable and all values set to a
+  single value.  It can be used for building sets and for removing
+  duplicates from sequences.
+
+- Added a new dict method pop(key).  This removes and returns the
+  value corresponding to key.  [SF patch #539949]
+
+- A new built-in type, bool, has been added, as well as built-in
+  names for its two values, True and False.  Comparisons and sundry
+  other operations that return a truth value have been changed to
+  return a bool instead.  Read PEP 285 for an explanation of why this
+  is backward compatible.
+
+- Fixed two bugs reported as SF #535905: under certain conditions,
+  deallocating a deeply nested structure could cause a segfault in the
+  garbage collector, due to interaction with the "trashcan" code;
+  access to the current frame during destruction of a local variable
+  could access a pointer to freed memory.
+
+- The optional object allocator ("pymalloc") has been enabled by
+  default.  The recommended practice for memory allocation and
+  deallocation has been streamlined.  A header file is included,
+  Misc/pymemcompat.h, which can be bundled with 3rd party extensions
+  and lets them use the same API with Python versions from 1.5.2
+  onwards.
+
+- PyErr_Display will provide file and line information for all exceptions
+  that have an attribute print_file_and_line, not just SyntaxErrors.
+
+- The UTF-8 codec will now encode and decode Unicode surrogates
+  correctly and without raising exceptions for unpaired ones.
+
+- Universal newlines (PEP 278) is implemented.  Briefly, using 'U'
+  instead of 'r' when opening a text file for reading changes the line
+  ending convention so that any of '\r', '\r\n', and '\n' is
+  recognized (even mixed in one file); all three are converted to
+  '\n', the standard Python line end character.
+
+- file.xreadlines() now raises a ValueError if the file is closed:
+  Previously, an xreadlines object was returned which would raise
+  a ValueError when the xreadlines.next() method was called.
+
+- sys.exit() inadvertently allowed more than one argument.
+  An exception will now be raised if more than one argument is used.
+
+- Changed evaluation order of dictionary literals to conform to the
+  general left to right evaluation order rule. Now {f1(): f2()} will
+  evaluate f1 first.
+
+- Fixed bug #521782: when a file was in non-blocking mode, file.read()
+  could silently lose data or wrongly throw an unknown error.
+
+- The sq_repeat, sq_inplace_repeat, sq_concat and sq_inplace_concat
+  slots are now always tried after trying the corresponding nb_* slots.
+  This fixes a number of minor bugs (see bug #624807).
+
+- Fix problem with dynamic loading on 64-bit AIX (see bug #639945).
+
+Extension modules
+-----------------
+
+- Added three operators to the operator module:
+    operator.pow(a,b) which is equivalent to:  a**b.
+    operator.is_(a,b) which is equivalent to:  a is b.
+    operator.is_not(a,b) which is equivalent to:  a is not b.
+
+- posix.openpty now works on all systems that have /dev/ptmx.
+
+- A module zipimport exists to support importing code from zip
+  archives.
+
+- The new datetime module supplies classes for manipulating dates and
+  times.  The basic design came from the Zope "fishbowl process", and
+  favors practical commercial applications over calendar esoterica.  See
+
+      http://www.zope.org/Members/fdrake/DateTimeWiki/FrontPage
+
+- _tkinter now returns Tcl objects, instead of strings. Objects which
+  have Python equivalents are converted to Python objects, other objects
+  are wrapped. This can be configured through the wantobjects method,
+  or Tkinter.wantobjects.
+
+- The PyBSDDB wrapper around the Sleepycat Berkeley DB library has
+  been added as the package bsddb.  The traditional bsddb module is
+  still available in source code, but not built automatically anymore,
+  and is now named bsddb185.  This supports Berkeley DB versions from
+  3.0 to 4.1.  For help converting your databases from the old module (which
+  probably used an obsolete version of Berkeley DB) to the new module, see
+  the db2pickle.py and pickle2db.py scripts described in the Tools/Demos
+  section above.
+
+- unicodedata was updated to Unicode 3.2. It supports normalization
+  and names for Hangul syllables and CJK unified ideographs.
+
+- resource.getrlimit() now returns longs instead of ints.
+
+- readline now dynamically adjusts its input/output stream if
+  sys.stdin/stdout changes.
+
+- The _tkinter module (and hence Tkinter) has dropped support for
+  Tcl/Tk 8.0 and 8.1.  Only Tcl/Tk versions 8.2, 8.3 and 8.4 are
+  supported.
+
+- cPickle.BadPickleGet is now a class.
+
+- The time stamps in os.stat_result are floating point numbers
+  after stat_float_times has been called.
+
+- If the size passed to mmap.mmap() is larger than the length of the
+  file on non-Windows platforms, a ValueError is raised. [SF bug 585792]
+
+- The xreadlines module is slated for obsolescence.
+
+- The strptime function in the time module is now always available (a
+  Python implementation is used when the C library doesn't define it).
+
+- The 'new' module is no longer an extension, but a Python module that
+  only exists for backwards compatibility.  Its contents are no longer
+  functions but callable type objects.
+
+- The bsddb.*open functions can now take 'None' as a filename.
+  This will create a temporary in-memory bsddb that won't be
+  written to disk.
+
+- posix.getloadavg, posix.lchown, posix.killpg, posix.mknod, and
+  posix.getpgid have been added where available.
+
+- The locale module now exposes the C library's gettext interface. It
+  also has a new function getpreferredencoding.
+
+- A security hole ("double free") was found in zlib-1.1.3, a popular
+  third party compression library used by some Python modules.  The
+  hole was quickly plugged in zlib-1.1.4, and the Windows build of
+  Python now ships with zlib-1.1.4.
+
+- pwd, grp, and resource return enhanced tuples now, with symbolic
+  field names.
+
+- array.array is now a type object. A new format character
+  'u' indicates Py_UNICODE arrays. For those, .tounicode and
+  .fromunicode methods are available. Arrays now support __iadd__
+  and __imul__.
+
+- dl now builds on every system that has dlfcn.h.  Failure in case
+  of sizeof(int)!=sizeof(long)!=sizeof(void*) is delayed until dl.open
+  is called.
+
+- The sys module acquired a new attribute, api_version, which evaluates
+  to the value of the PYTHON_API_VERSION macro with which the
+  interpreter was compiled.
+
+- Fixed bug #470582: sre module would return a tuple (None, 'a', 'ab')
+  when applying the regular expression '^((a)c)?(ab)$' on 'ab'. It now
+  returns (None, None, 'ab'), as expected. Also fixed handling of
+  lastindex/lastgroup match attributes in similar cases. For example,
+  when running the expression r'(a)(b)?b' over 'ab', lastindex must be
+  1, not 2.
+
+- Fixed bug #581080: sre scanner was not checking the buffer limit
+  before increasing the current pointer. This was creating an infinite
+  loop in the search function, once the pointer exceeded the buffer
+  limit.
+
+- The os.fdopen function now enforces a file mode starting with the
+  letter 'r', 'w' or 'a', otherwise a ValueError is raised. This fixes
+  bug #623464.
+
+- The linuxaudiodev module is now deprecated; it is being replaced by
+  ossaudiodev.  The interface has been extended to cover a lot more of
+  OSS (see www.opensound.com), including most DSP ioctls and the
+  OSS mixer API.  Documentation forthcoming in 2.3a2.
+
+Library
+-------
+
+- imaplib.py now supports SSL (Tino Lange and Piers Lauder).
+
+- Freeze's modulefinder.py has been moved to the standard library;
+  slightly improved so it will issue less false missing submodule
+  reports (see sf path #643711 for details).  Documentation will follow
+  with Python 2.3a2.
+
+- os.path exposes getctime.
+
+- unittest.py now has two additional methods called assertAlmostEqual()
+  and failIfAlmostEqual().  They implement an approximate comparison
+  by rounding the difference between the two arguments and comparing
+  the result to zero.  Approximate comparison is essential for
+  unit tests of floating point results.
+
+- calendar.py now depends on the new datetime module rather than
+  the time module.  As a result, the range of allowable dates
+  has been increased.
+
+- pdb has a new 'j(ump)' command to select the next line to be
+  executed.
+
+- The distutils created windows installers now can run a
+  postinstallation script.
+
+- doctest.testmod can now be called without argument, which means to
+  test the current module.
+
+- When canceling a server that implemented threading with a keyboard
+  interrupt, the server would shut down but not terminate (waiting on
+  client threads). A new member variable, daemon_threads, was added to
+  the ThreadingMixIn class in SocketServer.py to make it explicit that
+  this behavior needs to be controlled.
+
+- A new module, optparse, provides a fancy alternative to getopt for
+  command line parsing.  It is a slightly modified version of Greg
+  Ward's Optik package.
+
+- UserDict.py now defines a DictMixin class which defines all dictionary
+  methods for classes that already have a minimum mapping interface.
+  This greatly simplifies writing classes that need to be substitutable
+  for dictionaries (such as the shelve module).
+
+- shelve.py now subclasses from UserDict.DictMixin.  Now shelve supports
+  all dictionary methods.  This eases the transition to persistent
+  storage for scripts originally written with dictionaries in mind.
+
+- shelve.open and the various classes in shelve.py now accept an optional
+  binary flag, which defaults to False.  If True, the values stored in the
+  shelf are binary pickles.
+
+- A new package, logging, implements the logging API defined by PEP
+  282.  The code is written by Vinay Sajip.
+
+- StreamReader, StreamReaderWriter and StreamRecoder in the codecs
+  modules are iterators now.
+
+- gzip.py now handles files exceeding 2GB.  Files over 4GB also work
+  now (provided the OS supports it, and Python is configured with large
+  file support), but in that case the underlying gzip file format can
+  record only the least-significant 32 bits of the file size, so that
+  some tools working with gzipped files may report an incorrect file
+  size.
+
+- xml.sax.saxutils.unescape has been added, to replace entity references
+  with their entity value.
+
+- Queue.Queue.{put,get} now support an optional timeout argument.
+
+- Various features of Tk 8.4 are exposed in Tkinter.py. The multiple
+  option of tkFileDialog is exposed as function askopenfile{,name}s.
+
+- Various configure methods of Tkinter have been stream-lined, so that
+  tag_configure, image_configure, window_configure now return a
+  dictionary when invoked with no argument.
+
+- Importing the readline module now no longer has the side effect of
+  calling setlocale(LC_CTYPE, "").  The initial "C" locale, or
+  whatever locale is explicitly set by the user, is preserved.  If you
+  want repr() of 8-bit strings in your preferred encoding to preserve
+  all printable characters of that encoding, you have to add the
+  following code to your $PYTHONSTARTUP file or to your application's
+  main():
+
+    import locale
+    locale.setlocale(locale.LC_CTYPE, "")
+
+- shutil.move was added. shutil.copytree now reports errors as an
+  exception at the end, instead of printing error messages.
+
+- Encoding name normalization was generalized to not only
+  replace hyphens with underscores, but also all other non-alphanumeric
+  characters (with the exception of the dot which is used for Python
+  package names during lookup). The aliases.py mapping was updated
+  to the new standard.
+
+- mimetypes has two new functions: guess_all_extensions() which
+  returns a list of all known extensions for a mime type, and
+  add_type() which adds one mapping between a mime type and
+  an extension to the database.
+
+- New module: sets, defines the class Set that implements a mutable
+  set type using the keys of a dict to represent the set.  There's
+  also a class ImmutableSet which is useful when you need sets of sets
+  or when you need to use sets as dict keys, and a class BaseSet which
+  is the base class of the two.
+
+- Added random.sample(population,k) for random sampling without replacement.
+  Returns a k length list of unique elements chosen from the population.
+
+- random.randrange(-sys.maxint-1, sys.maxint) no longer raises
+  OverflowError.  That is, it now accepts any combination of 'start'
+  and 'stop' arguments so long as each is in the range of Python's
+  bounded integers.
+
+- Thanks to Raymond Hettinger, random.random() now uses a new core
+  generator.  The Mersenne Twister algorithm is implemented in C,
+  threadsafe, faster than the previous generator, has an astronomically
+  large period (2**19937-1), creates random floats to full 53-bit
+  precision, and may be the most widely tested random number generator
+  in existence.
+
+  The random.jumpahead(n) method has different semantics for the new
+  generator.  Instead of jumping n steps ahead, it uses n and the
+  existing state to create a new state.  This means that jumpahead()
+  continues to support multi-threaded code needing generators of
+  non-overlapping sequences.  However, it will break code which relies
+  on jumpahead moving a specific number of steps forward.
+
+  The attributes random.whseed and random.__whseed have no meaning for
+  the new generator.  Code using these attributes should switch to a
+  new class, random.WichmannHill which is provided for backward
+  compatibility and to make an alternate generator available.
+
+- New "algorithms" module: heapq, implements a heap queue.  Thanks to
+  Kevin O'Connor for the code and François Pinard for an entertaining
+  write-up explaining the theory and practical uses of heaps.
+
+- New encoding for the Palm OS character set: palmos.
+
+- binascii.crc32() and the zipfile module had problems on some 64-bit
+  platforms.  These have been fixed.  On a platform with 8-byte C longs,
+  crc32() now returns a signed-extended 4-byte result, so that its value
+  as a Python int is equal to the value computed a 32-bit platform.
+
+- xml.dom.minidom.toxml and toprettyxml now take an optional encoding
+  argument.
+
+- Some fixes in the copy module: when an object is copied through its
+  __reduce__ method, there was no check for a __setstate__ method on
+  the result [SF patch 565085]; deepcopy should treat instances of
+  custom metaclasses the same way it treats instances of type 'type'
+  [SF patch 560794].
+
+- Sockets now support timeout mode.  After s.settimeout(T), where T is
+  a float expressing seconds, subsequent operations raise an exception
+  if they cannot be completed within T seconds.  To disable timeout
+  mode, use s.settimeout(None).  There's also a module function,
+  socket.setdefaulttimeout(T), which sets the default for all sockets
+  created henceforth.
+
+- getopt.gnu_getopt was added.  This supports GNU-style option
+  processing, where options can be mixed with non-option arguments.
+
+- Stop using strings for exceptions.  String objects used for
+  exceptions are now classes deriving from Exception.  The objects
+  changed were: Tkinter.TclError, bdb.BdbQuit, macpath.norm_error,
+  tabnanny.NannyNag, and xdrlib.Error.
+
+- Constants BOM_UTF8, BOM_UTF16, BOM_UTF16_LE, BOM_UTF16_BE,
+  BOM_UTF32, BOM_UTF32_LE and BOM_UTF32_BE that represent the Byte
+  Order Mark in UTF-8, UTF-16 and UTF-32 encodings for little and
+  big endian systems were added to the codecs module. The old names
+  BOM32_* and BOM64_* were off by a factor of 2.
+
+- Added conversion functions math.degrees() and math.radians().
+
+- math.log() now takes an optional argument:  math.log(x[, base]).
+
+- ftplib.retrlines() now tests for callback is None rather than testing
+  for False.  Was causing an error when given a callback object which
+  was callable but also returned len() as zero.  The change may
+  create new breakage if the caller relied on the undocumented behavior
+  and called with callback set to [] or some other False value not
+  identical to None.
+
+- random.gauss() uses a piece of hidden state used by nothing else,
+  and the .seed() and .whseed() methods failed to reset it.  In other
+  words, setting the seed didn't completely determine the sequence of
+  results produced by random.gauss().  It does now.  Programs repeatedly
+  mixing calls to a seed method with calls to gauss() may see different
+  results now.
+
+- The pickle.Pickler class grew a clear_memo() method to mimic that
+  provided by cPickle.Pickler.
+
+- difflib's SequenceMatcher class now does a dynamic analysis of
+  which elements are so frequent as to constitute noise.  For
+  comparing files as sequences of lines, this generally works better
+  than the IS_LINE_JUNK function, and function ndiff's linejunk
+  argument defaults to None now as a result.  A happy benefit is
+  that SequenceMatcher may run much faster now when applied
+  to large files with many duplicate lines (for example, C program
+  text with lots of repeated "}" and "return NULL;" lines).
+
+- New Text.dump() method in Tkinter module.
+
+- New distutils commands for building packagers were added to
+  support pkgtool on Solaris and swinstall on HP-UX.
+
+- distutils now has a new abstract binary packager base class
+  command/bdist_packager, which simplifies writing packagers.
+  This will hopefully provide the missing bits to encourage
+  people to submit more packagers, e.g. for Debian, FreeBSD
+  and other systems.
+
+- The UTF-16, -LE and -BE stream readers now raise a
+  NotImplementedError for all calls to .readline(). Previously, they
+  used to just produce garbage or fail with an encoding error --
+  UTF-16 is a 2-byte encoding and the C lib's line reading APIs don't
+  work well with these.
+
+- compileall now supports quiet operation.
+
+- The BaseHTTPServer now implements optional HTTP/1.1 persistent
+  connections.
+
+- socket module: the SSL support was broken out of the main
+  _socket module C helper and placed into a new _ssl helper
+  which now gets imported by socket.py if available and working.
+
+- encodings package: added aliases for all supported IANA character
+  sets
+
+- ftplib: to safeguard the user's privacy, anonymous login will use
+  "anonymous@" as default password, rather than the real user and host
+  name.
+
+- webbrowser: tightened up the command passed to os.system() so that
+  arbitrary shell code can't be executed because a bogus URL was
+  passed in.
+
+- gettext.translation has an optional fallback argument, and
+  gettext.find an optional all argument. Translations will now fallback
+  on a per-message basis. The module supports plural forms, by means
+  of gettext.[d]ngettext and Translation.[u]ngettext.
+
+- distutils bdist commands now offer a --skip-build option.
+
+- warnings.warn now accepts a Warning instance as first argument.
+
+- The xml.sax.expatreader.ExpatParser class will no longer create
+  circular references by using itself as the locator that gets passed
+  to the content handler implementation.  [SF bug #535474]
+
+- The email.Parser.Parser class now properly parses strings regardless
+  of their line endings, which can be any of \r, \n, or \r\n (CR, LF,
+  or CRLF).  Also, the Header class's constructor default arguments
+  has changed slightly so that an explicit maxlinelen value is always
+  honored, and so unicode conversion error handling can be specified.
+
+- distutils' build_ext command now links C++ extensions with the C++
+  compiler available in the Makefile or CXX environment variable, if
+  running under \*nix.
+
+- New module bz2: provides a comprehensive interface for the bz2 compression
+  library.  It implements a complete file interface, one-shot (de)compression
+  functions, and types for sequential (de)compression.
+
+- New pdb command 'pp' which is like 'p' except that it pretty-prints
+  the value of its expression argument.
+
+- Now bdist_rpm distutils command understands a verify_script option in
+  the config file, including the contents of the referred filename in
+  the "%verifyscript" section of the rpm spec file.
+
+- Fixed bug #495695: webbrowser module would run graphic browsers in a
+  unix environment even if DISPLAY was not set. Also, support for
+  skipstone browser was included.
+
+- Fixed bug #636769: rexec would run unallowed code if subclasses of
+  strings were used as parameters for certain functions.
+
+Tools/Demos
+-----------
+
+- pygettext.py now supports globbing on Windows, and accepts module
+  names in addition to accepting file names.
+
+- The SGI demos (Demo/sgi) have been removed.  Nobody thought they
+  were interesting any more.  (The SGI library modules and extensions
+  are still there; it is believed that at least some of these are
+  still used and useful.)
+
+- IDLE supports the new encoding declarations (PEP 263); it can also
+  deal with legacy 8-bit files if they use the locale's encoding. It
+  allows non-ASCII strings in the interactive shell and executes them
+  in the locale's encoding.
+
+- freeze.py now produces binaries which can import shared modules,
+  unlike before when this failed due to missing symbol exports in
+  the generated binary.
+
+Build
+-----
+
+- On Unix, IDLE is now installed automatically.
+
+- The fpectl module is not built by default; it's dangerous or useless
+  except in the hands of experts.
+
+- The public Python C API will generally be declared using PyAPI_FUNC
+  and PyAPI_DATA macros, while Python extension module init functions
+  will be declared with PyMODINIT_FUNC.  DL_EXPORT/DL_IMPORT macros
+  are deprecated.
+
+- A bug was fixed that could cause COUNT_ALLOCS builds to segfault, or
+  get into infinite loops, when a new-style class got garbage-collected.
+  Unfortunately, to avoid this, the way COUNT_ALLOCS works requires
+  that new-style classes be immortal in COUNT_ALLOCS builds.  Note that
+  COUNT_ALLOCS is not enabled by default, in either release or debug
+  builds, and that new-style classes are immortal only in COUNT_ALLOCS
+  builds.
+
+- Compiling out the cyclic garbage collector is no longer an option.
+  The old symbol WITH_CYCLE_GC is now ignored, and Python.h arranges
+  that it's always defined (for the benefit of any extension modules
+  that may be conditionalizing on it).  A bonus is that any extension
+  type participating in cyclic gc can choose to participate in the
+  Py_TRASHCAN mechanism now too; in the absence of cyclic gc, this used
+  to require editing the core to teach the trashcan mechanism about the
+  new type.
+
+- According to Annex F of the current C standard,
+
+    The Standard C macro HUGE_VAL and its float and long double analogs,
+    HUGE_VALF and HUGE_VALL, expand to expressions whose values are
+    positive infinities.
+
+  Python only uses the double HUGE_VAL, and only to #define its own symbol
+  Py_HUGE_VAL.  Some platforms have incorrect definitions for HUGE_VAL.
+  pyport.h used to try to worm around that, but the workarounds triggered
+  other bugs on other platforms, so we gave up.  If your platform defines
+  HUGE_VAL incorrectly, you'll need to #define Py_HUGE_VAL to something
+  that works on your platform.  The only instance of this I'm sure about
+  is on an unknown subset of Cray systems, described here:
+
+  http://www.cray.com/swpubs/manuals/SN-2194_2.0/html-SN-2194_2.0/x3138.htm
+
+  Presumably 2.3a1 breaks such systems.  If anyone uses such a system, help!
+
+- The configure option --without-doc-strings can be used to remove the
+  doc strings from the builtin functions and modules; this reduces the
+  size of the executable.
+
+- The universal newlines option (PEP 278) is on by default.  On Unix
+  it can be disabled by passing --without-universal-newlines to the
+  configure script.  On other platforms, remove
+  WITH_UNIVERSAL_NEWLINES from pyconfig.h.
+
+- On Unix, a shared libpython2.3.so can be created with --enable-shared.
+
+- All uses of the CACHE_HASH, INTERN_STRINGS, and DONT_SHARE_SHORT_STRINGS
+  preprocessor symbols were eliminated.  The internal decisions they
+  controlled stopped being experimental long ago.
+
+- The tools used to build the documentation now work under Cygwin as
+  well as Unix.
+
+- The bsddb and dbm module builds have been changed to try and avoid version
+  skew problems and disable linkage with Berkeley DB 1.85 unless the
+  installer knows what s/he's doing.  See the section on building these
+  modules in the README file for details.
+
+C API
+-----
+
+- PyNumber_Check() now returns true for string and unicode objects.
+  This is a result of these types having a partially defined
+  tp_as_number slot.  (This is not a feature, but an indication that
+  PyNumber_Check() is not very useful to determine numeric behavior.
+  It may be deprecated.)
+
+- The string object's layout has changed: the pointer member
+  ob_sinterned has been replaced by an int member ob_sstate.  On some
+  platforms (e.g. most 64-bit systems) this may change the offset of
+  the ob_sval member, so as a precaution the API_VERSION has been
+  incremented.  The apparently unused feature of "indirect interned
+  strings", supported by the ob_sinterned member, is gone.  Interned
+  strings are now usually mortal; there is a new API,
+  PyString_InternImmortal() that creates immortal interned strings.
+  (The ob_sstate member can only take three values; however, while
+  making it a char saves a few bytes per string object on average, in
+  it also slowed things down a bit because ob_sval was no longer
+  aligned.)
+
+- The Py_InitModule*() functions now accept NULL for the 'methods'
+  argument.  Modules without global functions are becoming more common
+  now that factories can be types rather than functions.
+
+- New C API PyUnicode_FromOrdinal() which exposes unichr() at C
+  level.
+
+- New functions PyErr_SetExcFromWindowsErr() and
+  PyErr_SetExcFromWindowsErrWithFilename(). Similar to
+  PyErr_SetFromWindowsErrWithFilename() and
+  PyErr_SetFromWindowsErr(), but they allow to specify
+  the exception type to raise. Available on Windows.
+
+- Py_FatalError() is now declared as taking a const char* argument.  It
+  was previously declared without const.  This should not affect working
+  code.
+
+- Added new macro PySequence_ITEM(o, i) that directly calls
+  sq_item without rechecking that o is a sequence and without
+  adjusting for negative indices.
+
+- PyRange_New() now raises ValueError if the fourth argument is not 1.
+  This is part of the removal of deprecated features of the xrange
+  object.
+
+- PyNumber_Coerce() and PyNumber_CoerceEx() now also invoke the type's
+  coercion if both arguments have the same type but this type has the
+  CHECKTYPES flag set.  This is to better support proxies.
+
+- The type of tp_free has been changed from "``void (*)(PyObject *)``" to
+  "``void (*)(void *)``".
+
+- PyObject_Del, PyObject_GC_Del are now functions instead of macros.
+
+- A type can now inherit its metatype from its base type.  Previously,
+  when PyType_Ready() was called, if ob_type was found to be NULL, it
+  was always set to &PyType_Type; now it is set to base->ob_type,
+  where base is tp_base, defaulting to &PyObject_Type.
+
+- PyType_Ready() accidentally did not inherit tp_is_gc; now it does.
+
+- The PyCore_* family of APIs have been removed.
+
+- The "u#" parser marker will now pass through Unicode objects as-is
+  without going through the buffer API.
+
+- The enumerators of cmp_op have been renamed to use the prefix ``PyCmp_``.
+
+- An old #define of ANY as void has been removed from pyport.h.  This
+  hasn't been used since Python's pre-ANSI days, and the #define has
+  been marked as obsolete since then.  SF bug 495548 says it created
+  conflicts with other packages, so keeping it around wasn't harmless.
+
+- Because Python's magic number scheme broke on January 1st, we decided
+  to stop Python development.  Thanks for all the fish!
+
+- Some of us don't like fish, so we changed Python's magic number
+  scheme to a new one. See Python/import.c for details.
+
+New platforms
+-------------
+
+- OpenVMS is now supported.
+
+- AtheOS is now supported.
+
+- the EMX runtime environment on OS/2 is now supported.
+
+- GNU/Hurd is now supported.
+
+Tests
+-----
+
+- The regrtest.py script's -u option now provides a way to say "allow
+  all resources except this one."  For example, to allow everything
+  except bsddb, give the option '-uall,-bsddb'.
+
+Windows
+-------
+
+- The Windows distribution now ships with version 4.0.14 of the
+  Sleepycat Berkeley database library.  This should be a huge
+  improvement over the previous Berkeley DB 1.85, which had many
+  bugs.
+  XXX What are the licensing issues here?
+  XXX If a user has a database created with a previous version of
+  XXX     Python, what must they do to convert it?
+  XXX I'm still not sure how to link this thing (see PCbuild/readme.txt).
+  XXX The version # is likely to change before 2.3a1.
+
+- The Windows distribution now ships with a Secure Sockets Library (SLL)
+   module (_ssl.pyd)
+
+- The Windows distribution now ships with Tcl/Tk version 8.4.1 (it
+  previously shipped with Tcl/Tk 8.3.2).
+
+- When Python is built under a Microsoft compiler, sys.version now
+  includes the compiler version number (_MSC_VER).  For example, under
+  MSVC 6, sys.version contains the substring "MSC v.1200 ".  1200 is
+  the value of _MSC_VER under MSVC 6.
+
+- Sometimes the uninstall executable (UNWISE.EXE) vanishes.  One cause
+  of that has been fixed in the installer (disabled Wise's "delete in-
+  use files" uninstall option).
+
+- Fixed a bug in urllib's proxy handling in Windows.  [SF bug #503031]
+
+- The installer now installs Start menu shortcuts under (the local
+  equivalent of) "All Users" when doing an Admin install.
+
+- file.truncate([newsize]) now works on Windows for all newsize values.
+  It used to fail if newsize didn't fit in 32 bits, reflecting a
+  limitation of MS _chsize (which is no longer used).
+
+- os.waitpid() is now implemented for Windows, and can be used to block
+  until a specified process exits.  This is similar to, but not exactly
+  the same as, os.waitpid() on POSIX systems.  If you're waiting for
+  a specific process whose pid was obtained from one of the spawn()
+  functions, the same Python os.waitpid() code works across platforms.
+  See the docs for details.  The docs were changed to clarify that
+  spawn functions return, and waitpid requires, a process handle on
+  Windows (not the same thing as a Windows process id).
+
+- New tempfile.TemporaryFile implementation for Windows:  this doesn't
+  need a TemporaryFileWrapper wrapper anymore, and should be immune
+  to a nasty problem:  before 2.3, if you got a temp file on Windows, it
+  got wrapped in an object whose close() method first closed the
+  underlying file, then deleted the file.  This usually worked fine.
+  However, the spawn family of functions on Windows create (at a low C
+  level) the same set of open files in the spawned process Q as were
+  open in the spawning process P.  If a temp file f was among them, then
+  doing f.close() in P first closed P's C-level file handle on f, but Q's
+  C-level file handle on f remained open, so the attempt in P to delete f
+  blew up with a "Permission denied" error (Windows doesn't allow
+  deleting open files).  This was surprising, subtle, and difficult to
+  work around.
+
+- The os module now exports all the symbolic constants usable with the
+  low-level os.open() on Windows:  the new constants in 2.3 are
+  O_NOINHERIT, O_SHORT_LIVED, O_TEMPORARY, O_RANDOM and O_SEQUENTIAL.
+  The others were also available in 2.2:  O_APPEND, O_BINARY, O_CREAT,
+  O_EXCL, O_RDONLY, O_RDWR, O_TEXT, O_TRUNC and O_WRONLY.  Contrary
+  to Microsoft docs, O_SHORT_LIVED does not seem to imply O_TEMPORARY
+  (so specify both if you want both; note that neither is useful unless
+  specified with O_CREAT too).
+
+Mac
+----
+
+- Mac/Relnotes is gone, the release notes are now here.
+
+- Python (the OSX-only, unix-based version, not the OS9-compatible CFM
+  version) now fully supports unicode strings as arguments to various file
+  system calls, eg. open(), file(), os.stat() and os.listdir().
+
+- The current naming convention for Python on the Macintosh is that MacPython
+  refers to the unix-based OSX-only version, and MacPython-OS9 refers to the
+  CFM-based version that runs on both OS9 and OSX.
+
+- All MacPython-OS9 functionality is now available in an OSX unix build,
+  including the Carbon modules, the IDE, OSA support, etc. A lot of this
+  will only work correctly in a framework build, though, because you cannot
+  talk to the window manager unless your application is run from a .app
+  bundle. There is a command line tool "pythonw" that runs your script
+  with an interpreter living in such a .app bundle, this interpreter should
+  be used to run any Python script using the window manager (including
+  Tkinter or wxPython scripts).
+
+- Most of Mac/Lib has moved to Lib/plat-mac, which is again used both in
+  MacPython-OSX and MacPython-OS9. The only modules remaining in Mac/Lib
+  are specifically for MacPython-OS9 (CFM support, preference resources, etc).
+
+- A new utility PythonLauncher will start a Python interpreter when a .py or
+  .pyw script is double-clicked in the Finder. By default .py scripts are
+  run with a normal Python interpreter in a Terminal window and .pyw
+  files are run with a window-aware pythonw interpreter without a Terminal
+  window, but all this can be customized.
+
+- MacPython-OS9 is now Carbon-only, so it runs on Mac OS 9 or Mac OS X and
+  possibly on Mac OS 8.6 with the right CarbonLib installed, but not on earlier
+  releases.
+
+- Many tools such as BuildApplet.py and gensuitemodule.py now support a command
+  line interface too.
+
+- All the Carbon classes are now PEP253 compliant, meaning that you can
+  subclass them from Python. Most of the attributes have gone, you should
+  now use the accessor function call API, which is also what Apple's
+  documentation uses. Some attributes such as grafport.visRgn are still
+  available for convenience.
+
+- New Carbon modules File (implementing the APIs in Files.h and Aliases.h)
+  and Folder (APIs from Folders.h). The old macfs builtin module is
+  gone, and replaced by a Python wrapper around the new modules.
+
+- Pathname handling should now be fully consistent: MacPython-OSX always uses
+  unix pathnames and MacPython-OS9 always uses colon-separated Mac pathnames
+  (also when running on Mac OS X).
+
+- New Carbon modules Help and AH give access to the Carbon Help Manager.
+  There are hooks in the IDE to allow accessing the Python documentation
+  (and Apple's Carbon and Cocoa documentation) through the Help Viewer.
+  See Mac/OSX/README for converting the Python documentation to a
+  Help Viewer compatible form and installing it.
+
+- OSA support has been redesigned and the generated Python classes now
+  mirror the inheritance defined by the underlying OSA classes.
+
+- MacPython no longer maps both \r and \n to \n on input for any text file.
+  This feature has been replaced by universal newline support (PEP278).
+
+- The default encoding for Python sourcefiles in MacPython-OS9 is no longer
+  mac-roman (or whatever your local Mac encoding was) but "ascii", like on
+  other platforms. If you really need sourcefiles with Mac characters in them
+  you can change this in site.py.
+
+
+What's New in Python 2.2 final?
+===============================
+
+*Release date: 21-Dec-2001*
+
+Type/class unification and new-style classes
+--------------------------------------------
+
+- pickle.py, cPickle: allow pickling instances of new-style classes
+  with a custom metaclass.
+
+Core and builtins
+-----------------
+
+- weakref proxy object: when comparing, unwrap both arguments if both
+  are proxies.
+
+Extension modules
+-----------------
+
+- binascii.b2a_base64(): fix a potential buffer overrun when encoding
+  very short strings.
+
+- cPickle: the obscure "fast" mode was suspected of causing stack
+  overflows on the Mac.  Hopefully fixed this by setting the recursion
+  limit much smaller.  If the limit is too low (it only affects
+  performance), you can change it by defining PY_CPICKLE_FAST_LIMIT
+  when compiling cPickle.c (or in pyconfig.h).
+
+Library
+-------
+
+- dumbdbm.py: fixed a dumb old bug (the file didn't get synched at
+  close or delete time).
+
+- rfc822.py: fixed a bug where the address '<>' was converted to None
+  instead of an empty string (also fixes the email.Utils module).
+
+- xmlrpclib.py: version 1.0.0; uses precision for doubles.
+
+- test suite: the pickle and cPickle tests were not executing any code
+  when run from the standard regression test.
+
+Tools/Demos
+-----------
+
+Build
+-----
+
+C API
+-----
+
+New platforms
+-------------
+
+Tests
+-----
+
+Windows
+-------
+
+- distutils package: fixed broken Windows installers (bdist_wininst).
+
+- tempfile.py: prevent mysterious warnings when TemporaryFileWrapper
+  instances are deleted at process exit time.
+
+- socket.py: prevent mysterious warnings when socket instances are
+  deleted at process exit time.
+
+- posixmodule.c: fix a Windows crash with stat() of a filename ending
+  in backslash.
+
+Mac
+----
+
+- The Carbon toolbox modules have been upgraded to Universal Headers
+  3.4, and experimental CoreGraphics and CarbonEvents modules have
+  been added.  All only for framework-enabled MacOSX.
+
+
+What's New in Python 2.2c1?
+===========================
+
+*Release date: 14-Dec-2001*
+
+Type/class unification and new-style classes
+--------------------------------------------
+
+- Guido's tutorial introduction to the new type/class features has
+  been extensively updated.  See
+
+      http://www.python.org/2.2/descrintro.html
+
+  That remains the primary documentation in this area.
+
+- Fixed a leak: instance variables declared with __slots__ were never
+  deleted!
+
+- The "delete attribute" method of descriptor objects is called
+  __delete__, not __del__.  In previous releases, it was mistakenly
+  called __del__, which created an unfortunate overloading condition
+  with finalizers.  (The "get attribute" and "set attribute" methods
+  are still called __get__ and __set__, respectively.)
+
+- Some subtle issues with the super built-in were fixed:
+
+  (a) When super itself is subclassed, its __get__ method would still
+      return an instance of the base class (i.e., of super).
+
+  (b) super(C, C()).__class__ would return C rather than super.  This
+      is confusing.  To fix this, I decided to change the semantics of
+      super so that it only applies to code attributes, not to data
+      attributes.  After all, overriding data attributes is not
+      supported anyway.
+
+  (c) The __get__ method didn't check whether the argument was an
+      instance of the type used in creation of the super instance.
+
+- Previously, hash() of an instance of a subclass of a mutable type
+  (list or dictionary) would return some value, rather than raising
+  TypeError.  This has been fixed.  Also, directly calling
+  dict.__hash__ and list.__hash__ now raises the same TypeError
+  (previously, these were the same as object.__hash__).
+
+- New-style objects now support deleting their __dict__.  This is for
+  all intents and purposes equivalent to assigning a brand new empty
+  dictionary, but saves space if the object is not used further.
+
+Core and builtins
+-----------------
+
+- -Qnew now works as documented in PEP 238:  when -Qnew is passed on
+  the command line, all occurrences of "/" use true division instead
+  of classic division.  See the PEP for details.  Note that "all"
+  means all instances in library and 3rd-party modules, as well as in
+  your own code.  As the PEP says, -Qnew is intended for use only in
+  educational environments with control over the libraries in use.
+  Note that test_coercion.py in the standard Python test suite fails
+  under -Qnew; this is expected, and won't be repaired until true
+  division becomes the default (in the meantime, test_coercion is
+  testing the current rules).
+
+- complex() now only allows the first argument to be a string
+  argument, and raises TypeError if either the second arg is a string
+  or if the second arg is specified when the first is a string.
+
+Extension modules
+-----------------
+
+- gc.get_referents was renamed to gc.get_referrers.
+
+Library
+-------
+
+- Functions in the os.spawn() family now release the global interpreter
+  lock around calling the platform spawn.  They should always have done
+  this, but did not before 2.2c1.  Multithreaded programs calling
+  an os.spawn function with P_WAIT will no longer block all Python threads
+  until the spawned program completes.  It's possible that some programs
+  relies on blocking, although more likely by accident than by design.
+
+- webbrowser defaults to netscape.exe on OS/2 now.
+
+- Tix.ResizeHandle exposes detach_widget, hide, and show.
+
+- The charset alias windows_1252 has been added.
+
+- types.StringTypes is a tuple containing the defined string types;
+  usually this will be (str, unicode), but if Python was compiled
+  without Unicode support it will be just (str,).
+
+- The pulldom and minidom modules were synchronized to PyXML.
+
+Tools/Demos
+-----------
+
+- A new script called Tools/scripts/google.py was added, which fires
+  off a search on Google.
+
+Build
+-----
+
+- Note that release builds of Python should arrange to define the
+  preprocessor symbol NDEBUG on the command line (or equivalent).
+  In the 2.2 pre-release series we tried to define this by magic in
+  Python.h instead, but it proved to cause problems for extension
+  authors.  The Unix, Windows and Mac builds now all define NDEBUG in
+  release builds via cmdline (or equivalent) instead.  Ports to
+  other platforms should do likewise.
+
+- It is no longer necessary to use --with-suffix when building on a
+  case-insensitive file system (such as Mac OS X HFS+). In the build
+  directory an extension is used, but not in the installed python.
+
+C API
+-----
+
+- New function PyDict_MergeFromSeq2() exposes the builtin dict
+  constructor's logic for updating a dictionary from an iterable object
+  producing key-value pairs.
+
+- PyArg_ParseTupleAndKeywords() requires that the number of entries in
+  the keyword list equal the number of argument specifiers.  This
+  wasn't checked correctly, and PyArg_ParseTupleAndKeywords could even
+  dump core in some bad cases.  This has been repaired.  As a result,
+  PyArg_ParseTupleAndKeywords may raise RuntimeError in bad cases that
+  previously went unchallenged.
+
+New platforms
+-------------
+
+Tests
+-----
+
+Windows
+-------
+
+Mac
+----
+
+- In unix-Python on Mac OS X (and darwin) sys.platform is now "darwin",
+  without any trailing digits.
+
+- Changed logic for finding python home in Mac OS X framework Pythons.
+  Now sys.executable points to the executable again, in stead of to
+  the shared library. The latter is used only for locating the python
+  home.
+
+
+What's New in Python 2.2b2?
+===========================
+
+*Release date: 16-Nov-2001*
+
+Type/class unification and new-style classes
+--------------------------------------------
+
+- Multiple inheritance mixing new-style and classic classes in the
+  list of base classes is now allowed, so this works now:
+
+      class Classic: pass
+      class Mixed(Classic, object): pass
+
+  The MRO (method resolution order) for each base class is respected
+  according to its kind, but the MRO for the derived class is computed
+  using new-style MRO rules if any base class is a new-style class.
+  This needs to be documented.
+
+- The new builtin dictionary() constructor, and dictionary type, have
+  been renamed to dict.  This reflects a decade of common usage.
+
+- dict() now accepts an iterable object producing 2-sequences.  For
+  example, dict(d.items()) == d for any dictionary d.  The argument,
+  and the elements of the argument, can be any iterable objects.
+
+- New-style classes can now have a __del__ method, which is called
+  when the instance is deleted (just like for classic classes).
+
+- Assignment to object.__dict__ is now possible, for objects that are
+  instances of new-style classes that have a __dict__ (unless the base
+  class forbids it).
+
+- Methods of built-in types now properly check for keyword arguments
+  (formerly these were silently ignored).  The only built-in methods
+  that take keyword arguments are __call__, __init__ and __new__.
+
+- The socket function has been converted to a type; see below.
+
+Core and builtins
+-----------------
+
+- Assignment to __debug__ raises SyntaxError at compile-time.  This
+  was promised when 2.1c1 was released as "What's New in Python 2.1c1"
+  (see below) says.
+
+- Clarified the error messages for unsupported operands to an operator
+  (like 1 + '').
+
+Extension modules
+-----------------
+
+- mmap has a new keyword argument, "access", allowing a uniform way for
+  both Windows and Unix users to create read-only, write-through and
+  copy-on-write memory mappings.  This was previously possible only on
+  Unix.  A new keyword argument was required to support this in a
+  uniform way because the mmap() signatures had diverged across
+  platforms.  Thanks to Jay T Miller for repairing this!
+
+- By default, the gc.garbage list now contains only those instances in
+  unreachable cycles that have __del__ methods; in 2.1 it contained all
+  instances in unreachable cycles.  "Instances" here has been generalized
+  to include instances of both new-style and old-style classes.
+
+- The socket module defines a new method for socket objects,
+  sendall().  This is like send() but may make multiple calls to
+  send() until all data has been sent.  Also, the socket function has
+  been converted to a subclassable type, like list and tuple (etc.)
+  before it; socket and SocketType are now the same thing.
+
+- Various bugfixes to the curses module.  There is now a test suite
+  for the curses module (you have to run it manually).
+
+- binascii.b2a_base64 no longer places an arbitrary restriction of 57
+  bytes on its input.
+
+Library
+-------
+
+- tkFileDialog exposes a Directory class and askdirectory
+  convenience function.
+
+- Symbolic group names in regular expressions must be unique.  For
+  example, the regexp r'(?P<abc>)(?P<abc>)' is not allowed, because a
+  single name can't mean both "group 1" and "group 2" simultaneously.
+  Python 2.2 detects this error at regexp compilation time;
+  previously, the error went undetected, and results were
+  unpredictable.  Also in sre, the pattern.split(), pattern.sub(), and
+  pattern.subn() methods have been rewritten in C.  Also, an
+  experimental function/method finditer() has been added, which works
+  like findall() but returns an iterator.
+
+- Tix exposes more commands through the classes DirSelectBox,
+  DirSelectDialog, ListNoteBook, Meter, CheckList, and the
+  methods tix_addbitmapdir, tix_cget, tix_configure, tix_filedialog,
+  tix_getbitmap, tix_getimage, tix_option_get, and tix_resetoptions.
+
+- Traceback objects are now scanned by cyclic garbage collection, so
+  cycles created by casual use of sys.exc_info() no longer cause
+  permanent memory leaks (provided garbage collection is enabled).
+
+- os.extsep -- a new variable needed by the RISCOS support.  It is the
+  separator used by extensions, and is '.' on all platforms except
+  RISCOS, where it is '/'.  There is no need to use this variable
+  unless you have a masochistic desire to port your code to RISCOS.
+
+- mimetypes.py has optional support for non-standard, but commonly
+  found types.  guess_type() and guess_extension() now accept an
+  optional 'strict' flag, defaulting to true, which controls whether
+  recognize non-standard types or not.  A few non-standard types we
+  know about have been added.  Also, when run as a script, there are
+  new -l and -e options.
+
+- statcache is now deprecated.
+
+- email.Utils.formatdate() now produces the preferred RFC 2822 style
+  dates with numeric timezones (it used to produce obsolete dates
+  hard coded to "GMT" timezone).  An optional 'localtime' flag is
+  added to produce dates in the local timezone, with daylight savings
+  time properly taken into account.
+
+- In pickle and cPickle, instead of masking errors in load() by
+  transforming them into SystemError, we let the original exception
+  propagate out.  Also, implement support for __safe_for_unpickling__
+  in pickle, as it already was supported in cPickle.
+
+Tools/Demos
+-----------
+
+Build
+-----
+
+- The dbm module is built using libdb1 if available.  The bsddb module
+  is built with libdb3 if available.
+
+- Misc/Makefile.pre.in has been removed by BDFL pronouncement.
+
+C API
+-----
+
+- New function PySequence_Fast_GET_SIZE() returns the size of a non-
+  NULL result from PySequence_Fast(), more quickly than calling
+  PySequence_Size().
+
+- New argument unpacking function PyArg_UnpackTuple() added.
+
+- New functions PyObject_CallFunctionObjArgs() and
+  PyObject_CallMethodObjArgs() have been added to make it more
+  convenient and efficient to call functions and methods from C.
+
+- PyArg_ParseTupleAndKeywords() no longer masks errors, so it's
+  possible that this will propagate errors it didn't before.
+
+- New function PyObject_CheckReadBuffer(), which returns true if its
+  argument supports the single-segment readable buffer interface.
+
+New platforms
+-------------
+
+- We've finally confirmed that this release builds on HP-UX 11.00,
+  *with* threads, and passes the test suite.
+
+- Thanks to a series of patches from Michael Muller, Python may build
+  again under OS/2 Visual Age C++.
+
+- Updated RISCOS port by Dietmar Schwertberger.
+
+Tests
+-----
+
+- Added a test script for the curses module.  It isn't run automatically;
+  regrtest.py must be run with '-u curses' to enable it.
+
+Windows
+-------
+
+Mac
+----
+
+- PythonScript has been moved to unsupported and is slated to be
+  removed completely in the next release.
+
+- It should now be possible to build applets that work on both OS9 and
+  OSX.
+
+- The core is now linked with CoreServices not Carbon; as a side
+  result, default 8bit encoding on OSX is now ASCII.
+
+- Python should now build on OSX 10.1.1
+
+
+What's New in Python 2.2b1?
+===========================
+
+*Release date: 19-Oct-2001*
+
+Type/class unification and new-style classes
+--------------------------------------------
+
+- New-style classes are now always dynamic (except for built-in and
+  extension types).  There is no longer a performance penalty, and I
+  no longer see another reason to keep this baggage around.  One relic
+  remains: the __dict__ of a new-style class is a read-only proxy; you
+  must set the class's attribute to modify it.  As a consequence, the
+  __defined__ attribute of new-style types no longer exists, for lack
+  of need: there is once again only one __dict__ (although in the
+  future a __cache__ may be resurrected with a similar function, if I
+  can prove that it actually speeds things up).
+
+- C.__doc__ now works as expected for new-style classes (in 2.2a4 it
+  always returned None, even when there was a class docstring).
+
+- doctest now finds and runs docstrings attached to new-style classes,
+  class methods, static methods, and properties.
+
+Core and builtins
+-----------------
+
+- A very subtle syntactical pitfall in list comprehensions was fixed.
+  For example: [a+b for a in 'abc', for b in 'def'].  The comma in
+  this example is a mistake.  Previously, this would silently let 'a'
+  iterate over the singleton tuple ('abc',), yielding ['abcd', 'abce',
+  'abcf'] rather than the intended ['ad', 'ae', 'af', 'bd', 'be',
+  'bf', 'cd', 'ce', 'cf'].  Now, this is flagged as a syntax error.
+  Note that [a for a in <singleton>] is a convoluted way to say
+  [<singleton>] anyway, so it's not like any expressiveness is lost.
+
+- getattr(obj, name, default) now only catches AttributeError, as
+  documented, rather than returning the default value for all
+  exceptions (which could mask bugs in a __getattr__ hook, for
+  example).
+
+- Weak reference objects are now part of the core and offer a C API.
+  A bug which could allow a core dump when binary operations involved
+  proxy reference has been fixed.  weakref.ReferenceError is now a
+  built-in exception.
+
+- unicode(obj) now behaves more like str(obj), accepting arbitrary
+  objects, and calling a __unicode__ method if it exists.
+  unicode(obj, encoding) and unicode(obj, encoding, errors) still
+  require an 8-bit string or character buffer argument.
+
+- isinstance() now allows any object as the first argument and a
+  class, a type or something with a __bases__ tuple attribute for the
+  second argument.  The second argument may also be a tuple of a
+  class, type, or something with __bases__, in which case isinstance()
+  will return true if the first argument is an instance of any of the
+  things contained in the second argument tuple.  E.g.
+
+  isinstance(x, (A, B))
+
+  returns true if x is an instance of A or B.
+
+Extension modules
+-----------------
+
+- thread.start_new_thread() now returns the thread ID (previously None).
+
+- binascii has now two quopri support functions, a2b_qp and b2a_qp.
+
+- readline now supports setting the startup_hook and the
+  pre_event_hook, and adds the add_history() function.
+
+- os and posix supports chroot(), setgroups() and unsetenv() where
+  available.  The stat(), fstat(), statvfs() and fstatvfs() functions
+  now return "pseudo-sequences" -- the various fields can now be
+  accessed as attributes (e.g. os.stat("/").st_mtime) but for
+  backwards compatibility they also behave as a fixed-length sequence.
+  Some platform-specific fields (e.g. st_rdev) are only accessible as
+  attributes.
+
+- time: localtime(), gmtime() and strptime() now return a
+  pseudo-sequence similar to the os.stat() return value, with
+  attributes like tm_year etc.
+
+- Decompression objects in the zlib module now accept an optional
+  second parameter to decompress() that specifies the maximum amount
+  of memory to use for the uncompressed data.
+
+- optional SSL support in the socket module now exports OpenSSL
+  functions RAND_add(), RAND_egd(), and RAND_status().  These calls
+  are useful on platforms like Solaris where OpenSSL does not
+  automatically seed its PRNG.  Also, the keyfile and certfile
+  arguments to socket.ssl() are now optional.
+
+- posixmodule (and by extension, the os module on POSIX platforms) now
+  exports O_LARGEFILE, O_DIRECT, O_DIRECTORY, and O_NOFOLLOW.
+
+Library
+-------
+
+- doctest now excludes functions and classes not defined by the module
+  being tested, thanks to Tim Hochberg.
+
+- HotShot, a new profiler implemented using a C-based callback, has
+  been added.  This substantially reduces the overhead of profiling,
+  but it is still quite preliminary.  Support modules and
+  documentation will be added in upcoming releases (before 2.2 final).
+
+- profile now produces correct output in situations where an exception
+  raised in Python is cleared by C code (e.g. hasattr()).  This used
+  to cause wrong output, including spurious claims of recursive
+  functions and attribution of time spent to the wrong function.
+
+  The code and documentation for the derived OldProfile and HotProfile
+  profiling classes was removed.  The code hasn't worked for years (if
+  you tried to use them, they raised exceptions).  OldProfile
+  intended to reproduce the behavior of the profiler Python used more
+  than 7 years ago, and isn't interesting anymore.  HotProfile intended
+  to provide a faster profiler (but producing less information), and
+  that's a worthy goal we intend to meet via a different approach (but
+  without losing information).
+
+- Profile.calibrate() has a new implementation that should deliver
+  a much better system-specific calibration constant.  The constant can
+  now be specified in an instance constructor, or as a Profile class or
+  instance variable, instead of by editing profile.py's source code.
+  Calibration must still be done manually (see the docs for the profile
+  module).
+
+  Note that Profile.calibrate() must be overridden by subclasses.
+  Improving the accuracy required exploiting detailed knowledge of
+  profiler internals; the earlier method abstracted away the details
+  and measured a simplified model instead, but consequently computed
+  a constant too small by a factor of 2 on some modern machines.
+
+- quopri's encode and decode methods take an optional header parameter,
+  which indicates whether output is intended for the header 'Q'
+  encoding.
+
+- The SocketServer.ThreadingMixIn class now closes the request after
+  finish_request() returns.  (Not when it errors out though.)
+
+- The nntplib module's NNTP.body() method has grown a 'file' argument
+  to allow saving the message body to a file.
+
+- The email package has added a class email.Parser.HeaderParser which
+  only parses headers and does not recurse into the message's body.
+  Also, the module/class MIMEAudio has been added for representing
+  audio data (contributed by Anthony Baxter).
+
+- ftplib should be able to handle files > 2GB.
+
+- ConfigParser.getboolean() now also interprets TRUE, FALSE, YES, NO,
+  ON, and OFF.
+
+- xml.dom.minidom NodeList objects now support the length attribute
+  and item() method as required by the DOM specifications.
+
+Tools/Demos
+-----------
+
+- Demo/dns was removed.  It no longer serves any purpose; a package
+  derived from it is now maintained by Anthony Baxter, see
+  http://PyDNS.SourceForge.net.
+
+- The freeze tool has been made more robust, and two new options have
+  been added: -X and -E.
+
+Build
+-----
+
+- configure will use CXX in LINKCC if CXX is used to build main() and
+  the system requires to link a C++ main using the C++ compiler.
+
+C API
+-----
+
+- The documentation for the tp_compare slot is updated to require that
+  the return value must be -1, 0, 1; an arbitrary number <0 or >0 is
+  not correct.  This is not yet enforced but will be enforced in
+  Python 2.3; even later, we may use -2 to indicate errors and +2 for
+  "NotImplemented".  Right now, -1 should be used for an error return.
+
+- PyLong_AsLongLong() now accepts int (as well as long) arguments.
+  Consequently, PyArg_ParseTuple's 'L' code also accepts int (as well
+  as long) arguments.
+
+- PyThread_start_new_thread() now returns a long int giving the thread
+  ID, if one can be calculated; it returns -1 for error, 0 if no
+  thread ID is calculated (this is an incompatible change, but only
+  the thread module used this API).  This code has only really been
+  tested on Linux and Windows; other platforms please beware (and
+  report any bugs or strange behavior).
+
+- PyUnicode_FromEncodedObject() no longer accepts Unicode objects as
+  input.
+
+New platforms
+-------------
+
+Tests
+-----
+
+Windows
+-------
+
+- Installer:  If you install IDLE, and don't disable file-extension
+  registration, a new "Edit with IDLE" context (right-click) menu entry
+  is created for .py and .pyw files.
+
+- The signal module now supports SIGBREAK on Windows, thanks to Steven
+  Scott.  Note that SIGBREAK is unique to Windows.  The default SIGBREAK
+  action remains to call Win32 ExitProcess().  This can be changed via
+  signal.signal().  For example::
+
+      # Make Ctrl+Break raise KeyboardInterrupt, like Python's default Ctrl+C
+      # (SIGINT) behavior.
+      import signal
+      signal.signal(signal.SIGBREAK, signal.default_int_handler)
+
+      try:
+          while 1:
+              pass
+      except KeyboardInterrupt:
+          # We get here on Ctrl+C or Ctrl+Break now; if we had not changed
+          # SIGBREAK, only on Ctrl+C (and Ctrl+Break would terminate the
+          # program without the possibility for any Python-level cleanup).
+          print "Clean exit"
+
+
+What's New in Python 2.2a4?
+===========================
+
+*Release date: 28-Sep-2001*
+
+Type/class unification and new-style classes
+--------------------------------------------
+
+- pydoc and inspect are now aware of new-style classes;
+  e.g. help(list) at the interactive prompt now shows proper
+  documentation for all operations on list objects.
+
+- Applications using Jim Fulton's ExtensionClass module can now safely
+  be used with Python 2.2.  In particular, Zope 2.4.1 now works with
+  Python 2.2 (as well as with Python 2.1.1).  The Demo/metaclass
+  examples also work again.  It is hoped that Gtk and Boost also work
+  with 2.2a4 and beyond.  (If you can confirm this, please write
+  webmaster@python.org; if there are still problems, please open a bug
+  report on SourceForge.)
+
+- property() now takes 4 keyword arguments:  fget, fset, fdel and doc.
+  These map to read-only attributes 'fget', 'fset', 'fdel', and '__doc__'
+  in the constructed property object.  fget, fset and fdel weren't
+  discoverable from Python in 2.2a3.  __doc__ is new, and allows to
+  associate a docstring with a property.
+
+- Comparison overloading is now more completely implemented.  For
+  example, a str subclass instance can properly be compared to a str
+  instance, and it can properly overload comparison.  Ditto for most
+  other built-in object types.
+
+- The repr() of new-style classes has changed; instead of <type
+  'M.Foo'> a new-style class is now rendered as <class 'M.Foo'>,
+  *except* for built-in types, which are still rendered as <type
+  'Foo'> (to avoid upsetting existing code that might parse or
+  otherwise rely on repr() of certain type objects).
+
+- The repr() of new-style objects is now always <Foo object at XXX>;
+  previously, it was sometimes <Foo instance at XXX>.
+
+- For new-style classes, what was previously called __getattr__ is now
+  called __getattribute__.  This method, if defined, is called for
+  *every* attribute access.  A new __getattr__ hook more similar to the
+  one in classic classes is defined which is called only if regular
+  attribute access raises AttributeError; to catch *all* attribute
+  access, you can use __getattribute__ (for new-style classes).  If
+  both are defined, __getattribute__ is called first, and if it raises
+  AttributeError, __getattr__ is called.
+
+- The __class__ attribute of new-style objects can be assigned to.
+  The new class must have the same C-level object layout as the old
+  class.
+
+- The builtin file type can be subclassed now.  In the usual pattern,
+  "file" is the name of the builtin type, and file() is a new builtin
+  constructor, with the same signature as the builtin open() function.
+  file() is now the preferred way to open a file.
+
+- Previously, __new__ would only see sequential arguments passed to
+  the type in a constructor call; __init__ would see both sequential
+  and keyword arguments.  This made no sense whatsoever any more, so
+  now both __new__ and __init__ see all arguments.
+
+- Previously, hash() applied to an instance of a subclass of str or
+  unicode always returned 0.  This has been repaired.
+
+- Previously, an operation on an instance of a subclass of an
+  immutable type (int, long, float, complex, tuple, str, unicode),
+  where the subtype didn't override the operation (and so the
+  operation was handled by the builtin type), could return that
+  instance instead a value of the base type.  For example, if s was of
+  a str subclass type, s[:] returned s as-is.  Now it returns a str
+  with the same value as s.
+
+- Provisional support for pickling new-style objects has been added.
+
+Core
+----
+
+- file.writelines() now accepts any iterable object producing strings.
+
+- PyUnicode_FromEncodedObject() now works very much like
+  PyObject_Str(obj) in that it tries to use __str__/tp_str
+  on the object if the object is not a string or buffer. This
+  makes unicode() behave like str() when applied to non-string/buffer
+  objects.
+
+- PyFile_WriteObject now passes Unicode objects to the file's write
+  method. As a result, all file-like objects which may be the target
+  of a print statement must support Unicode objects, i.e. they must
+  at least convert them into ASCII strings.
+
+- Thread scheduling on Solaris should be improved; it is no longer
+  necessary to insert a small sleep at the start of a thread in order
+  to let other runnable threads be scheduled.
+
+Library
+-------
+
+- StringIO.StringIO instances and cStringIO.StringIO instances support
+  read character buffer compatible objects for their .write() methods.
+  These objects are converted to strings and then handled as such
+  by the instances.
+
+- The "email" package has been added.  This is basically a port of the
+  mimelib package <http://sf.net/projects/mimelib> with API changes
+  and some implementations updated to use iterators and generators.
+
+- difflib.ndiff() and difflib.Differ.compare() are generators now.  This
+  restores the ability of Tools/scripts/ndiff.py to start producing output
+  before the entire comparison is complete.
+
+- StringIO.StringIO instances and cStringIO.StringIO instances support
+  iteration just like file objects (i.e. their .readline() method is
+  called for each iteration until it returns an empty string).
+
+- The codecs module has grown four new helper APIs to access
+  builtin codecs: getencoder(), getdecoder(), getreader(),
+  getwriter().
+
+- SimpleXMLRPCServer: a new module (based upon SimpleHTMLServer)
+  simplifies writing XML RPC servers.
+
+- os.path.realpath(): a new function that returns the absolute pathname
+  after interpretation of symbolic links.  On non-Unix systems, this
+  is an alias for os.path.abspath().
+
+- operator.indexOf() (PySequence_Index() in the C API) now works with any
+  iterable object.
+
+- smtplib now supports various authentication and security features of
+  the SMTP protocol through the new login() and starttls() methods.
+
+- hmac: a new module implementing keyed hashing for message
+  authentication.
+
+- mimetypes now recognizes more extensions and file types.  At the
+  same time, some mappings not sanctioned by IANA were removed.
+
+- The "compiler" package has been brought up to date to the state of
+  Python 2.2 bytecode generation.  It has also been promoted from a
+  Tool to a standard library package.  (Tools/compiler still exists as
+  a sample driver.)
+
+Build
+-----
+
+- Large file support (LFS) is now automatic when the platform supports
+  it; no more manual configuration tweaks are needed.  On Linux, at
+  least, it's possible to have a system whose C library supports large
+  files but whose kernel doesn't; in this case, large file support is
+  still enabled but doesn't do you any good unless you upgrade your
+  kernel or share your Python executable with another system whose
+  kernel has large file support.
+
+- The configure script now supplies plausible defaults in a
+  cross-compilation environment.  This doesn't mean that the supplied
+  values are always correct, or that cross-compilation now works
+  flawlessly -- but it's a first step (and it shuts up most of
+  autoconf's warnings about AC_TRY_RUN).
+
+- The Unix build is now a bit less chatty, courtesy of the parser
+  generator.  The build is completely silent (except for errors) when
+  using "make -s", thanks to a -q option to setup.py.
+
+C API
+-----
+
+- The "structmember" API now supports some new flag bits to deny read
+  and/or write access to attributes in restricted execution mode.
+
+New platforms
+-------------
+
+- Compaq's iPAQ handheld, running the "familiar" Linux distribution
+  (http://familiar.handhelds.org).
+
+Tests
+-----
+
+- The "classic" standard tests, which work by comparing stdout to
+  an expected-output file under Lib/test/output/, no longer stop at
+  the first mismatch.  Instead the test is run to completion, and a
+  variant of ndiff-style comparison is used to report all differences.
+  This is much easier to understand than the previous style of reporting.
+
+- The unittest-based standard tests now use regrtest's test_main()
+  convention, instead of running as a side-effect of merely being
+  imported.  This allows these tests to be run in more natural and
+  flexible ways as unittests, outside the regrtest framework.
+
+- regrtest.py is much better integrated with unittest and doctest now,
+  especially in regard to reporting errors.
+
+Windows
+-------
+
+- Large file support now also works for files > 4GB, on filesystems
+  that support it (NTFS under Windows 2000).  See "What's New in
+  Python 2.2a3" for more detail.
+
+
+What's New in Python 2.2a3?
+===========================
+
+*Release Date: 07-Sep-2001*
+
+Core
+----
+
+- Conversion of long to float now raises OverflowError if the long is too
+  big to represent as a C double.
+
+- The 3-argument builtin pow() no longer allows a third non-None argument
+  if either of the first two arguments is a float, or if both are of
+  integer types and the second argument is negative (in which latter case
+  the arguments are converted to float, so this is really the same
+  restriction).
+
+- The builtin dir() now returns more information, and sometimes much
+  more, generally naming all attributes of an object, and all attributes
+  reachable from the object via its class, and from its class's base
+  classes, and so on from them too.  Example:  in 2.2a2, dir([]) returned
+  an empty list.  In 2.2a3,
+
+  >>> dir([])
+  ['__add__', '__class__', '__contains__', '__delattr__', '__delitem__',
+   '__eq__', '__ge__', '__getattr__', '__getitem__', '__getslice__',
+   '__gt__', '__hash__', '__iadd__', '__imul__', '__init__', '__le__',
+   '__len__', '__lt__', '__mul__', '__ne__', '__new__', '__repr__',
+   '__rmul__', '__setattr__', '__setitem__', '__setslice__', '__str__',
+   'append', 'count', 'extend', 'index', 'insert', 'pop', 'remove',
+   'reverse', 'sort']
+
+  dir(module) continues to return only the module's attributes, though.
+
+- Overflowing operations on plain ints now return a long int rather
+  than raising OverflowError.  This is a partial implementation of PEP
+  237.  You can use -Wdefault::OverflowWarning to enable a warning for
+  this situation, and -Werror::OverflowWarning to revert to the old
+  OverflowError exception.
+
+- A new command line option, -Q<arg>, is added to control run-time
+  warnings for the use of classic division.  (See PEP 238.)  Possible
+  values are -Qold, -Qwarn, -Qwarnall, and -Qnew.  The default is
+  -Qold, meaning the / operator has its classic meaning and no
+  warnings are issued.  Using -Qwarn issues a run-time warning about
+  all uses of classic division for int and long arguments; -Qwarnall
+  also warns about classic division for float and complex arguments
+  (for use with fixdiv.py).
+  [Note:  the remainder of this item (preserved below) became
+  obsolete in 2.2c1 -- -Qnew has global effect in 2.2] ::
+
+    Using -Qnew is questionable; it turns on new division by default, but
+    only in the __main__ module.  You can usefully combine -Qwarn or
+    -Qwarnall and -Qnew: this gives the __main__ module new division, and
+    warns about classic division everywhere else.
+
+- Many built-in types can now be subclassed.  This applies to int,
+  long, float, str, unicode, and tuple.  (The types complex, list and
+  dictionary can also be subclassed; this was introduced earlier.)
+  Note that restrictions apply when subclassing immutable built-in
+  types: you can only affect the value of the instance by overloading
+  __new__.  You can add mutable attributes, and the subclass instances
+  will have a __dict__ attribute, but you cannot change the "value"
+  (as implemented by the base class) of an immutable subclass instance
+  once it is created.
+
+- The dictionary constructor now takes an optional argument, a
+  mapping-like object, and initializes the dictionary from its
+  (key, value) pairs.
+
+- A new built-in type, super, has been added.  This facilitates making
+  "cooperative super calls" in a multiple inheritance setting.  For an
+  explanation, see http://www.python.org/2.2/descrintro.html#cooperation
+
+- A new built-in type, property, has been added.  This enables the
+  creation of "properties".  These are attributes implemented by
+  getter and setter functions (or only one of these for read-only or
+  write-only attributes), without the need to override __getattr__.
+  See http://www.python.org/2.2/descrintro.html#property
+
+- The syntax of floating-point and imaginary literals has been
+  liberalized, to allow leading zeroes.  Examples of literals now
+  legal that were SyntaxErrors before:
+
+      00.0    0e3   0100j   07.5   00000000000000000008.
+
+- An old tokenizer bug allowed floating point literals with an incomplete
+  exponent, such as 1e and 3.1e-.  Such literals now raise SyntaxError.
+
+Library
+-------
+
+- telnetlib includes symbolic names for the options, and support for
+  setting an option negotiation callback. It also supports processing
+  of suboptions.
+
+- The new C standard no longer requires that math libraries set errno to
+  ERANGE on overflow.  For platform libraries that exploit this new
+  freedom, Python's overflow-checking was wholly broken.  A new overflow-
+  checking scheme attempts to repair that, but may not be reliable on all
+  platforms (C doesn't seem to provide anything both useful and portable
+  in this area anymore).
+
+- Asynchronous timeout actions are available through the new class
+  threading.Timer.
+
+- math.log and math.log10 now return sensible results for even huge
+  long arguments.  For example, math.log10(10 ** 10000) ~= 10000.0.
+
+- A new function, imp.lock_held(), returns 1 when the import lock is
+  currently held.  See the docs for the imp module.
+
+- pickle, cPickle and marshal on 32-bit platforms can now correctly read
+  dumps containing ints written on platforms where Python ints are 8 bytes.
+  When read on a box where Python ints are 4 bytes, such values are
+  converted to Python longs.
+
+- In restricted execution mode (using the rexec module), unmarshalling
+  code objects is no longer allowed.  This plugs a security hole.
+
+- unittest.TestResult instances no longer store references to tracebacks
+  generated by test failures. This prevents unexpected dangling references
+  to objects that should be garbage collected between tests.
+
+Tools
+-----
+
+- Tools/scripts/fixdiv.py has been added which can be used to fix
+  division operators as per PEP 238.
+
+Build
+-----
+
+- If you are an adventurous person using Mac OS X you may want to look at
+  Mac/OSX. There is a Makefile there that will build Python as a real Mac
+  application, which can be used for experimenting with Carbon or Cocoa.
+  Discussion of this on pythonmac-sig, please.
+
+C API
+-----
+
+- New function PyObject_Dir(obj), like Python __builtin__.dir(obj).
+
+- Note that PyLong_AsDouble can fail!  This has always been true, but no
+  callers checked for it.  It's more likely to fail now, because overflow
+  errors are properly detected now.  The proper way to check::
+
+      double x = PyLong_AsDouble(some_long_object);
+      if (x == -1.0 && PyErr_Occurred()) {
+              /* The conversion failed. */
+      }
+
+- The GC API has been changed.  Extensions that use the old API will still
+  compile but will not participate in GC.  To upgrade an extension
+  module:
+
+    - rename Py_TPFLAGS_GC to PyTPFLAGS_HAVE_GC
+
+    - use PyObject_GC_New or PyObject_GC_NewVar to allocate objects and
+      PyObject_GC_Del to deallocate them
+
+    - rename PyObject_GC_Init to PyObject_GC_Track and PyObject_GC_Fini
+      to PyObject_GC_UnTrack
+
+    - remove PyGC_HEAD_SIZE from object size calculations
+
+    - remove calls to PyObject_AS_GC and PyObject_FROM_GC
+
+- Two new functions: PyString_FromFormat() and PyString_FromFormatV().
+  These can be used safely to construct string objects from a
+  sprintf-style format string (similar to the format string supported
+  by PyErr_Format()).
+
+New platforms
+-------------
+
+- Stephen Hansen contributed patches sufficient to get a clean compile
+  under Borland C (Windows), but he reports problems running it and ran
+  out of time to complete the port.  Volunteers?  Expect a MemoryError
+  when importing the types module; this is probably shallow, and
+  causing later failures too.
+
+Tests
+-----
+
+Windows
+-------
+
+- Large file support is now enabled on Win32 platforms as well as on
+  Win64.  This means that, for example, you can use f.tell() and f.seek()
+  to manipulate files larger than 2 gigabytes (provided you have enough
+  disk space, and are using a Windows filesystem that supports large
+  partitions).  Windows filesystem limits:  FAT has a 2GB (gigabyte)
+  filesize limit, and large file support makes no difference there.
+  FAT32's limit is 4GB, and files >= 2GB are easier to use from Python now.
+  NTFS has no practical limit on file size, and files of any size can be
+  used from Python now.
+
+- The w9xpopen hack is now used on Windows NT and 2000 too when COMPSPEC
+  points to command.com (patch from Brian Quinlan).
+
+
+What's New in Python 2.2a2?
+===========================
+
+*Release Date: 22-Aug-2001*
+
+Build
+-----
+
+- Tim Peters developed a brand new Windows installer using Wise 8.1,
+  generously donated to us by Wise Solutions.
+
+- configure supports a new option --enable-unicode, with the values
+  ucs2 and ucs4 (new in 2.2a1). With --disable-unicode, the Unicode
+  type and supporting code is completely removed from the interpreter.
+
+- A new configure option --enable-framework builds a Mac OS X framework,
+  which "make frameworkinstall" will install. This provides a starting
+  point for more mac-like functionality, join pythonmac-sig@python.org
+  if you are interested in helping.
+
+- The NeXT platform is no longer supported.
+
+- The 'new' module is now statically linked.
+
+Tools
+-----
+
+- The new Tools/scripts/cleanfuture.py can be used to automatically
+  edit out obsolete future statements from Python source code.  See
+  the module docstring for details.
+
+Tests
+-----
+
+- regrtest.py now knows which tests are expected to be skipped on some
+  platforms, allowing to give clearer test result output.  regrtest
+  also has optional --use/-u switch to run normally disabled tests
+  which require network access or consume significant disk resources.
+
+- Several new tests in the standard test suite, with special thanks to
+  Nick Mathewson.
+
+Core
+----
+
+- The floor division operator // has been added as outlined in PEP
+  238.  The / operator still provides classic division (and will until
+  Python 3.0) unless "from __future__ import division" is included, in
+  which case the / operator will provide true division.  The operator
+  module provides truediv() and floordiv() functions.  Augmented
+  assignment variants are included, as are the equivalent overloadable
+  methods and C API methods.  See the PEP for a full discussion:
+  <http://python.sf.net/peps/pep-0238.html>
+
+- Future statements are now effective in simulated interactive shells
+  (like IDLE).  This should "just work" by magic, but read Michael
+  Hudson's "Future statements in simulated shells" PEP 264 for full
+  details:  <http://python.sf.net/peps/pep-0264.html>.
+
+- The type/class unification (PEP 252-253) was integrated into the
+  trunk and is not so tentative any more (the exact specification of
+  some features is still tentative).  A lot of work has done on fixing
+  bugs and adding robustness and features (performance still has to
+  come a long way).
+
+- Warnings about a mismatch in the Python API during extension import
+  now use the Python warning framework (which makes it possible to
+  write filters for these warnings).
+
+- A function's __dict__ (aka func_dict) will now always be a
+  dictionary.  It used to be possible to delete it or set it to None,
+  but now both actions raise TypeErrors.  It is still legal to set it
+  to a dictionary object.  Getting func.__dict__ before any attributes
+  have been assigned now returns an empty dictionary instead of None.
+
+- A new command line option, -E, was added which disables the use of
+  all environment variables, or at least those that are specifically
+  significant to Python.  Usually those have a name starting with
+  "PYTHON".  This was used to fix a problem where the tests fail if
+  the user happens to have PYTHONHOME or PYTHONPATH pointing to an
+  older distribution.
+
+Library
+-------
+
+- New class Differ and new functions ndiff() and restore() in difflib.py.
+  These package the algorithms used by the popular Tools/scripts/ndiff.py,
+  for programmatic reuse.
+
+- New function xml.sax.saxutils.quoteattr():  Quote an XML attribute
+  value using the minimal quoting required for the value; more
+  reliable than using xml.sax.saxutils.escape() for attribute values.
+
+- Readline completion support for cmd.Cmd was added.
+
+- Calling os.tempnam() or os.tmpnam() generate RuntimeWarnings.
+
+- Added function threading.BoundedSemaphore()
+
+- Added Ka-Ping Yee's cgitb.py module.
+
+- The 'new' module now exposes the CO_xxx flags.
+
+- The gc module offers the get_referents function.
+
+New platforms
+-------------
+
+C API
+-----
+
+- Two new APIs PyOS_snprintf() and PyOS_vsnprintf() were added
+  which provide a cross-platform implementations for the
+  relatively new snprintf()/vsnprintf() C lib APIs. In contrast to
+  the standard sprintf() and vsprintf() C lib APIs, these versions
+  apply bounds checking on the used buffer which enhances protection
+  against buffer overruns.
+
+- Unicode APIs now use name mangling to assure that mixing interpreters
+  and extensions using different Unicode widths is rendered next to
+  impossible. Trying to import an incompatible Unicode-aware extension
+  will result in an ImportError.  Unicode extensions writers must make
+  sure to check the Unicode width compatibility in their extensions by
+  using at least one of the mangled Unicode APIs in the extension.
+
+- Two new flags METH_NOARGS and METH_O are available in method definition
+  tables to simplify implementation of methods with no arguments and a
+  single untyped argument. Calling such methods is more efficient than
+  calling corresponding METH_VARARGS methods. METH_OLDARGS is now
+  deprecated.
+
+Windows
+-------
+
+- "import module" now compiles module.pyw if it exists and nothing else
+  relevant is found.
+
+
+What's New in Python 2.2a1?
+===========================
+
+*Release date: 18-Jul-2001*
+
+Core
+----
+
+- TENTATIVELY, a large amount of code implementing much of what's
+  described in PEP 252 (Making Types Look More Like Classes) and PEP
+  253 (Subtyping Built-in Types) was added.  This will be released
+  with Python 2.2a1.  Documentation will be provided separately
+  through http://www.python.org/2.2/.  The purpose of releasing this
+  with Python 2.2a1 is to test backwards compatibility.  It is
+  possible, though not likely, that a decision is made not to release
+  this code as part of 2.2 final, if any serious backwards
+  incompatibilities are found during alpha testing that cannot be
+  repaired.
+
+- Generators were added; this is a new way to create an iterator (see
+  below) using what looks like a simple function containing one or
+  more 'yield' statements.  See PEP 255.  Since this adds a new
+  keyword to the language, this feature must be enabled by including a
+  future statement: "from __future__ import generators" (see PEP 236).
+  Generators will become a standard feature in a future release
+  (probably 2.3).  Without this future statement, 'yield' remains an
+  ordinary identifier, but a warning is issued each time it is used.
+  (These warnings currently don't conform to the warnings framework of
+  PEP 230; we intend to fix this in 2.2a2.)
+
+- The UTF-16 codec was modified to be more RFC compliant. It will now
+  only remove BOM characters at the start of the string and then
+  only if running in native mode (UTF-16-LE and -BE won't remove a
+  leading BMO character).
+
+- Strings now have a new method .decode() to complement the already
+  existing .encode() method. These two methods provide direct access
+  to the corresponding decoders and encoders of the registered codecs.
+
+  To enhance the usability of the .encode() method, the special
+  casing of Unicode object return values was dropped (Unicode objects
+  were auto-magically converted to string using the default encoding).
+
+  Both methods will now return whatever the codec in charge of the
+  requested encoding returns as object, e.g. Unicode codecs will
+  return Unicode objects when decoding is requested ("äöü".decode("latin-1")
+  will return u"äöü"). This enables codec writer to create codecs
+  for various simple to use conversions.
+
+  New codecs were added to demonstrate these new features (the .encode()
+  and .decode() columns indicate the type of the returned objects):
+
+  +---------+-----------+-----------+-----------------------------+
+  |Name     | .encode() | .decode() | Description                 |
+  +=========+===========+===========+=============================+
+  |uu       | string    | string    | UU codec (e.g. for email)   |
+  +---------+-----------+-----------+-----------------------------+
+  |base64   | string    | string    | base64 codec                |
+  +---------+-----------+-----------+-----------------------------+
+  |quopri   | string    | string    | quoted-printable codec      |
+  +---------+-----------+-----------+-----------------------------+
+  |zlib     | string    | string    | zlib compression            |
+  +---------+-----------+-----------+-----------------------------+
+  |hex      | string    | string    | 2-byte hex codec            |
+  +---------+-----------+-----------+-----------------------------+
+  |rot-13   | string    | Unicode   | ROT-13 Unicode charmap codec|
+  +---------+-----------+-----------+-----------------------------+
+
+- Some operating systems now support the concept of a default Unicode
+  encoding for file system operations.  Notably, Windows supports 'mbcs'
+  as the default.  The Macintosh will also adopt this concept in the medium
+  term, although the default encoding for that platform will be other than
+  'mbcs'.
+
+  On operating system that support non-ASCII filenames, it is common for
+  functions that return filenames (such as os.listdir()) to return Python
+  string objects pre-encoded using the default file system encoding for
+  the platform.  As this encoding is likely to be different from Python's
+  default encoding, converting this name to a Unicode object before passing
+  it back to the Operating System would result in a Unicode error, as Python
+  would attempt to use its default encoding (generally ASCII) rather than
+  the default encoding for the file system.
+
+  In general, this change simply removes surprises when working with
+  Unicode and the file system, making these operations work as you expect,
+  increasing the transparency of Unicode objects in this context.
+  See [????] for more details, including examples.
+
+- Float (and complex) literals in source code were evaluated to full
+  precision only when running from a .py file; the same code loaded from a
+  .pyc (or .pyo) file could suffer numeric differences starting at about the
+  12th significant decimal digit.  For example, on a machine with IEEE-754
+  floating arithmetic,
+
+      x = 9007199254740992.0
+      print long(x)
+
+  printed 9007199254740992 if run directly from .py, but 9007199254740000
+  if from a compiled (.pyc or .pyo) file.  This was due to marshal using
+  str(float) instead of repr(float) when building code objects.  marshal
+  now uses repr(float) instead, which should reproduce floats to full
+  machine precision (assuming the platform C float<->string I/O conversion
+  functions are of good quality).
+
+  This may cause floating-point results to change in some cases, and
+  usually for the better, but may also cause numerically unstable
+  algorithms to break.
+
+- The implementation of dicts suffers fewer collisions, which has speed
+  benefits.  However, the order in which dict entries appear in dict.keys(),
+  dict.values() and dict.items() may differ from previous releases for a
+  given dict.  Nothing is defined about this order, so no program should
+  rely on it.  Nevertheless, it's easy to write test cases that rely on the
+  order by accident, typically because of printing the str() or repr() of a
+  dict to an "expected results" file.  See Lib/test/test_support.py's new
+  sortdict(dict) function for a simple way to display a dict in sorted
+  order.
+
+- Many other small changes to dicts were made, resulting in faster
+  operation along the most common code paths.
+
+- Dictionary objects now support the "in" operator: "x in dict" means
+  the same as dict.has_key(x).
+
+- The update() method of dictionaries now accepts generic mapping
+  objects.  Specifically the argument object must support the .keys()
+  and __getitem__() methods.  This allows you to say, for example,
+  {}.update(UserDict())
+
+- Iterators were added; this is a generalized way of providing values
+  to a for loop.  See PEP 234.  There's a new built-in function iter()
+  to return an iterator.  There's a new protocol to get the next value
+  from an iterator using the next() method (in Python) or the
+  tp_iternext slot (in C).  There's a new protocol to get iterators
+  using the __iter__() method (in Python) or the tp_iter slot (in C).
+  Iterating (i.e. a for loop) over a dictionary generates its keys.
+  Iterating over a file generates its lines.
+
+- The following functions were generalized to work nicely with iterator
+  arguments::
+
+    map(), filter(), reduce(), zip()
+    list(), tuple() (PySequence_Tuple() and PySequence_Fast() in C API)
+    max(), min()
+    join() method of strings
+    extend() method of lists
+    'x in y' and 'x not in y' (PySequence_Contains() in C API)
+    operator.countOf() (PySequence_Count() in C API)
+    right-hand side of assignment statements with multiple targets, such as ::
+        x, y, z = some_iterable_object_returning_exactly_3_values
+
+- Accessing module attributes is significantly faster (for example,
+  random.random or os.path or yourPythonModule.yourAttribute).
+
+- Comparing dictionary objects via == and != is faster, and now works even
+  if the keys and values don't support comparisons other than ==.
+
+- Comparing dictionaries in ways other than == and != is slower:  there were
+  insecurities in the dict comparison implementation that could cause Python
+  to crash if the element comparison routines for the dict keys and/or
+  values mutated the dicts.  Making the code bulletproof slowed it down.
+
+- Collisions in dicts are resolved via a new approach, which can help
+  dramatically in bad cases.  For example, looking up every key in a dict
+  d with d.keys() == [i << 16 for i in range(20000)] is approximately 500x
+  faster now.  Thanks to Christian Tismer for pointing out the cause and
+  the nature of an effective cure (last December! better late than never).
+
+- repr() is much faster for large containers (dict, list, tuple).
+
+
+Library
+-------
+
+- The constants ascii_letters, ascii_lowercase. and ascii_uppercase
+  were added to the string module.  These a locale-independent
+  constants, unlike letters, lowercase, and uppercase.  These are now
+  use in appropriate locations in the standard library.
+
+- The flags used in dlopen calls can now be configured using
+  sys.setdlopenflags and queried using sys.getdlopenflags.
+
+- Fredrik Lundh's xmlrpclib is now a standard library module.  This
+  provides full client-side XML-RPC support.  In addition,
+  Demo/xmlrpc/ contains two server frameworks (one SocketServer-based,
+  one asyncore-based).  Thanks to Eric Raymond for the documentation.
+
+- The xrange() object is simplified: it no longer supports slicing,
+  repetition, comparisons, efficient 'in' checking, the tolist()
+  method, or the start, stop and step attributes.  See PEP 260.
+
+- A new function fnmatch.filter to filter lists of file names was added.
+
+- calendar.py uses month and day names based on the current locale.
+
+- strop is now *really* obsolete (this was announced before with 1.6),
+  and issues DeprecationWarning when used (except for the four items
+  that are still imported into string.py).
+
+- Cookie.py now sorts key+value pairs by key in output strings.
+
+- pprint.isrecursive(object) didn't correctly identify recursive objects.
+  Now it does.
+
+- pprint functions now much faster for large containers (tuple, list, dict).
+
+- New 'q' and 'Q' format codes in the struct module, corresponding to C
+  types "long long" and "unsigned long long" (on Windows, __int64).  In
+  native mode, these can be used only when the platform C compiler supports
+  these types (when HAVE_LONG_LONG is #define'd by the Python config
+  process), and then they inherit the sizes and alignments of the C types.
+  In standard mode, 'q' and 'Q' are supported on all platforms, and are
+  8-byte integral types.
+
+- The site module installs a new built-in function 'help' that invokes
+  pydoc.help.  It must be invoked as 'help()'; when invoked as 'help',
+  it displays a message reminding the user to use 'help()' or
+  'help(object)'.
+
+Tests
+-----
+
+- New test_mutants.py runs dict comparisons where the key and value
+  comparison operators mutate the dicts randomly during comparison.  This
+  rapidly causes Python to crash under earlier releases (not for the faint
+  of heart:  it can also cause Win9x to freeze or reboot!).
+
+- New test_pprint.py verifies that pprint.isrecursive() and
+  pprint.isreadable() return sensible results.  Also verifies that simple
+  cases produce correct output.
+
+C API
+-----
+
+- Removed the unused last_is_sticky argument from the internal
+  _PyTuple_Resize().  If this affects you, you were cheating.
+
 What's New in Python 2.1 (final)?
 =================================
 
diff --git a/Misc/README.valgrind b/Misc/README.valgrind
index 157bdc3..b5a9a32 100644
--- a/Misc/README.valgrind
+++ b/Misc/README.valgrind
@@ -24,6 +24,12 @@
 the memory usage down to allow the tests to complete.  It seems to take
 about 5 times longer to run --without-pymalloc.
 
+Apr 15, 2006:
+  test_ctypes causes Valgrind 3.1.1 to fail (crash).
+  test_socket_ssl should be skipped when running valgrind.
+	The reason is that it purposely uses uninitialized memory.
+	This causes many spurious warnings, so it's easier to just skip it.
+
 
 Details:
 --------
diff --git a/Misc/RPM/python-2.4.spec b/Misc/RPM/python-2.5.spec
similarity index 97%
rename from Misc/RPM/python-2.4.spec
rename to Misc/RPM/python-2.5.spec
index bd4c7f7..3515856 100644
--- a/Misc/RPM/python-2.4.spec
+++ b/Misc/RPM/python-2.5.spec
@@ -6,7 +6,7 @@
 #  "python2"?
 #WARNING: Commenting out doesn't work.  Last line is what's used.
 %define config_binsuffix none
-%define config_binsuffix 2.4
+%define config_binsuffix 2.5
 
 #  Build tkinter?  "auto" enables it if /usr/bin/wish exists.
 #WARNING: Commenting out doesn't work.  Last line is what's used.
@@ -33,9 +33,9 @@
 #################################
 
 %define name python
-%define version 2.4
-%define libvers 2.4
-%define release 2pydotorg
+%define version 2.5a1
+%define libvers 2.5
+%define release 1pydotorg
 %define __prefix /usr
 
 #  kludge to get around rpm <percent>define weirdness
@@ -62,6 +62,7 @@
 BuildPrereq: expat-devel
 BuildPrereq: db4-devel
 BuildPrereq: gdbm-devel
+BuildPrereq: sqlite-devel
 Prefix: %{__prefix}
 Packager: Sean Reifschneider <jafo-rpms@tummy.com>
 
@@ -354,6 +355,8 @@
 %{__prefix}/%{libdirname}/python%{libvers}/test
 %{__prefix}/%{libdirname}/python%{libvers}/xml
 %{__prefix}/%{libdirname}/python%{libvers}/email
+%{__prefix}/%{libdirname}/python%{libvers}/email/mime
+%{__prefix}/%{libdirname}/python%{libvers}/sqlite3
 %{__prefix}/%{libdirname}/python%{libvers}/compiler
 %{__prefix}/%{libdirname}/python%{libvers}/bsddb
 %{__prefix}/%{libdirname}/python%{libvers}/hotshot
diff --git a/Misc/SpecialBuilds.txt b/Misc/SpecialBuilds.txt
index ae9ab4c..e0b3315 100644
--- a/Misc/SpecialBuilds.txt
+++ b/Misc/SpecialBuilds.txt
@@ -1,8 +1,8 @@
 This file describes some special Python build types enabled via
 compile-time preprocessor defines.
 
-It is best to define these options in the EXTRA_CFLAGS environment variable;
-``EXTRA_CFLAGS="-DPy_REF_DEBUG" ./configure``.
+It is best to define these options in the EXTRA_CFLAGS make variable;
+``make EXTRA_CFLAGS="-DPy_REF_DEBUG"``.
 
 ---------------------------------------------------------------------------
 Py_REF_DEBUG                                              introduced in 1.4
diff --git a/Misc/build.sh b/Misc/build.sh
index 3c669a0..de51539 100755
--- a/Misc/build.sh
+++ b/Misc/build.sh
@@ -55,13 +55,21 @@
 RSYNC_OPTS="-aC -e ssh"
 
 REFLOG="build/reflog.txt.out"
-# These tests are not stable and sometimes report leaks; however,
-# test_generators really leaks.  Since test_generators probably won't
-# be fixed real soon, disable warning about it for now.
+# These tests are not stable and falsely report leaks sometimes.
 # The entire leak report will be mailed if any test not in this list leaks.
-LEAKY_TESTS="test_(capi|cfgparser|charmapcodec|cmd_line|compiler|filecmp|generators|quopri|socket|threaded_import|threadedtempfile|threading|threading_local|urllib2)"
+# Note: test_XXX (none currently) really leak, but are disabled
+# so we don't send spam.  Any test which really leaks should only 
+# be listed here if there are also test cases under Lib/test/leakers.
+LEAKY_TESTS="test_(ctypes|filecmp|socket|threadedtempfile|threading|urllib2)"
 
-# Change this flag to "yes" for old releases to just update/build the docs.
+# Skip these tests altogether when looking for leaks.  These tests
+# do not need to be stored above in LEAKY_TESTS too.
+# test_compiler almost never finishes with the same number of refs
+# since it depends on other modules, skip it.
+# test_logging causes hangs, skip it.
+LEAKY_SKIPS="-x test_compiler test_logging"
+
+# Change this flag to "yes" for old releases to only update/build the docs.
 BUILD_DISABLED="no"
 
 ## utility functions
@@ -159,7 +167,9 @@
             ## run the tests looking for leaks
             F=make-test-refleak.out
             start=`current_time`
-            ./python ./Lib/test/regrtest.py -R 4:3:$REFLOG -u network >& build/$F
+            ## ensure that the reflog exists so the grep doesn't fail
+            touch $REFLOG
+            ./python ./Lib/test/regrtest.py -R 4:3:$REFLOG -u network $LEAKY_SKIPS >& build/$F
             NUM_FAILURES=`egrep -vc "$LEAKY_TESTS" $REFLOG`
             update_status "Testing refleaks ($NUM_FAILURES failures)" "$F" $start
             mail_on_failure "refleak" $REFLOG
diff --git a/Misc/cheatsheet b/Misc/cheatsheet
index b8de4281..024545d 100644
--- a/Misc/cheatsheet
+++ b/Misc/cheatsheet
@@ -1946,8 +1946,6 @@
 rand             Don't use unless you want compatibility with C's rand().
 random           Random variable generators
 re               Regular Expressions.
-reconvert        Convert old ("regex") regular expressions to new syntax
-                 ("re").
 repr             Redo repr() but with limits on most sizes.
 rexec            Restricted execution facilities ("safe" exec, eval, etc).
 rfc822           RFC-822 message manipulation class.
@@ -2025,7 +2023,6 @@
             array               Obj efficiently representing arrays of basic values
             math                Math functions of C standard
             time                Time-related functions (also the newer datetime module)
-            regex               Regular expression matching operations
             marshal             Read and write some python values in binary format
             struct              Convert between python values and C structs
 
diff --git a/Misc/developers.txt b/Misc/developers.txt
index c3dc306..ff8470e 100644
--- a/Misc/developers.txt
+++ b/Misc/developers.txt
@@ -17,6 +17,12 @@
 Permissions History
 -------------------
 
+- George Yoshida (SF name "quiver") added to the SourceForge Python
+  project 14 Apr 2006, by Tim Peters, as a tracker admin.  See
+  contemporaneous python-checkins thread with the unlikely Subject:
+
+      r45329 - python/trunk/Doc/whatsnew/whatsnew25.tex
+
 - Ronald Oussoren was given SVN access on 3 Mar 2006 by NCN, for Mac
   related work.
 
diff --git a/Misc/python-config.in b/Misc/python-config.in
new file mode 100644
index 0000000..24e699e
--- /dev/null
+++ b/Misc/python-config.in
@@ -0,0 +1,50 @@
+#!@BINDIR@/python
+
+import sys
+import os
+import getopt
+from distutils import sysconfig
+
+valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags', 
+              'ldflags', 'help']
+
+def exit_with_usage(code=1):
+    print >>sys.stderr, "Usage: %s [%s]" % (sys.argv[0], 
+                                            '|'.join('--'+opt for opt in valid_opts))
+    sys.exit(code)
+
+try:
+    opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
+except getopt.error:
+    exit_with_usage()
+
+if not opts:
+    exit_with_usage()
+
+opt = opts[0][0]
+
+pyver = sysconfig.get_config_var('VERSION')
+getvar = sysconfig.get_config_var
+
+if opt == '--help':
+    exit_with_usage(0)
+
+elif opt == '--prefix':
+    print sysconfig.PREFIX
+
+elif opt == '--exec-prefix':
+    print sysconfig.EXEC_PREFIX
+
+elif opt in ('--includes', '--cflags'):
+    flags = ['-I'+dir for dir in getvar('INCLDIRSTOMAKE').split()]
+    if opt == '--cflags':
+        flags.extend(getvar('CFLAGS').split())
+    print ' '.join(flags)
+
+elif opt in ('--libs', '--ldflags'):
+    libs = sysconfig.get_config_var('LIBS').split()
+    libs.append('-lpython'+pyver)
+    if opt == '--ldflags':
+        libs.insert(0, '-L' + getvar('LIBPL'))
+    print ' '.join(libs)
+
diff --git a/Modules/Setup.dist b/Modules/Setup.dist
index 3a512b5..49c8425 100644
--- a/Modules/Setup.dist
+++ b/Modules/Setup.dist
@@ -236,9 +236,9 @@
 
 # The md5 module implements the RSA Data Security, Inc. MD5
 # Message-Digest Algorithm, described in RFC 1321.  The necessary files
-# md5c.c and md5.h are included here.
+# md5.c and md5.h are included here.
 
-#md5 md5module.c md5c.c
+#md5 md5module.c md5.c
 
 
 # The sha module implements the SHA checksum algorithm.
diff --git a/Modules/_bsddb.c b/Modules/_bsddb.c
index 8e9ec0f..1641e20 100644
--- a/Modules/_bsddb.c
+++ b/Modules/_bsddb.c
@@ -101,6 +101,10 @@
 static char *rcs_id = "$Id$";
 
 
+#if (PY_VERSION_HEX < 0x02050000)
+#define Py_ssize_t      int
+#endif
+
 #ifdef WITH_THREAD
 
 /* These are for when calling Python --> C */
@@ -4688,7 +4692,11 @@
 
 
 static PyMappingMethods DB_mapping = {
+#if (PY_VERSION_HEX < 0x02050000)
+        (inquiry)DB_length,          /*mp_length*/
+#else
         (lenfunc)DB_length,          /*mp_length*/
+#endif
         (binaryfunc)DB_subscript,    /*mp_subscript*/
         (objobjargproc)DB_ass_sub,   /*mp_ass_subscript*/
 };
@@ -5385,9 +5393,21 @@
     ADD_INT(d, DB_SET_TXN_TIMEOUT);
 #endif
 
+    /* The exception name must be correct for pickled exception *
+     * objects to unpickle properly.                            */
+#ifdef PYBSDDB_STANDALONE  /* different value needed for standalone pybsddb */
+#define PYBSDDB_EXCEPTION_BASE  "bsddb3.db."
+#else
+#define PYBSDDB_EXCEPTION_BASE  "bsddb.db."
+#endif
+
+    /* All the rest of the exceptions derive only from DBError */
+#define MAKE_EX(name)   name = PyErr_NewException(PYBSDDB_EXCEPTION_BASE #name, DBError, NULL); \
+                        PyDict_SetItemString(d, #name, name)
+
     /* The base exception class is DBError */
-    DBError = PyErr_NewException("bsddb._db.DBError", NULL, NULL);
-    PyDict_SetItemString(d, "DBError", DBError);
+    DBError = NULL;     /* used in MAKE_EX so that it derives from nothing */
+    MAKE_EX(DBError);
 
     /* Some magic to make DBNotFoundError and DBKeyEmptyError derive
      * from both DBError and KeyError, since the API only supports
@@ -5401,10 +5421,6 @@
     PyDict_DelItemString(d, "KeyError");
 
 
-    /* All the rest of the exceptions derive only from DBError */
-#define MAKE_EX(name)   name = PyErr_NewException("bsddb._db." #name, DBError, NULL); \
-                        PyDict_SetItemString(d, #name, name)
-
 #if !INCOMPLETE_IS_WARNING
     MAKE_EX(DBIncompleteError);
 #endif
diff --git a/Modules/_csv.c b/Modules/_csv.c
index 469cd64..4704c16 100644
--- a/Modules/_csv.c
+++ b/Modules/_csv.c
@@ -37,6 +37,28 @@
 #		define PyMODINIT_FUNC void
 #	endif /* __cplusplus */
 #endif
+
+#ifndef Py_CLEAR
+#define Py_CLEAR(op)						\
+	do {							\
+		if (op) {					\
+			PyObject *tmp = (PyObject *)(op);	\
+			(op) = NULL;				\
+			Py_DECREF(tmp);				\
+		}						\
+	} while (0)
+#endif
+#ifndef Py_VISIT
+#define Py_VISIT(op)							\
+        do { 								\
+                if (op) {						\
+                        int vret = visit((PyObject *)(op), arg);	\
+                        if (vret)					\
+                                return vret;				\
+                }							\
+        } while (0)
+#endif
+
 /* end 2.2 compatibility macros */
 
 #define IS_BASESTRING(o) \
@@ -812,28 +834,18 @@
 static int
 Reader_traverse(ReaderObj *self, visitproc visit, void *arg)
 {
-	int err;
-#define VISIT(SLOT) \
-	if (SLOT) { \
-		err = visit((PyObject *)(SLOT), arg); \
-		if (err) \
-			return err; \
-	}
-	VISIT(self->dialect);
-	VISIT(self->input_iter);
-	VISIT(self->fields);
+	Py_VISIT(self->dialect);
+	Py_VISIT(self->input_iter);
+	Py_VISIT(self->fields);
 	return 0;
 }
 
 static int
 Reader_clear(ReaderObj *self)
 {
-        Py_XDECREF(self->dialect);
-        Py_XDECREF(self->input_iter);
-        Py_XDECREF(self->fields);
-        self->dialect = NULL;
-        self->input_iter = NULL;
-        self->fields = NULL;
+	Py_CLEAR(self->dialect);
+	Py_CLEAR(self->input_iter);
+	Py_CLEAR(self->fields);
 	return 0;
 }
 
@@ -1245,25 +1257,16 @@
 static int
 Writer_traverse(WriterObj *self, visitproc visit, void *arg)
 {
-	int err;
-#define VISIT(SLOT) \
-	if (SLOT) { \
-		err = visit((PyObject *)(SLOT), arg); \
-		if (err) \
-			return err; \
-	}
-	VISIT(self->dialect);
-	VISIT(self->writeline);
+	Py_VISIT(self->dialect);
+	Py_VISIT(self->writeline);
 	return 0;
 }
 
 static int
 Writer_clear(WriterObj *self)
 {
-        Py_XDECREF(self->dialect);
-        Py_XDECREF(self->writeline);
-	self->dialect = NULL;
-	self->writeline = NULL;
+	Py_CLEAR(self->dialect);
+	Py_CLEAR(self->writeline);
 	return 0;
 }
 
@@ -1495,7 +1498,7 @@
 PyDoc_STRVAR(csv_writer_doc,
 "    csv_writer = csv.writer(fileobj [, dialect='excel']\n"
 "                            [optional keyword args])\n"
-"    for row in csv_writer:\n"
+"    for row in sequence:\n"
 "        csv_writer.writerow(row)\n"
 "\n"
 "    [or]\n"
diff --git a/Modules/_ctypes/_ctypes.c b/Modules/_ctypes/_ctypes.c
index 926c85b..e6d6aa4 100644
--- a/Modules/_ctypes/_ctypes.c
+++ b/Modules/_ctypes/_ctypes.c
@@ -105,6 +105,10 @@
 #include <ffi.h>
 #ifdef MS_WIN32
 #include <windows.h>
+#include <malloc.h>
+#ifndef IS_INTRESOURCE
+#define IS_INTRESOURCE(x) (((size_t)(x) >> 16) == 0)
+#endif
 # ifdef _WIN32_WCE
 /* Unlike desktop Windows, WinCE has both W and A variants of
    GetProcAddress, but the default W version is not what we want */
@@ -285,6 +289,7 @@
 	if (PyCArg_CheckExact(value)) {
 		PyCArgObject *p = (PyCArgObject *)value;
 		PyObject *ob = p->obj;
+		const char *ob_name;
 		StgDictObject *dict;
 		dict = PyType_stgdict(type);
 
@@ -296,10 +301,10 @@
 			Py_INCREF(value);
 			return value;
 		}
+		ob_name = (ob) ? ob->ob_type->tp_name : "???";
 		PyErr_Format(PyExc_TypeError,
 			     "expected %s instance instead of pointer to %s",
-			     ((PyTypeObject *)type)->tp_name,
-			     ob->ob_type->tp_name);
+			     ((PyTypeObject *)type)->tp_name, ob_name);
 		return NULL;
 	}
 #if 1
@@ -506,12 +511,12 @@
 static int
 PointerType_SetProto(StgDictObject *stgdict, PyObject *proto)
 {
-	if (proto && !PyType_Check(proto)) {
+	if (!proto || !PyType_Check(proto)) {
 		PyErr_SetString(PyExc_TypeError,
 				"_type_ must be a type");
 		return -1;
 	}
-	if (proto && !PyType_stgdict(proto)) {
+	if (!PyType_stgdict(proto)) {
 		PyErr_SetString(PyExc_TypeError,
 				"_type_ must have storage info");
 		return -1;
@@ -543,8 +548,8 @@
 		return NULL;
 	stgdict->size = sizeof(void *);
 	stgdict->align = getentry("P")->pffi_type->alignment;
-	stgdict->length = 2;
-	stgdict->ffi_type = ffi_type_pointer;
+	stgdict->length = 1;
+	stgdict->ffi_type_pointer = ffi_type_pointer;
 
 	proto = PyDict_GetItemString(typedict, "_type_"); /* Borrowed ref */
 	if (proto && -1 == PointerType_SetProto(stgdict, proto)) {
@@ -899,7 +904,7 @@
 	PyObject *typedict;
 	int length;
 
-	int itemsize, itemalign, itemlen;
+	int itemsize, itemalign;
 
 	typedict = PyTuple_GetItem(args, 2);
 	if (!typedict)
@@ -936,7 +941,6 @@
 
 	itemsize = itemdict->size;
 	itemalign = itemdict->align;
-	itemlen = itemdict->length;
 
 	stgdict->size = itemsize * length;
 	stgdict->align = itemalign;
@@ -945,7 +949,7 @@
 	stgdict->proto = proto;
 
 	/* Arrays are passed as pointers to function calls. */
-	stgdict->ffi_type = ffi_type_pointer;
+	stgdict->ffi_type_pointer = ffi_type_pointer;
 
 	/* create the new instance (which is a class,
 	   since we are a metatype!) */
@@ -1264,9 +1268,13 @@
 	PyTypeObject *result;
 	StgDictObject *stgdict;
 	PyObject *name = PyTuple_GET_ITEM(args, 0);
-	PyObject *swapped_args = PyTuple_New(PyTuple_GET_SIZE(args));
+	PyObject *swapped_args;
 	static PyObject *suffix;
-	int i;
+	Py_ssize_t i;
+
+	swapped_args = PyTuple_New(PyTuple_GET_SIZE(args));
+	if (!swapped_args)
+		return NULL;
 
 	if (suffix == NULL)
 #ifdef WORDS_BIGENDIAN
@@ -1275,8 +1283,10 @@
 		suffix = PyString_FromString("_be");
 #endif
 
-	Py_INCREF(suffix);
-	PyString_ConcatAndDel(&name, suffix);
+	Py_INCREF(name);
+	PyString_Concat(&name, suffix);
+	if (name == NULL)
+		return NULL;
 
 	PyTuple_SET_ITEM(swapped_args, 0, name);
 	for (i=1; i<PyTuple_GET_SIZE(args); ++i) {
@@ -1297,7 +1307,7 @@
 	if (!stgdict) /* XXX leaks result! */
 		return NULL;
 
-	stgdict->ffi_type = *fmt->pffi_type;
+	stgdict->ffi_type_pointer = *fmt->pffi_type;
 	stgdict->align = fmt->pffi_type->alignment;
 	stgdict->length = 0;
 	stgdict->size = fmt->pffi_type->size;
@@ -1355,7 +1365,7 @@
 
 	fmt = getentry(PyString_AS_STRING(proto));
 
-	stgdict->ffi_type = *fmt->pffi_type;
+	stgdict->ffi_type_pointer = *fmt->pffi_type;
 	stgdict->align = fmt->pffi_type->alignment;
 	stgdict->length = 0;
 	stgdict->size = fmt->pffi_type->size;
@@ -1450,6 +1460,7 @@
 		PyObject_SetAttrString(swapped, "__ctype_le__", (PyObject *)result);
 		PyObject_SetAttrString(swapped, "__ctype_be__", swapped);
 #endif
+		Py_DECREF(swapped);
 	};
 
 	return (PyObject *)result;
@@ -1624,7 +1635,7 @@
 	stgdict->size = sizeof(void *);
 	stgdict->setfunc = NULL;
 	stgdict->getfunc = NULL;
-	stgdict->ffi_type = ffi_type_pointer;
+	stgdict->ffi_type_pointer = ffi_type_pointer;
 
 	ob = PyDict_GetItemString((PyObject *)stgdict, "_flags_");
 	if (!ob || !PyInt_Check(ob)) {
@@ -1846,7 +1857,7 @@
 	StgDictObject *dict = PyObject_stgdict((PyObject *)self);
 	Py_CLEAR(self->b_objects);
 	if ((self->b_needsfree)
-	    && (dict->size > sizeof(self->b_value)))
+	    && ((size_t)dict->size > sizeof(self->b_value)))
 		PyMem_Free(self->b_ptr);
 	self->b_ptr = NULL;
 	Py_CLEAR(self->b_base);
@@ -1873,8 +1884,9 @@
 	{ NULL },
 };
 
-static Py_ssize_t CData_GetBuffer(CDataObject *self, Py_ssize_t seg, void **pptr)
+static Py_ssize_t CData_GetBuffer(PyObject *_self, Py_ssize_t seg, void **pptr)
 {
+	CDataObject *self = (CDataObject *)_self;
 	if (seg != 0) {
 		/* Hm. Must this set an exception? */
 		return -1;
@@ -1883,7 +1895,7 @@
 	return self->b_size;
 }
 
-static Py_ssize_t CData_GetSegcount(CDataObject *self, Py_ssize_t *lenp)
+static Py_ssize_t CData_GetSegcount(PyObject *_self, Py_ssize_t *lenp)
 {
 	if (lenp)
 		*lenp = 1;
@@ -1891,10 +1903,10 @@
 }
 
 static PyBufferProcs CData_as_buffer = {
-	(readbufferproc)CData_GetBuffer,
-	(writebufferproc)CData_GetBuffer,
-	(segcountproc)CData_GetSegcount,
-	(charbufferproc)NULL,
+	CData_GetBuffer,
+	CData_GetBuffer,
+	CData_GetSegcount,
+	NULL,
 };
 
 /*
@@ -1967,7 +1979,7 @@
 
 static void CData_MallocBuffer(CDataObject *obj, StgDictObject *dict)
 {
-	if (dict->size <= sizeof(obj->b_value)) {
+	if ((size_t)dict->size <= sizeof(obj->b_value)) {
 		/* No need to call malloc, can use the default buffer */
 		obj->b_ptr = (char *)&obj->b_value;
 		obj->b_needsfree = 1;
@@ -1975,7 +1987,7 @@
 		/* In python 2.4, and ctypes 0.9.6, the malloc call took about
 		   33% of the creation time for c_int().
 		*/
-		obj->b_ptr = PyMem_Malloc(dict->size);
+		obj->b_ptr = (char *)PyMem_Malloc(dict->size);
 		obj->b_needsfree = 1;
 		memset(obj->b_ptr, 0, dict->size);
 	}
@@ -2040,7 +2052,7 @@
 	if (!pd)
 		return NULL;
 	assert(CDataObject_Check(pd));
-	pd->b_ptr = buf;
+	pd->b_ptr = (char *)buf;
 	pd->b_length = dict->length;
 	pd->b_size = dict->size;
 	return (PyObject *)pd;
@@ -2383,6 +2395,11 @@
 	address = (PPROC)GetProcAddress(handle, name);
 	if (address)
 		return address;
+
+	if (((size_t)name & ~0xFFFF) == 0) {
+		return NULL;
+	}
+
 	/* It should not happen that dict is NULL, but better be safe */
 	if (dict==NULL || dict->flags & FUNCFLAG_CDECL)
 		return address;
@@ -2391,7 +2408,7 @@
 	   funcname -> _funcname@<n>
 	   where n is 0, 4, 8, 12, ..., 128
 	 */
-	mangled_name = _alloca(strlen(name) + 1 + 1 + 1 + 3); /* \0 _ @ %d */
+	mangled_name = alloca(strlen(name) + 1 + 1 + 1 + 3); /* \0 _ @ %d */
 	for (i = 0; i < 32; ++i) {
 		sprintf(mangled_name, "_%s@%d", name, i*4);
 		address = (PPROC)GetProcAddress(handle, mangled_name);
@@ -2488,6 +2505,28 @@
 	return 1;
 }
 
+static int
+_get_name(PyObject *obj, char **pname)
+{
+#ifdef MS_WIN32
+	if (PyInt_Check(obj) || PyLong_Check(obj)) {
+		/* We have to use MAKEINTRESOURCEA for Windows CE.
+		   Works on Windows as well, of course.
+		*/
+		*pname = MAKEINTRESOURCEA(PyInt_AsUnsignedLongMask(obj) & 0xFFFF);
+		return 1;
+	}
+#endif
+	if (PyString_Check(obj) || PyUnicode_Check(obj)) {
+		*pname = PyString_AsString(obj);
+		return *pname ? 1 : 0;
+	}
+	PyErr_SetString(PyExc_TypeError,
+			"function name must be string or integer");
+	return 0;
+}
+
+
 static PyObject *
 CFuncPtr_FromDll(PyTypeObject *type, PyObject *args, PyObject *kwds)
 {
@@ -2499,7 +2538,7 @@
 	void *handle;
 	PyObject *paramflags = NULL;
 
-	if (!PyArg_ParseTuple(args, "sO|O", &name, &dll, &paramflags))
+	if (!PyArg_ParseTuple(args, "(O&O)|O", _get_name, &name, &dll, &paramflags))
 		return NULL;
 	if (paramflags == Py_None)
 		paramflags = NULL;
@@ -2524,9 +2563,14 @@
 #ifdef MS_WIN32
 	address = FindAddress(handle, name, (PyObject *)type);
 	if (!address) {
-		PyErr_Format(PyExc_AttributeError,
-			     "function '%s' not found",
-			     name);
+		if (!IS_INTRESOURCE(name))
+			PyErr_Format(PyExc_AttributeError,
+				     "function '%s' not found",
+				     name);
+		else
+			PyErr_Format(PyExc_AttributeError,
+				     "function ordinal %d not found",
+				     (WORD)(size_t)name);
 		return NULL;
 	}
 #else
@@ -2603,8 +2647,9 @@
   "O" - must be a callable, creates a C callable function
 
   two or more argument forms (the third argument is a paramflags tuple)
-  "sO|O" - function name, dll object (with an integer handle)
-  "is|O" - vtable index, method name, creates callable calling COM vtbl
+  "(sO)|..." - (function name, dll object (with an integer handle)), paramflags
+  "(iO)|..." - (function ordinal, dll object (with an integer handle)), paramflags
+  "is|..." - vtable index, method name, creates callable calling COM vtbl
 */
 static PyObject *
 CFuncPtr_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
@@ -2612,19 +2657,18 @@
 	CFuncPtrObject *self;
 	PyObject *callable;
 	StgDictObject *dict;
-	THUNK thunk;
+	ffi_info *thunk;
 
 	if (PyTuple_GET_SIZE(args) == 0)
 		return GenericCData_new(type, args, kwds);
 
-	/* Shouldn't the following better be done in __init__? */
-	if (2 <= PyTuple_GET_SIZE(args)) {
-#ifdef MS_WIN32
-		if (PyInt_Check(PyTuple_GET_ITEM(args, 0)))
-			return CFuncPtr_FromVtblIndex(type, args, kwds);
-#endif
+	if (1 <= PyTuple_GET_SIZE(args) && PyTuple_Check(PyTuple_GET_ITEM(args, 0)))
 		return CFuncPtr_FromDll(type, args, kwds);
-	}
+
+#ifdef MS_WIN32
+	if (2 <= PyTuple_GET_SIZE(args) && PyInt_Check(PyTuple_GET_ITEM(args, 0)))
+		return CFuncPtr_FromVtblIndex(type, args, kwds);
+#endif
 
 	if (1 == PyTuple_GET_SIZE(args)
 	    && (PyInt_Check(PyTuple_GET_ITEM(args, 0))
@@ -2781,7 +2825,7 @@
 static PyObject *
 _build_callargs(CFuncPtrObject *self, PyObject *argtypes,
 		PyObject *inargs, PyObject *kwds,
-		int *poutmask, int *pinoutmask, int *pnumretvals)
+		int *poutmask, int *pinoutmask, unsigned int *pnumretvals)
 {
 	PyObject *paramflags = self->paramflags;
 	PyObject *callargs;
@@ -2835,8 +2879,14 @@
 
 		switch (flag & (PARAMFLAG_FIN | PARAMFLAG_FOUT | PARAMFLAG_FLCID)) {
 		case PARAMFLAG_FIN | PARAMFLAG_FLCID:
-			/* ['in', 'lcid'] parameter.  Always taken from defval */
-			Py_INCREF(defval);
+			/* ['in', 'lcid'] parameter.  Always taken from defval,
+			 if given, else the integer 0. */
+			if (defval == NULL) {
+				defval = PyInt_FromLong(0);
+				if (defval == NULL)
+					goto error;
+			} else
+				Py_INCREF(defval);
 			PyTuple_SET_ITEM(callargs, i, defval);
 			break;
 		case (PARAMFLAG_FIN | PARAMFLAG_FOUT):
@@ -2939,9 +2989,10 @@
 */
 static PyObject *
 _build_result(PyObject *result, PyObject *callargs,
-	      int outmask, int inoutmask, int numretvals)
+	      int outmask, int inoutmask, unsigned int numretvals)
 {
-	int i, index, bit;
+	unsigned int i, index;
+	int bit;
 	PyObject *tup = NULL;
 
 	if (callargs == NULL)
@@ -2952,6 +3003,7 @@
 	}
 	Py_DECREF(result);
 
+	/* tup will not be allocated if numretvals == 1 */
 	/* allocate tuple to hold the result */
 	if (numretvals > 1) {
 		tup = PyTuple_New(numretvals);
@@ -3009,7 +3061,7 @@
 
 	int inoutmask;
 	int outmask;
-	int numretvals;
+	unsigned int numretvals;
 
 	assert(dict); /* if not, it's a bug */
 	restype = self->restype ? self->restype : dict->restype;
@@ -3145,9 +3197,11 @@
 	Py_CLEAR(self->converters);
 	Py_CLEAR(self->paramflags);
 
-	if (self->thunk)
-		FreeCallback(self->thunk);
-	self->thunk = NULL;
+	if (self->thunk) {
+		FreeClosure(self->thunk->pcl);
+		PyMem_Free(self->thunk);
+		self->thunk = NULL;
+	}
 
 	return CData_clear((CDataObject *)self);
 }
@@ -3241,7 +3295,7 @@
 
 	parg->tag = 'V';
 	stgdict = PyObject_stgdict((PyObject *)self);
-	parg->pffi_type = &stgdict->ffi_type;
+	parg->pffi_type = &stgdict->ffi_type_pointer;
 	/* For structure parameters (by value), parg->value doesn't contain the structure
 	   data itself, instead parg->value.p *points* to the structure's data
 	   See also _ctypes.c, function _call_function_pointer().
@@ -3275,6 +3329,8 @@
 		if (!fields) {
 			PyErr_Clear();
 			fields = PyTuple_New(0);
+			if (!fields)
+				return -1;
 		}
 
 		if (PyTuple_GET_SIZE(args) > PySequence_Length(fields)) {
@@ -3445,8 +3501,9 @@
 }
 
 static PyObject *
-Array_item(CDataObject *self, int index)
+Array_item(PyObject *_self, Py_ssize_t index)
 {
+	CDataObject *self = (CDataObject *)_self;
 	int offset, size;
 	StgDictObject *stgdict;
 
@@ -3469,8 +3526,9 @@
 }
 
 static PyObject *
-Array_slice(CDataObject *self, Py_ssize_t ilow, Py_ssize_t ihigh)
+Array_slice(PyObject *_self, Py_ssize_t ilow, Py_ssize_t ihigh)
 {
+	CDataObject *self = (CDataObject *)_self;
 	StgDictObject *stgdict, *itemdict;
 	PyObject *proto;
 	PyListObject *np;
@@ -3504,15 +3562,16 @@
 		return NULL;
 
 	for (i = 0; i < len; i++) {
-		PyObject *v = Array_item(self, i+ilow);
+		PyObject *v = Array_item(_self, i+ilow);
 		PyList_SET_ITEM(np, i, v);
 	}
 	return (PyObject *)np;
 }
 
 static int
-Array_ass_item(CDataObject *self, int index, PyObject *value)
+Array_ass_item(PyObject *_self, Py_ssize_t index, PyObject *value)
 {
+	CDataObject *self = (CDataObject *)_self;
 	int size, offset;
 	StgDictObject *stgdict;
 	char *ptr;
@@ -3538,8 +3597,9 @@
 }
 
 static int
-Array_ass_slice(CDataObject *self, int ilow, int ihigh, PyObject *value)
+Array_ass_slice(PyObject *_self, Py_ssize_t ilow, Py_ssize_t ihigh, PyObject *value)
 {
+	CDataObject *self = (CDataObject *)_self;
 	int i, len;
 
 	if (value == NULL) {
@@ -3570,7 +3630,7 @@
 		int result;
 		if (item == NULL)
 			return -1;
-		result = Array_ass_item(self, i+ilow, item);
+		result = Array_ass_item(_self, i+ilow, item);
 		Py_DECREF(item);
 		if (result == -1)
 			return -1;
@@ -3578,20 +3638,21 @@
 	return 0;
 }
 
-static int
-Array_length(CDataObject *self)
+static Py_ssize_t
+Array_length(PyObject *_self)
 {
+	CDataObject *self = (CDataObject *)_self;
 	return self->b_length;
 }
 
 static PySequenceMethods Array_as_sequence = {
-	(lenfunc)Array_length,			/* sq_length; */
+	Array_length,				/* sq_length; */
 	0,					/* sq_concat; */
 	0,					/* sq_repeat; */
-	(ssizeargfunc)Array_item,		/* sq_item; */
-	(ssizessizeargfunc)Array_slice,		/* sq_slice; */
-	(ssizeobjargproc)Array_ass_item,	/* sq_ass_item; */
-	(ssizessizeobjargproc)Array_ass_slice,	/* sq_ass_slice; */
+	Array_item,				/* sq_item; */
+	Array_slice,				/* sq_slice; */
+	Array_ass_item,				/* sq_ass_item; */
+	Array_ass_slice,			/* sq_ass_slice; */
 	0,					/* sq_contains; */
 	
 	0,					/* sq_inplace_concat; */
@@ -3942,8 +4003,9 @@
   Pointer_Type
 */
 static PyObject *
-Pointer_item(CDataObject *self, int index)
+Pointer_item(PyObject *_self, Py_ssize_t index)
 {
+	CDataObject *self = (CDataObject *)_self;
 	int size, offset;
 	StgDictObject *stgdict, *itemdict;
 	PyObject *proto;
@@ -3969,8 +4031,9 @@
 }
 
 static int
-Pointer_ass_item(CDataObject *self, int index, PyObject *value)
+Pointer_ass_item(PyObject *_self, Py_ssize_t index, PyObject *value)
 {
+	CDataObject *self = (CDataObject *)_self;
 	int size;
 	StgDictObject *stgdict;
 
@@ -4111,8 +4174,9 @@
 }
 
 static PyObject *
-Pointer_slice(CDataObject *self, Py_ssize_t ilow, Py_ssize_t ihigh)
+Pointer_slice(PyObject *_self, Py_ssize_t ilow, Py_ssize_t ihigh)
 {
+	CDataObject *self = (CDataObject *)_self;
 	PyListObject *np;
 	StgDictObject *stgdict, *itemdict;
 	PyObject *proto;
@@ -4142,7 +4206,7 @@
 		return NULL;
 
 	for (i = 0; i < len; i++) {
-		PyObject *v = Pointer_item(self, i+ilow);
+		PyObject *v = Pointer_item(_self, i+ilow);
 		PyList_SET_ITEM(np, i, v);
 	}
 	return (PyObject *)np;
@@ -4152,9 +4216,9 @@
 	0,					/* inquiry sq_length; */
 	0,					/* binaryfunc sq_concat; */
 	0,					/* intargfunc sq_repeat; */
-	(ssizeargfunc)Pointer_item,		/* intargfunc sq_item; */
-	(ssizessizeargfunc)Pointer_slice,	/* intintargfunc sq_slice; */
-	(ssizeobjargproc)Pointer_ass_item,	/* intobjargproc sq_ass_item; */
+	Pointer_item,				/* intargfunc sq_item; */
+	Pointer_slice,				/* intintargfunc sq_slice; */
+	Pointer_ass_item,			/* intobjargproc sq_ass_item; */
 	0,					/* intintobjargproc sq_ass_slice; */
 	0,					/* objobjproc sq_contains; */
 	/* Added in release 2.0 */
@@ -4334,6 +4398,42 @@
 	return PyString_FromStringAndSize(ptr, size);
 }
 
+static int
+cast_check_pointertype(PyObject *arg)
+{
+	StgDictObject *dict;
+
+	if (PointerTypeObject_Check(arg))
+		return 1;
+	dict = PyType_stgdict(arg);
+	if (dict) {
+		if (PyString_Check(dict->proto)
+		    && (strchr("sPzUZXO", PyString_AS_STRING(dict->proto)[0]))) {
+			/* simple pointer types, c_void_p, c_wchar_p, BSTR, ... */
+			return 1;
+		}
+	}
+	PyErr_Format(PyExc_TypeError,
+		     "cast() argument 2 must be a pointer type, not %s",
+		     PyType_Check(arg)
+		     ? ((PyTypeObject *)arg)->tp_name
+		     : arg->ob_type->tp_name);
+	return 0;
+}
+
+static PyObject *
+cast(void *ptr, PyObject *ctype)
+{
+	CDataObject *result;
+	if (0 == cast_check_pointertype(ctype))
+		return NULL;
+	result = (CDataObject *)PyObject_CallFunctionObjArgs(ctype, NULL);
+	if (result == NULL)
+		return NULL;
+	/* Should we assert that result is a pointer type? */
+	memcpy(result->b_ptr, &ptr, sizeof(void *));
+	return (PyObject *)result;
+}
 
 #ifdef CTYPES_UNICODE
 static PyObject *
@@ -4469,14 +4569,25 @@
 	PyModule_AddObject(m, "_memmove_addr", PyLong_FromVoidPtr(memmove));
 	PyModule_AddObject(m, "_memset_addr", PyLong_FromVoidPtr(memset));
 	PyModule_AddObject(m, "_string_at_addr", PyLong_FromVoidPtr(string_at));
+	PyModule_AddObject(m, "_cast_addr", PyLong_FromVoidPtr(cast));
 #ifdef CTYPES_UNICODE
 	PyModule_AddObject(m, "_wstring_at_addr", PyLong_FromVoidPtr(wstring_at));
 #endif
 
-#ifdef RTLD_LOCAL
+/* If RTLD_LOCAL is not defined (Windows!), set it to zero. */
+#ifndef RTLD_LOCAL
+#define RTLD_LOCAL 0
+#endif
+
+/* If RTLD_GLOBAL is not defined (cygwin), set it to the same value as
+   RTLD_LOCAL.
+*/
+#ifndef RTLD_GLOBAL
+#define RTLD_GLOBAL RTLD_LOCAL
+#endif
+
 	PyModule_AddObject(m, "RTLD_LOCAL", PyInt_FromLong(RTLD_LOCAL));
 	PyModule_AddObject(m, "RTLD_GLOBAL", PyInt_FromLong(RTLD_GLOBAL));
-#endif
 	
 	PyExc_ArgError = PyErr_NewException("ctypes.ArgumentError", NULL, NULL);
 	if (PyExc_ArgError) {
diff --git a/Modules/_ctypes/_ctypes_test.c b/Modules/_ctypes/_ctypes_test.c
index a46f5e4..ad3b047 100644
--- a/Modules/_ctypes/_ctypes_test.c
+++ b/Modules/_ctypes/_ctypes_test.c
@@ -51,21 +51,21 @@
 	*presult = a + b;
 }
 
-EXPORT(int) _testfunc_i_bhilfd(char b, short h, int i, long l, float f, double d)
+EXPORT(int) _testfunc_i_bhilfd(signed char b, short h, int i, long l, float f, double d)
 {
 //	printf("_testfunc_i_bhilfd got %d %d %d %ld %f %f\n",
 //	       b, h, i, l, f, d);
 	return (int)(b + h + i + l + f + d);
 }
 
-EXPORT(float) _testfunc_f_bhilfd(char b, short h, int i, long l, float f, double d)
+EXPORT(float) _testfunc_f_bhilfd(signed char b, short h, int i, long l, float f, double d)
 {
 //	printf("_testfunc_f_bhilfd got %d %d %d %ld %f %f\n",
 //	       b, h, i, l, f, d);
 	return (float)(b + h + i + l + f + d);
 }
 
-EXPORT(double) _testfunc_d_bhilfd(char b, short h, int i, long l, float f, double d)
+EXPORT(double) _testfunc_d_bhilfd(signed char b, short h, int i, long l, float f, double d)
 {
 //	printf("_testfunc_d_bhilfd got %d %d %d %ld %f %f\n",
 //	       b, h, i, l, f, d);
@@ -74,7 +74,7 @@
 
 EXPORT(char *) _testfunc_p_p(void *s)
 {
-	return s;
+	return (char *)s;
 }
 
 EXPORT(void *) _testfunc_c_p_p(int *argcp, char **argv)
@@ -89,7 +89,7 @@
 
 EXPORT(char *) my_strdup(char *src)
 {
-	char *dst = malloc(strlen(src)+1);
+	char *dst = (char *)malloc(strlen(src)+1);
 	if (!dst)
 		return NULL;
 	strcpy(dst, src);
@@ -99,8 +99,8 @@
 #ifdef HAVE_WCHAR_H
 EXPORT(wchar_t *) my_wcsdup(wchar_t *src)
 {
-	int len = wcslen(src);
-	wchar_t *ptr = malloc((len + 1) * sizeof(wchar_t));
+	size_t len = wcslen(src);
+	wchar_t *ptr = (wchar_t *)malloc((len + 1) * sizeof(wchar_t));
 	if (ptr == NULL)
 		return NULL;
 	memcpy(ptr, src, (len+1) * sizeof(wchar_t));
@@ -152,13 +152,13 @@
 }
 
 #ifdef HAVE_LONG_LONG
-EXPORT(PY_LONG_LONG) _testfunc_q_bhilfdq(char b, short h, int i, long l, float f,
+EXPORT(PY_LONG_LONG) _testfunc_q_bhilfdq(signed char b, short h, int i, long l, float f,
 				     double d, PY_LONG_LONG q)
 {
 	return (PY_LONG_LONG)(b + h + i + l + f + d + q);
 }
 
-EXPORT(PY_LONG_LONG) _testfunc_q_bhilfd(char b, short h, int i, long l, float f, double d)
+EXPORT(PY_LONG_LONG) _testfunc_q_bhilfd(signed char b, short h, int i, long l, float f, double d)
 {
 	return (PY_LONG_LONG)(b + h + i + l + f + d);
 }
@@ -191,7 +191,7 @@
 {
 	static char message[] = "Hello, World";
 	if (p) {
-		*p = malloc(sizeof(char *));
+		*p = (char **)malloc(sizeof(char *));
 		printf("malloc returned %p\n", *p);
 		**p = message;
 		return 1;
@@ -385,7 +385,7 @@
 #define S last_tf_arg_s = (PY_LONG_LONG)c
 #define U last_tf_arg_u = (unsigned PY_LONG_LONG)c
 
-EXPORT(char) tf_b(char c) { S; return c/3; }
+EXPORT(signed char) tf_b(signed char c) { S; return c/3; }
 EXPORT(unsigned char) tf_B(unsigned char c) { U; return c/3; }
 EXPORT(short) tf_h(short c) { S; return c/3; }
 EXPORT(unsigned short) tf_H(unsigned short c) { U; return c/3; }
@@ -399,7 +399,7 @@
 EXPORT(double) tf_d(double c) { S; return c/3; }
 
 #ifdef MS_WIN32
-EXPORT(char) __stdcall s_tf_b(char c) { S; return c/3; }
+EXPORT(signed char) __stdcall s_tf_b(signed char c) { S; return c/3; }
 EXPORT(unsigned char) __stdcall s_tf_B(unsigned char c) { U; return c/3; }
 EXPORT(short) __stdcall s_tf_h(short c) { S; return c/3; }
 EXPORT(unsigned short) __stdcall s_tf_H(unsigned short c) { U; return c/3; }
@@ -414,33 +414,33 @@
 #endif
 /*******/
 
-EXPORT(char) tf_bb(char x, char c) { S; return c/3; }
-EXPORT(unsigned char) tf_bB(char x, unsigned char c) { U; return c/3; }
-EXPORT(short) tf_bh(char x, short c) { S; return c/3; }
-EXPORT(unsigned short) tf_bH(char x, unsigned short c) { U; return c/3; }
-EXPORT(int) tf_bi(char x, int c) { S; return c/3; }
-EXPORT(unsigned int) tf_bI(char x, unsigned int c) { U; return c/3; }
-EXPORT(long) tf_bl(char x, long c) { S; return c/3; }
-EXPORT(unsigned long) tf_bL(char x, unsigned long c) { U; return c/3; }
-EXPORT(PY_LONG_LONG) tf_bq(char x, PY_LONG_LONG c) { S; return c/3; }
-EXPORT(unsigned PY_LONG_LONG) tf_bQ(char x, unsigned PY_LONG_LONG c) { U; return c/3; }
-EXPORT(float) tf_bf(char x, float c) { S; return c/3; }
-EXPORT(double) tf_bd(char x, double c) { S; return c/3; }
+EXPORT(signed char) tf_bb(signed char x, signed char c) { S; return c/3; }
+EXPORT(unsigned char) tf_bB(signed char x, unsigned char c) { U; return c/3; }
+EXPORT(short) tf_bh(signed char x, short c) { S; return c/3; }
+EXPORT(unsigned short) tf_bH(signed char x, unsigned short c) { U; return c/3; }
+EXPORT(int) tf_bi(signed char x, int c) { S; return c/3; }
+EXPORT(unsigned int) tf_bI(signed char x, unsigned int c) { U; return c/3; }
+EXPORT(long) tf_bl(signed char x, long c) { S; return c/3; }
+EXPORT(unsigned long) tf_bL(signed char x, unsigned long c) { U; return c/3; }
+EXPORT(PY_LONG_LONG) tf_bq(signed char x, PY_LONG_LONG c) { S; return c/3; }
+EXPORT(unsigned PY_LONG_LONG) tf_bQ(signed char x, unsigned PY_LONG_LONG c) { U; return c/3; }
+EXPORT(float) tf_bf(signed char x, float c) { S; return c/3; }
+EXPORT(double) tf_bd(signed char x, double c) { S; return c/3; }
 EXPORT(void) tv_i(int c) { S; return; }
 
 #ifdef MS_WIN32
-EXPORT(char) __stdcall s_tf_bb(char x, char c) { S; return c/3; }
-EXPORT(unsigned char) __stdcall s_tf_bB(char x, unsigned char c) { U; return c/3; }
-EXPORT(short) __stdcall s_tf_bh(char x, short c) { S; return c/3; }
-EXPORT(unsigned short) __stdcall s_tf_bH(char x, unsigned short c) { U; return c/3; }
-EXPORT(int) __stdcall s_tf_bi(char x, int c) { S; return c/3; }
-EXPORT(unsigned int) __stdcall s_tf_bI(char x, unsigned int c) { U; return c/3; }
-EXPORT(long) __stdcall s_tf_bl(char x, long c) { S; return c/3; }
-EXPORT(unsigned long) __stdcall s_tf_bL(char x, unsigned long c) { U; return c/3; }
-EXPORT(PY_LONG_LONG) __stdcall s_tf_bq(char x, PY_LONG_LONG c) { S; return c/3; }
-EXPORT(unsigned PY_LONG_LONG) __stdcall s_tf_bQ(char x, unsigned PY_LONG_LONG c) { U; return c/3; }
-EXPORT(float) __stdcall s_tf_bf(char x, float c) { S; return c/3; }
-EXPORT(double) __stdcall s_tf_bd(char x, double c) { S; return c/3; }
+EXPORT(signed char) __stdcall s_tf_bb(signed char x, signed char c) { S; return c/3; }
+EXPORT(unsigned char) __stdcall s_tf_bB(signed char x, unsigned char c) { U; return c/3; }
+EXPORT(short) __stdcall s_tf_bh(signed char x, short c) { S; return c/3; }
+EXPORT(unsigned short) __stdcall s_tf_bH(signed char x, unsigned short c) { U; return c/3; }
+EXPORT(int) __stdcall s_tf_bi(signed char x, int c) { S; return c/3; }
+EXPORT(unsigned int) __stdcall s_tf_bI(signed char x, unsigned int c) { U; return c/3; }
+EXPORT(long) __stdcall s_tf_bl(signed char x, long c) { S; return c/3; }
+EXPORT(unsigned long) __stdcall s_tf_bL(signed char x, unsigned long c) { U; return c/3; }
+EXPORT(PY_LONG_LONG) __stdcall s_tf_bq(signed char x, PY_LONG_LONG c) { S; return c/3; }
+EXPORT(unsigned PY_LONG_LONG) __stdcall s_tf_bQ(signed char x, unsigned PY_LONG_LONG c) { U; return c/3; }
+EXPORT(float) __stdcall s_tf_bf(signed char x, float c) { S; return c/3; }
+EXPORT(double) __stdcall s_tf_bd(signed char x, double c) { S; return c/3; }
 EXPORT(void) __stdcall s_tv_i(int c) { S; return; }
 #endif
 
diff --git a/Modules/_ctypes/callbacks.c b/Modules/_ctypes/callbacks.c
index 2948d98..8c29c55 100644
--- a/Modules/_ctypes/callbacks.c
+++ b/Modules/_ctypes/callbacks.c
@@ -264,16 +264,6 @@
 	PyGILState_Release(state);
 }
 
-typedef struct {
-	ffi_closure *pcl; /* the C callable */
-	ffi_cif cif;
-	PyObject *converters;
-	PyObject *callable;
-	SETFUNC setfunc;
-	ffi_type *restype;
-	ffi_type *atypes[0];
-} ffi_info;
-
 static void closure_fcn(ffi_cif *cif,
 			void *resp,
 			void **args,
@@ -289,16 +279,10 @@
 			  args);
 }
 
-void FreeCallback(THUNK thunk)
-{
-	FreeClosure(((ffi_info *)thunk)->pcl);
-	PyMem_Free(thunk);
-}
-
-THUNK AllocFunctionCallback(PyObject *callable,
-			    PyObject *converters,
-			    PyObject *restype,
-			    int is_cdecl)
+ffi_info *AllocFunctionCallback(PyObject *callable,
+				PyObject *converters,
+				PyObject *restype,
+				int is_cdecl)
 {
 	int result;
 	ffi_info *p;
@@ -313,13 +297,14 @@
 	}
 	p->pcl = MallocClosure();
 	if (p->pcl == NULL) {
-		PyMem_Free(p);
 		PyErr_NoMemory();
-		return NULL;
+		goto error;
 	}
 
 	for (i = 0; i < nArgs; ++i) {
 		PyObject *cnv = PySequence_GetItem(converters, i);
+		if (cnv == NULL)
+			goto error;
 		p->atypes[i] = GetType(cnv);
 		Py_DECREF(cnv);
 	}
@@ -330,12 +315,10 @@
 		p->restype = &ffi_type_void;
 	} else {
 		StgDictObject *dict = PyType_stgdict(restype);
-		if (dict == NULL) {
-			PyMem_Free(p);
-			return NULL;
-		}
+		if (dict == NULL)
+			goto error;
 		p->setfunc = dict->setfunc;
-		p->restype = &dict->ffi_type;
+		p->restype = &dict->ffi_type_pointer;
 	}
 
 	cc = FFI_DEFAULT_ABI;
@@ -349,21 +332,26 @@
 	if (result != FFI_OK) {
 		PyErr_Format(PyExc_RuntimeError,
 			     "ffi_prep_cif failed with %d", result);
-		PyMem_Free(p);
-		return NULL;
+		goto error;
 	}
 	result = ffi_prep_closure(p->pcl, &p->cif, closure_fcn, p);
 	if (result != FFI_OK) {
 		PyErr_Format(PyExc_RuntimeError,
 			     "ffi_prep_closure failed with %d", result);
-		PyMem_Free(p);
-		return NULL;
+		goto error;
 	}
 
 	p->converters = converters;
 	p->callable = callable;
+	return p;
 
-	return (THUNK)p;
+  error:
+	if (p) {
+		if (p->pcl)
+			FreeClosure(p->pcl);
+		PyMem_Free(p);
+	}
+	return NULL;
 }
 
 /****************************************************************************
diff --git a/Modules/_ctypes/callproc.c b/Modules/_ctypes/callproc.c
index 9d9e322..8163f49 100644
--- a/Modules/_ctypes/callproc.c
+++ b/Modules/_ctypes/callproc.c
@@ -64,14 +64,17 @@
 #endif
 
 #ifdef MS_WIN32
-#define alloca _alloca
+#include <malloc.h>
 #endif
 
 #include <ffi.h>
 #include "ctypes.h"
 
-#ifdef _DEBUG
-#define DEBUG_EXCEPTIONS /* */
+#if defined(_DEBUG) || defined(__MINGW32__)
+/* Don't use structured exception handling on Windows if this is defined.
+   MingW, AFAIK, doesn't support it.
+*/
+#define DONT_USE_SEH
 #endif
 
 #ifdef MS_WIN32
@@ -96,6 +99,7 @@
 	return lpMsgBuf;
 }
 
+#ifndef DONT_USE_SEH
 void SetException(DWORD code, EXCEPTION_RECORD *pr)
 {
 	TCHAR *lpMsgBuf;
@@ -254,6 +258,7 @@
 	*record = *ptrs->ExceptionRecord;
 	return EXCEPTION_EXECUTE_HANDLER;
 }
+#endif
 
 static PyObject *
 check_hresult(PyObject *self, PyObject *args)
@@ -576,14 +581,14 @@
 	/* This little trick works correctly with MSVC.
 	   It returns small structures in registers
 	*/
-	if (dict->ffi_type.type == FFI_TYPE_STRUCT) {
-		if (dict->ffi_type.size <= 4)
+	if (dict->ffi_type_pointer.type == FFI_TYPE_STRUCT) {
+		if (dict->ffi_type_pointer.size <= 4)
 			return &ffi_type_sint32;
-		else if (dict->ffi_type.size <= 8)
+		else if (dict->ffi_type_pointer.size <= 8)
 			return &ffi_type_sint64;
 	}
 #endif
-	return &dict->ffi_type;
+	return &dict->ffi_type_pointer;
 }
 
 
@@ -612,9 +617,11 @@
 	int cc;
 #ifdef MS_WIN32
 	int delta;
+#ifndef DONT_USE_SEH
 	DWORD dwExceptionCode = 0;
 	EXCEPTION_RECORD record;
 #endif
+#endif
 	/* XXX check before here */
 	if (restype == NULL) {
 		PyErr_SetString(PyExc_RuntimeError,
@@ -640,14 +647,14 @@
 	if ((flags & FUNCFLAG_PYTHONAPI) == 0)
 		Py_UNBLOCK_THREADS
 #ifdef MS_WIN32
-#ifndef DEBUG_EXCEPTIONS
+#ifndef DONT_USE_SEH
 	__try {
 #endif
 		delta =
 #endif
 			ffi_call(&cif, (void *)pProc, resmem, avalues);
 #ifdef MS_WIN32
-#ifndef DEBUG_EXCEPTIONS
+#ifndef DONT_USE_SEH
 	}
 	__except (HandleException(GetExceptionInformation(),
 				  &dwExceptionCode, &record)) {
@@ -658,10 +665,12 @@
 	if ((flags & FUNCFLAG_PYTHONAPI) == 0)
 		Py_BLOCK_THREADS
 #ifdef MS_WIN32
+#ifndef DONT_USE_SEH
 	if (dwExceptionCode) {
 		SetException(dwExceptionCode, &record);
 		return -1;
 	}
+#endif
 	if (delta < 0) {
 		if (flags & FUNCFLAG_CDECL)
 			PyErr_Format(PyExc_ValueError,
@@ -758,6 +767,8 @@
 	if (cls_str) {
 		PyString_ConcatAndDel(&s, cls_str);
 		PyString_ConcatAndDel(&s, PyString_FromString(": "));
+		if (s == NULL)
+			goto error;
 	} else
 		PyErr_Clear();
 	msg_str = PyObject_Str(v);
@@ -766,12 +777,15 @@
 	else {
 		PyErr_Clear();
 		PyString_ConcatAndDel(&s, PyString_FromString("???"));
+		if (s == NULL)
+			goto error;
 	}
 	PyErr_SetObject(exc_class, s);
+error:
 	Py_XDECREF(tp);
 	Py_XDECREF(v);
 	Py_XDECREF(tb);
-	Py_DECREF(s);
+	Py_XDECREF(s);
 }
 
 
@@ -1363,7 +1377,7 @@
 converter(PyObject *obj, void **address)
 {
 	*address = PyLong_AsVoidPtr(obj);
-	return address != NULL;
+	return *address != NULL;
 }
 
 static PyObject *
@@ -1423,71 +1437,7 @@
 }
 #endif
 
-static char cast_doc[] =
-"cast(cobject, ctype) -> ctype-instance\n\
-\n\
-Create an instance of ctype, and copy the internal memory buffer\n\
-of cobject to the new instance.  Should be used to cast one type\n\
-of pointer to another type of pointer.\n\
-Doesn't work correctly with ctypes integers.\n";
-
-static int cast_check_pointertype(PyObject *arg, PyObject **pobj)
-{
-	StgDictObject *dict;
-
-	if (PointerTypeObject_Check(arg)) {
-		*pobj = arg;
-		return 1;
-	}
-	dict = PyType_stgdict(arg);
-	if (dict) {
-		if (PyString_Check(dict->proto)
-		    && (strchr("sPzUZXO", PyString_AS_STRING(dict->proto)[0]))) {
-			/* simple pointer types, c_void_p, c_wchar_p, BSTR, ... */
-			*pobj = arg;
-			return 1;
-		}
-	}
-	if (PyType_Check(arg)) {
-		PyErr_Format(PyExc_TypeError,
-			     "cast() argument 2 must be a pointer type, not %s",
-			     ((PyTypeObject *)arg)->tp_name);
-	} else {
-		PyErr_Format(PyExc_TypeError,
-			     "cast() argument 2 must be a pointer type, not a %s",
-			     arg->ob_type->tp_name);
-	}
-	return 0;
-}
-
-static PyObject *cast(PyObject *self, PyObject *args)
-{
-	PyObject *obj, *ctype;
-	struct argument a;
-	CDataObject *result;
-
-	/* We could and should allow array types for the second argument
-	   also, but we cannot use the simple memcpy below for them. */
-	if (!PyArg_ParseTuple(args, "OO&:cast", &obj, &cast_check_pointertype, &ctype))
-		return NULL;
-	if (-1 == ConvParam(obj, 1, &a))
-		return NULL;
-	result = (CDataObject *)PyObject_CallFunctionObjArgs(ctype, NULL);
-	if (result == NULL) {
-		Py_XDECREF(a.keep);
-		return NULL;
-	}
-	// result->b_size
-	// a.ffi_type->size
-	memcpy(result->b_ptr, &a.value,
-	       min(result->b_size, (int)a.ffi_type->size));
-	Py_XDECREF(a.keep);
-	return (PyObject *)result;
-}
-
-
 PyMethodDef module_methods[] = {
-	{"cast", cast, METH_VARARGS, cast_doc},
 #ifdef CTYPES_UNICODE
 	{"set_conversion_mode", set_conversion_mode, METH_VARARGS, set_conversion_mode_doc},
 #endif
diff --git a/Modules/_ctypes/cfield.c b/Modules/_ctypes/cfield.c
index 336f265..7bef412 100644
--- a/Modules/_ctypes/cfield.c
+++ b/Modules/_ctypes/cfield.c
@@ -499,7 +499,7 @@
 	long val;
 	if (get_long(value, &val) < 0)
 		return NULL;
-	*(char *)ptr = (char)SET(*(char *)ptr, (char)val, size);
+	*(signed char *)ptr = (signed char)SET(*(signed char *)ptr, (signed char)val, size);
 	_RET(value);
 }
 
@@ -507,7 +507,7 @@
 static PyObject *
 b_get(void *ptr, unsigned size)
 {
-	char val = *(char *)ptr;
+	signed char val = *(signed char *)ptr;
 	GET_BITFIELD(val, size);
 	return PyInt_FromLong(val);
 }
@@ -536,9 +536,12 @@
 h_set(void *ptr, PyObject *value, unsigned size)
 {
 	long val;
+	short x;
 	if (get_long(value, &val) < 0)
 		return NULL;
-	*(short *)ptr = (short)SET(*(short *)ptr, (short)val, size);
+	memcpy(&x, ptr, sizeof(x));
+	x = SET(x, (short)val, size);
+	memcpy(ptr, &x, sizeof(x));
 	_RET(value);
 }
 
@@ -550,24 +553,28 @@
 	short field;
 	if (get_long(value, &val) < 0)
 		return NULL;
-	field = SWAP_2(*(short *)ptr);
+	memcpy(&field, ptr, sizeof(field));
+	field = SWAP_2(field);
 	field = SET(field, (short)val, size);
-	*(short *)ptr = SWAP_2(field);
+	field = SWAP_2(field);
+	memcpy(ptr, &field, sizeof(field));
 	_RET(value);
 }
 
 static PyObject *
 h_get(void *ptr, unsigned size)
 {
-	short val = *(short *)ptr;
+	short val;
+	memcpy(&val, ptr, sizeof(val));
 	GET_BITFIELD(val, size);
-	return PyInt_FromLong(val);
+	return PyInt_FromLong((long)val);
 }
 
 static PyObject *
 h_get_sw(void *ptr, unsigned size)
 {
-	short val = *(short *)ptr;
+	short val;
+	memcpy(&val, ptr, sizeof(val));
 	val = SWAP_2(val);
 	GET_BITFIELD(val, size);
 	return PyInt_FromLong(val);
@@ -577,10 +584,12 @@
 H_set(void *ptr, PyObject *value, unsigned size)
 {
 	unsigned long val;
+	unsigned short x;
 	if (get_ulong(value, &val) < 0)
 		return NULL;
-	*(unsigned short *)ptr = (unsigned short)SET(*(unsigned short *)ptr,
-						     (unsigned short)val, size);
+	memcpy(&x, ptr, sizeof(x));
+	x = SET(x, (unsigned short)val, size);
+	memcpy(ptr, &x, sizeof(x));
 	_RET(value);
 }
 
@@ -591,9 +600,11 @@
 	unsigned short field;
 	if (get_ulong(value, &val) < 0)
 		return NULL;
-	field = SWAP_2(*(unsigned short *)ptr);
+	memcpy(&field, ptr, sizeof(field));
+	field = SWAP_2(field);
 	field = SET(field, (unsigned short)val, size);
-	*(unsigned short *)ptr = SWAP_2(field);
+	field = SWAP_2(field);
+	memcpy(ptr, &field, sizeof(field));
 	_RET(value);
 }
 
@@ -601,7 +612,8 @@
 static PyObject *
 H_get(void *ptr, unsigned size)
 {
-	unsigned short val = *(unsigned short *)ptr;
+	unsigned short val;
+	memcpy(&val, ptr, sizeof(val));
 	GET_BITFIELD(val, size);
 	return PyInt_FromLong(val);
 }
@@ -609,7 +621,8 @@
 static PyObject *
 H_get_sw(void *ptr, unsigned size)
 {
-	unsigned short val = *(unsigned short *)ptr;
+	unsigned short val;
+	memcpy(&val, ptr, sizeof(val));
 	val = SWAP_2(val);
 	GET_BITFIELD(val, size);
 	return PyInt_FromLong(val);
@@ -619,9 +632,12 @@
 i_set(void *ptr, PyObject *value, unsigned size)
 {
 	long val;
+	int x;
 	if (get_long(value, &val) < 0)
 		return NULL;
-	*(int *)ptr = (int)SET(*(int *)ptr, (int)val, size);
+	memcpy(&x, ptr, sizeof(x));
+	x = SET(x, (int)val, size);
+	memcpy(ptr, &x, sizeof(x));
 	_RET(value);
 }
 
@@ -632,9 +648,11 @@
 	int field;
 	if (get_long(value, &val) < 0)
 		return NULL;
-	field = SWAP_INT(*(int *)ptr);
+	memcpy(&field, ptr, sizeof(field));
+	field = SWAP_INT(field);
 	field = SET(field, (int)val, size);
-	*(int *)ptr = SWAP_INT(field);
+	field = SWAP_INT(field);
+	memcpy(ptr, &field, sizeof(field));
 	_RET(value);
 }
 
@@ -642,7 +660,8 @@
 static PyObject *
 i_get(void *ptr, unsigned size)
 {
-	int val = *(int *)ptr;
+	int val;
+	memcpy(&val, ptr, sizeof(val));
 	GET_BITFIELD(val, size);
 	return PyInt_FromLong(val);
 }
@@ -650,7 +669,8 @@
 static PyObject *
 i_get_sw(void *ptr, unsigned size)
 {
-	int val = *(int *)ptr;
+	int val;
+	memcpy(&val, ptr, sizeof(val));
 	val = SWAP_INT(val);
 	GET_BITFIELD(val, size);
 	return PyInt_FromLong(val);
@@ -684,9 +704,12 @@
 I_set(void *ptr, PyObject *value, unsigned size)
 {
 	unsigned long val;
+	unsigned int x;
 	if (get_ulong(value, &val) < 0)
 		return  NULL;
-	*(unsigned int *)ptr = (unsigned int)SET(*(unsigned int *)ptr, (unsigned int)val, size);
+	memcpy(&x, ptr, sizeof(x));
+	x = SET(x, (unsigned int)val, size);
+	memcpy(ptr, &x, sizeof(x));
 	_RET(value);
 }
 
@@ -697,9 +720,10 @@
 	unsigned int field;
 	if (get_ulong(value, &val) < 0)
 		return  NULL;
-	field = SWAP_INT(*(unsigned int *)ptr);
+	memcpy(&field, ptr, sizeof(field));
 	field = (unsigned int)SET(field, (unsigned int)val, size);
-	*(unsigned int *)ptr = SWAP_INT(field);
+	field = SWAP_INT(field);
+	memcpy(ptr, &field, sizeof(field));
 	_RET(value);
 }
 
@@ -707,7 +731,8 @@
 static PyObject *
 I_get(void *ptr, unsigned size)
 {
-	unsigned int val = *(unsigned int *)ptr;
+	unsigned int val;
+	memcpy(&val, ptr, sizeof(val));
 	GET_BITFIELD(val, size);
 	return PyLong_FromUnsignedLong(val);
 }
@@ -715,7 +740,8 @@
 static PyObject *
 I_get_sw(void *ptr, unsigned size)
 {
-	unsigned int val = *(unsigned int *)ptr;
+	unsigned int val;
+	memcpy(&val, ptr, sizeof(val));
 	val = SWAP_INT(val);
 	GET_BITFIELD(val, size);
 	return PyLong_FromUnsignedLong(val);
@@ -725,9 +751,12 @@
 l_set(void *ptr, PyObject *value, unsigned size)
 {
 	long val;
+	long x;
 	if (get_long(value, &val) < 0)
 		return NULL;
-	*(long *)ptr = (long)SET(*(long *)ptr, val, size);
+	memcpy(&x, ptr, sizeof(x));
+	x = SET(x, val, size);
+	memcpy(ptr, &x, sizeof(x));
 	_RET(value);
 }
 
@@ -738,9 +767,11 @@
 	long field;
 	if (get_long(value, &val) < 0)
 		return NULL;
-	field = SWAP_LONG(*(long *)ptr);
+	memcpy(&field, ptr, sizeof(field));
+	field = SWAP_LONG(field);
 	field = (long)SET(field, val, size);
-	*(long *)ptr = SWAP_LONG(field);
+	field = SWAP_LONG(field);
+	memcpy(ptr, &field, sizeof(field));
 	_RET(value);
 }
 
@@ -748,7 +779,8 @@
 static PyObject *
 l_get(void *ptr, unsigned size)
 {
-	long val = *(long *)ptr;
+	long val;
+	memcpy(&val, ptr, sizeof(val));
 	GET_BITFIELD(val, size);
 	return PyInt_FromLong(val);
 }
@@ -756,7 +788,8 @@
 static PyObject *
 l_get_sw(void *ptr, unsigned size)
 {
-	long val = *(long *)ptr;
+	long val;
+	memcpy(&val, ptr, sizeof(val));
 	val = SWAP_LONG(val);
 	GET_BITFIELD(val, size);
 	return PyInt_FromLong(val);
@@ -766,9 +799,12 @@
 L_set(void *ptr, PyObject *value, unsigned size)
 {
 	unsigned long val;
+	unsigned long x;
 	if (get_ulong(value, &val) < 0)
 		return  NULL;
-	*(unsigned long *)ptr = (unsigned long)SET(*(unsigned long *)ptr, val, size);
+	memcpy(&x, ptr, sizeof(x));
+	x = SET(x, val, size);
+	memcpy(ptr, &x, sizeof(x));
 	_RET(value);
 }
 
@@ -779,9 +815,11 @@
 	unsigned long field;
 	if (get_ulong(value, &val) < 0)
 		return  NULL;
-	field = SWAP_LONG(*(unsigned long *)ptr);
+	memcpy(&field, ptr, sizeof(field));
+	field = SWAP_LONG(field);
 	field = (unsigned long)SET(field, val, size);
-	*(unsigned long *)ptr = SWAP_LONG(field);
+	field = SWAP_LONG(field);
+	memcpy(ptr, &field, sizeof(field));
 	_RET(value);
 }
 
@@ -789,7 +827,8 @@
 static PyObject *
 L_get(void *ptr, unsigned size)
 {
-	unsigned long val = *(unsigned long *)ptr;
+	unsigned long val;
+	memcpy(&val, ptr, sizeof(val));
 	GET_BITFIELD(val, size);
 	return PyLong_FromUnsignedLong(val);
 }
@@ -797,7 +836,8 @@
 static PyObject *
 L_get_sw(void *ptr, unsigned size)
 {
-	unsigned long val = *(unsigned long *)ptr;
+	unsigned long val;
+	memcpy(&val, ptr, sizeof(val));
 	val = SWAP_LONG(val);
 	GET_BITFIELD(val, size);
 	return PyLong_FromUnsignedLong(val);
@@ -808,9 +848,12 @@
 q_set(void *ptr, PyObject *value, unsigned size)
 {
 	PY_LONG_LONG val;
+	PY_LONG_LONG x;
 	if (get_longlong(value, &val) < 0)
 		return NULL;
-	*(PY_LONG_LONG *)ptr = (PY_LONG_LONG)SET(*(PY_LONG_LONG *)ptr, val, size);
+	memcpy(&x, ptr, sizeof(x));
+	x = SET(x, val, size);
+	memcpy(ptr, &x, sizeof(x));
 	_RET(value);
 }
 
@@ -821,16 +864,19 @@
 	PY_LONG_LONG field;
 	if (get_longlong(value, &val) < 0)
 		return NULL;
-	field = SWAP_8(*(PY_LONG_LONG *)ptr);
+	memcpy(&field, ptr, sizeof(field));
+	field = SWAP_8(field);
 	field = (PY_LONG_LONG)SET(field, val, size);
-	*(PY_LONG_LONG *)ptr = SWAP_8(field);
+	field = SWAP_8(field);
+	memcpy(ptr, &field, sizeof(field));
 	_RET(value);
 }
 
 static PyObject *
 q_get(void *ptr, unsigned size)
 {
-	PY_LONG_LONG val = *(PY_LONG_LONG *)ptr;
+	PY_LONG_LONG val;
+	memcpy(&val, ptr, sizeof(val));
 	GET_BITFIELD(val, size);
 	return PyLong_FromLongLong(val);
 }
@@ -838,7 +884,8 @@
 static PyObject *
 q_get_sw(void *ptr, unsigned size)
 {
-	PY_LONG_LONG val = *(PY_LONG_LONG *)ptr;
+	PY_LONG_LONG val;
+	memcpy(&val, ptr, sizeof(val));
 	val = SWAP_8(val);
 	GET_BITFIELD(val, size);
 	return PyLong_FromLongLong(val);
@@ -848,9 +895,12 @@
 Q_set(void *ptr, PyObject *value, unsigned size)
 {
 	unsigned PY_LONG_LONG val;
+	unsigned PY_LONG_LONG x;
 	if (get_ulonglong(value, &val) < 0)
 		return NULL;
-	*(unsigned PY_LONG_LONG *)ptr = (unsigned PY_LONG_LONG)SET(*(unsigned PY_LONG_LONG *)ptr, val, size);
+	memcpy(&x, ptr, sizeof(x));
+	x = SET(x, val, size);
+	memcpy(ptr, &x, sizeof(x));
 	_RET(value);
 }
 
@@ -861,16 +911,19 @@
 	unsigned PY_LONG_LONG field;
 	if (get_ulonglong(value, &val) < 0)
 		return NULL;
-	field = SWAP_8(*(unsigned PY_LONG_LONG *)ptr);
+	memcpy(&field, ptr, sizeof(field));
+	field = SWAP_8(field);
 	field = (unsigned PY_LONG_LONG)SET(field, val, size);
-	*(unsigned PY_LONG_LONG *)ptr = SWAP_8(field);
+	field = SWAP_8(field);
+	memcpy(ptr, &field, sizeof(field));
 	_RET(value);
 }
 
 static PyObject *
 Q_get(void *ptr, unsigned size)
 {
-	unsigned PY_LONG_LONG val = *(unsigned PY_LONG_LONG *)ptr;
+	unsigned PY_LONG_LONG val;
+	memcpy(&val, ptr, sizeof(val));
 	GET_BITFIELD(val, size);
 	return PyLong_FromUnsignedLongLong(val);
 }
@@ -878,7 +931,8 @@
 static PyObject *
 Q_get_sw(void *ptr, unsigned size)
 {
-	unsigned PY_LONG_LONG val = *(unsigned PY_LONG_LONG *)ptr;
+	unsigned PY_LONG_LONG val;
+	memcpy(&val, ptr, sizeof(val));
 	val = SWAP_8(val);
 	GET_BITFIELD(val, size);
 	return PyLong_FromUnsignedLongLong(val);
@@ -903,14 +957,16 @@
 			     value->ob_type->tp_name);
 		return NULL;
 	}
-	*(double *)ptr = x;
+	memcpy(ptr, &x, sizeof(double));
 	_RET(value);
 }
 
 static PyObject *
 d_get(void *ptr, unsigned size)
 {
-	return PyFloat_FromDouble(*(double *)ptr);
+	double val;
+	memcpy(&val, ptr, sizeof(val));
+	return PyFloat_FromDouble(val);
 }
 
 static PyObject *
@@ -957,14 +1013,16 @@
 			     value->ob_type->tp_name);
 		return NULL;
 	}
-	*(float *)ptr = x;
+	memcpy(ptr, &x, sizeof(x));
 	_RET(value);
 }
 
 static PyObject *
 f_get(void *ptr, unsigned size)
 {
-	return PyFloat_FromDouble(*(float *)ptr);
+	float val;
+	memcpy(&val, ptr, sizeof(val));
+	return PyFloat_FromDouble(val);
 }
 
 static PyObject *
@@ -1317,6 +1375,7 @@
 		if (-1 == PyUnicode_AsWideChar((PyUnicodeObject *)value,
 					       buffer, PyUnicode_GET_SIZE(value))) {
 			Py_DECREF(value);
+			Py_DECREF(keep);
 			return NULL;
 		}
 		Py_DECREF(value);
diff --git a/Modules/_ctypes/ctypes.h b/Modules/_ctypes/ctypes.h
index 179dcf1..7988595 100644
--- a/Modules/_ctypes/ctypes.h
+++ b/Modules/_ctypes/ctypes.h
@@ -2,15 +2,6 @@
 
 #if (PY_VERSION_HEX < 0x02050000)
 typedef int Py_ssize_t;
-#define lenfunc inquiry
-#define readbufferproc getreadbufferproc
-#define writebufferproc getwritebufferproc
-#define segcountproc getsegcountproc
-#define charbufferproc getcharbufferproc
-#define ssizeargfunc intargfunc
-#define ssizessizeargfunc intintargfunc
-#define ssizeobjargproc intobjargproc
-#define ssizessizeobjargproc intintobjargproc
 #endif
 
 #ifndef MS_WIN32
@@ -30,8 +21,9 @@
 #define PY_LONG_LONG LONG_LONG
 #endif
 
-typedef int (*THUNK)(void);
 typedef struct tagCDataObject CDataObject;
+typedef PyObject *(* GETFUNC)(void *, unsigned size);
+typedef PyObject *(* SETFUNC)(void *, PyObject *value, unsigned size);
 
 /* A default buffer in CDataObject, which can be used for small C types.  If
 this buffer is too small, PyMem_Malloc will be called to create a larger one,
@@ -72,6 +64,16 @@
 };
 
 typedef struct {
+	ffi_closure *pcl; /* the C callable */
+	ffi_cif cif;
+	PyObject *converters;
+	PyObject *callable;
+	SETFUNC setfunc;
+	ffi_type *restype;
+	ffi_type *atypes[0];
+} ffi_info;
+
+typedef struct {
 	/* First part identical to tagCDataObject */
 	PyObject_HEAD
 	char *b_ptr;		/* pointer to memory block */
@@ -85,7 +87,7 @@
 	union value b_value;
 	/* end of tagCDataObject, additional fields follow */
 
-	THUNK thunk;
+	ffi_info *thunk;
 	PyObject *callable;
 
 	/* These two fields will override the ones in the type's stgdict if
@@ -154,17 +156,12 @@
 
 extern void init_callbacks_in_module(PyObject *m);
 
-extern THUNK AllocFunctionCallback(PyObject *callable,
-				   PyObject *converters,
-				   PyObject *restype,
-				   int stdcall);
-extern void FreeCallback(THUNK);
-
 extern PyMethodDef module_methods[];
 
-typedef PyObject *(* GETFUNC)(void *, unsigned size);
-typedef PyObject *(* SETFUNC)(void *, PyObject *value, unsigned size);
-
+extern ffi_info *AllocFunctionCallback(PyObject *callable,
+				       PyObject *converters,
+				       PyObject *restype,
+				       int stdcall);
 /* a table entry describing a predefined ctypes type */
 struct fielddesc {
 	char code;
@@ -201,7 +198,7 @@
 	Py_ssize_t size;	/* number of bytes */
 	Py_ssize_t align;	/* alignment requirements */
 	Py_ssize_t length;	/* number of fields */
-	ffi_type ffi_type;
+	ffi_type ffi_type_pointer;
 	PyObject *proto;	/* Only for Pointer/ArrayObject */
 	SETFUNC setfunc;	/* Only for simple objects */
 	GETFUNC getfunc;	/* Only for simple objects */
diff --git a/Modules/_ctypes/libffi/configure b/Modules/_ctypes/libffi/configure
index 0991b63..c1e5cd4 100755
--- a/Modules/_ctypes/libffi/configure
+++ b/Modules/_ctypes/libffi/configure
@@ -3490,6 +3490,7 @@
 i*86-*-beos*) TARGET=X86; TARGETDIR=x86;;
 i*86-*-freebsd* | i*86-*-kfreebsd*-gnu) TARGET=X86; TARGETDIR=x86;;
 i*86-*-netbsdelf* | i*86-*-knetbsd*-gnu) TARGET=X86; TARGETDIR=x86;;
+i*86-*-openbsd*) TARGET=X86; TARGETDIR=x86;;
 i*86-*-rtems*) TARGET=X86; TARGETDIR=x86;;
 i*86-*-win32*) TARGET=X86_WIN32; TARGETDIR=x86;;
 i*86-*-cygwin*) TARGET=X86_WIN32; TARGETDIR=x86;;
diff --git a/Modules/_ctypes/libffi/configure.ac b/Modules/_ctypes/libffi/configure.ac
index 76bf16e..c7f05d6 100644
--- a/Modules/_ctypes/libffi/configure.ac
+++ b/Modules/_ctypes/libffi/configure.ac
@@ -28,6 +28,7 @@
 i*86-*-beos*) TARGET=X86; TARGETDIR=x86;;
 i*86-*-freebsd* | i*86-*-kfreebsd*-gnu) TARGET=X86; TARGETDIR=x86;;
 i*86-*-netbsdelf* | i*86-*-knetbsd*-gnu) TARGET=X86; TARGETDIR=x86;;
+i*86-*-openbsd*) TARGET=X86; TARGETDIR=x86;;
 i*86-*-rtems*) TARGET=X86; TARGETDIR=x86;;
 i*86-*-win32*) TARGET=X86_WIN32; TARGETDIR=x86;;
 i*86-*-cygwin*) TARGET=X86_WIN32; TARGETDIR=x86;;
diff --git a/Modules/_ctypes/libffi/src/powerpc/darwin_closure.S b/Modules/_ctypes/libffi/src/powerpc/darwin_closure.S
index 7959838..6d9a364 100644
--- a/Modules/_ctypes/libffi/src/powerpc/darwin_closure.S
+++ b/Modules/_ctypes/libffi/src/powerpc/darwin_closure.S
@@ -246,7 +246,7 @@
 /* END(ffi_closure_ASM)  */
 
 .data
-.section __TEXT,__eh_frame,coalesced,no_toc+strip_static_syms+live_support
+.section __TEXT,__eh_frame,coalesced,no_toc+strip_static_syms
 EH_frame1:
 	.set	L$set$0,LECIE1-LSCIE1
 	.long	L$set$0	; Length of Common Information Entry
diff --git a/Modules/_ctypes/libffi/src/x86/ffi.c b/Modules/_ctypes/libffi/src/x86/ffi.c
index e4d5fc3..7f792b7 100644
--- a/Modules/_ctypes/libffi/src/x86/ffi.c
+++ b/Modules/_ctypes/libffi/src/x86/ffi.c
@@ -121,7 +121,7 @@
   switch (cif->rtype->type)
     {
     case FFI_TYPE_VOID:
-#ifndef X86_WIN32
+#if !defined(X86_WIN32) && !defined(__OpenBSD__) && !defined(__FreeBSD__)
     case FFI_TYPE_STRUCT:
 #endif
     case FFI_TYPE_SINT64:
@@ -135,7 +135,7 @@
       cif->flags = FFI_TYPE_SINT64;
       break;
 
-#ifdef X86_WIN32
+#if defined(X86_WIN32) || defined(__OpenBSD__) || defined(__FreeBSD__)
     case FFI_TYPE_STRUCT:
       if (cif->rtype->size == 1)
         {
diff --git a/Modules/_ctypes/stgdict.c b/Modules/_ctypes/stgdict.c
index cb3d599..336be37 100644
--- a/Modules/_ctypes/stgdict.c
+++ b/Modules/_ctypes/stgdict.c
@@ -38,7 +38,7 @@
 StgDict_dealloc(StgDictObject *self)
 {
 	StgDict_clear(self);
-	PyMem_Free(self->ffi_type.elements);
+	PyMem_Free(self->ffi_type_pointer.elements);
 	PyDict_Type.tp_dealloc((PyObject *)self);
 }
 
@@ -49,8 +49,8 @@
 	int size;
 
 	StgDict_clear(dst);
-	PyMem_Free(dst->ffi_type.elements);
-	dst->ffi_type.elements = NULL;
+	PyMem_Free(dst->ffi_type_pointer.elements);
+	dst->ffi_type_pointer.elements = NULL;
 
 	d = (char *)dst;
 	s = (char *)src;
@@ -64,13 +64,15 @@
 	Py_XINCREF(dst->restype);
 	Py_XINCREF(dst->checker);
 
-	if (src->ffi_type.elements == NULL)
+	if (src->ffi_type_pointer.elements == NULL)
 		return 0;
 	size = sizeof(ffi_type *) * (src->length + 1);
-	dst->ffi_type.elements = PyMem_Malloc(size);
-	if (dst->ffi_type.elements == NULL)
+	dst->ffi_type_pointer.elements = PyMem_Malloc(size);
+	if (dst->ffi_type_pointer.elements == NULL)
 		return -1;
-	memcpy(dst->ffi_type.elements, src->ffi_type.elements, size);
+	memcpy(dst->ffi_type_pointer.elements,
+	       src->ffi_type_pointer.elements,
+	       size);
 	return 0;
 }
 
@@ -234,8 +236,8 @@
 	   stuff is sucessfully finished. */
 	stgdict->flags |= DICTFLAG_FINAL;	/* set final */
 
-	if (stgdict->ffi_type.elements)
-		PyMem_Free(stgdict->ffi_type.elements);
+	if (stgdict->ffi_type_pointer.elements)
+		PyMem_Free(stgdict->ffi_type_pointer.elements);
 
 	basedict = PyType_stgdict((PyObject *)((PyTypeObject *)type)->tp_base);
 	if (basedict && !use_broken_old_ctypes_semantics) {
@@ -243,10 +245,12 @@
 		align = basedict->align;
 		union_size = 0;
 		total_align = align ? align : 1;
-		stgdict->ffi_type.type = FFI_TYPE_STRUCT;
-		stgdict->ffi_type.elements = PyMem_Malloc(sizeof(ffi_type *) * (basedict->length + len + 1));
-		memset(stgdict->ffi_type.elements, 0, sizeof(ffi_type *) * (basedict->length + len + 1));
-		memcpy(stgdict->ffi_type.elements, basedict->ffi_type.elements,
+		stgdict->ffi_type_pointer.type = FFI_TYPE_STRUCT;
+		stgdict->ffi_type_pointer.elements = PyMem_Malloc(sizeof(ffi_type *) * (basedict->length + len + 1));
+		memset(stgdict->ffi_type_pointer.elements, 0,
+		       sizeof(ffi_type *) * (basedict->length + len + 1));
+		memcpy(stgdict->ffi_type_pointer.elements,
+		       basedict->ffi_type_pointer.elements,
 		       sizeof(ffi_type *) * (basedict->length));
 		ffi_ofs = basedict->length;
 	} else {
@@ -255,9 +259,10 @@
 		align = 0;
 		union_size = 0;
 		total_align = 1;
-		stgdict->ffi_type.type = FFI_TYPE_STRUCT;
-		stgdict->ffi_type.elements = PyMem_Malloc(sizeof(ffi_type *) * (len + 1));
-		memset(stgdict->ffi_type.elements, 0, sizeof(ffi_type *) * (len + 1));
+		stgdict->ffi_type_pointer.type = FFI_TYPE_STRUCT;
+		stgdict->ffi_type_pointer.elements = PyMem_Malloc(sizeof(ffi_type *) * (len + 1));
+		memset(stgdict->ffi_type_pointer.elements, 0,
+		       sizeof(ffi_type *) * (len + 1));
 		ffi_ofs = 0;
 	}
 
@@ -283,10 +288,10 @@
 				     i);
 			return -1;
 		}
-		stgdict->ffi_type.elements[ffi_ofs + i] = &dict->ffi_type;
+		stgdict->ffi_type_pointer.elements[ffi_ofs + i] = &dict->ffi_type_pointer;
 		dict->flags |= DICTFLAG_FINAL; /* mark field type final */
 		if (PyTuple_Size(pair) == 3) { /* bits specified */
-			switch(dict->ffi_type.type) {
+			switch(dict->ffi_type_pointer.type) {
 			case FFI_TYPE_UINT8:
 			case FFI_TYPE_UINT16:
 			case FFI_TYPE_UINT32:
@@ -357,8 +362,8 @@
 	/* Adjust the size according to the alignment requirements */
 	size = ((size + total_align - 1) / total_align) * total_align;
 
-	stgdict->ffi_type.alignment = total_align;
-	stgdict->ffi_type.size = size;
+	stgdict->ffi_type_pointer.alignment = total_align;
+	stgdict->ffi_type_pointer.size = size;
 
 	stgdict->size = size;
 	stgdict->align = total_align;
diff --git a/Modules/_hotshot.c b/Modules/_hotshot.c
index d5b4cde..2ee4eb9 100644
--- a/Modules/_hotshot.c
+++ b/Modules/_hotshot.c
@@ -26,7 +26,7 @@
 #ifndef HAVE_GETTIMEOFDAY
 #error "This module requires gettimeofday() on non-Windows platforms!"
 #endif
-#if (defined(PYOS_OS2) && defined(PYCC_GCC))
+#if (defined(PYOS_OS2) && defined(PYCC_GCC)) || defined(__QNX__)
 #include <sys/time.h>
 #else
 #include <sys/resource.h>
@@ -308,7 +308,7 @@
     if ((err = unpack_packed_int(self, &len, 0)))
         return err;
 
-    buf = malloc(len);
+    buf = (char *)malloc(len);
     for (i=0; i < len; i++) {
         ch = fgetc(self->logfp);
 	buf[i] = ch;
@@ -918,7 +918,7 @@
 #endif
     }
 #if defined(MS_WINDOWS) || defined(PYOS_OS2) || \
-    defined(__VMS)
+    defined(__VMS) || defined (__QNX__)
     rusage_diff = -1;
 #else
     {
@@ -1403,7 +1403,7 @@
         ++rev;
     while (rev[i] != ' ' && rev[i] != '\0')
         ++i;
-    buffer = malloc(i + 1);
+    buffer = (char *)malloc(i + 1);
     if (buffer != NULL) {
         memmove(buffer, rev, i);
         buffer[i] = '\0';
diff --git a/Modules/_lsprof.c b/Modules/_lsprof.c
index 17c71e9..d665f83 100644
--- a/Modules/_lsprof.c
+++ b/Modules/_lsprof.c
@@ -515,6 +515,7 @@
 	5
 };
 
+static int initialized;
 static PyTypeObject StatsEntryType;
 static PyTypeObject StatsSubEntryType;
 
@@ -857,8 +858,12 @@
 		return;
 	PyDict_SetItemString(d, "Profiler", (PyObject *)&PyProfiler_Type);
 
-	PyStructSequence_InitType(&StatsEntryType, &profiler_entry_desc);
-	PyStructSequence_InitType(&StatsSubEntryType, &profiler_subentry_desc);
+	if (!initialized) {
+		PyStructSequence_InitType(&StatsEntryType, 
+					  &profiler_entry_desc);
+		PyStructSequence_InitType(&StatsSubEntryType, 
+					  &profiler_subentry_desc);
+	}
 	Py_INCREF((PyObject*) &StatsEntryType);
 	Py_INCREF((PyObject*) &StatsSubEntryType);
 	PyModule_AddObject(module, "profiler_entry",
@@ -866,4 +871,5 @@
 	PyModule_AddObject(module, "profiler_subentry",
 			   (PyObject*) &StatsSubEntryType);
 	empty_tuple = PyTuple_New(0);
+	initialized = 1;
 }
diff --git a/Modules/_sqlite/adapters.c b/Modules/_sqlite/adapters.c
new file mode 100644
index 0000000..e6fde03
--- /dev/null
+++ b/Modules/_sqlite/adapters.c
@@ -0,0 +1,40 @@
+/* adapters.c - default adapters
+ *
+ * Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include "util.h"
+#include "module.h"
+#include "adapters.h"
+
+/* dummy, will be implemented in a later version */
+
+PyObject* adapt_date(PyObject* self, PyObject* args, PyObject* kwargs)
+{
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+PyObject* adapt_datetime(PyObject* self, PyObject* args, PyObject* kwargs)
+{
+    Py_INCREF(Py_None);
+    return Py_None;
+}
diff --git a/Modules/_sqlite/adapters.h b/Modules/_sqlite/adapters.h
new file mode 100644
index 0000000..d2e8479
--- /dev/null
+++ b/Modules/_sqlite/adapters.h
@@ -0,0 +1,33 @@
+/* adapters.h - default adapters
+ *
+ * Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_ADAPTERS_H
+#define PYSQLITE_ADAPTERS_H
+#include "Python.h"
+#include "pythread.h"
+#include "sqlite3.h"
+
+PyObject* adapt_date(PyObject* self, PyObject* args, PyObject* kwargs);
+PyObject* adapt_datetime(PyObject* self, PyObject* args, PyObject* kwargs);
+
+#endif
diff --git a/Modules/_sqlite/cache.c b/Modules/_sqlite/cache.c
new file mode 100644
index 0000000..d102e97
--- /dev/null
+++ b/Modules/_sqlite/cache.c
@@ -0,0 +1,362 @@
+/* cache .c - a LRU cache
+ *
+ * Copyright (C) 2004-2006 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include "cache.h"
+
+/* only used internally */
+Node* new_node(PyObject* key, PyObject* data)
+{
+    Node* node;
+
+    node = (Node*) (NodeType.tp_alloc(&NodeType, 0));
+    if (!node) {
+        return NULL;
+    }
+
+    Py_INCREF(key);
+    node->key = key;
+
+    Py_INCREF(data);
+    node->data = data;
+
+    node->prev = NULL;
+    node->next = NULL;
+
+    return node;
+}
+
+void node_dealloc(Node* self)
+{
+    Py_DECREF(self->key);
+    Py_DECREF(self->data);
+
+    self->ob_type->tp_free((PyObject*)self);
+}
+
+int cache_init(Cache* self, PyObject* args, PyObject* kwargs)
+{
+    PyObject* factory;
+    int size = 10;
+
+    self->factory = NULL;
+
+    if (!PyArg_ParseTuple(args, "O|i", &factory, &size))
+    {
+        return -1; 
+    }
+
+    if (size < 5) {
+        size = 5;
+    }
+    self->size = size;
+    self->first = NULL;
+    self->last = NULL;
+
+    self->mapping = PyDict_New();
+    if (!self->mapping) {
+        return -1;
+    }
+
+    Py_INCREF(factory);
+    self->factory = factory;
+
+    self->decref_factory = 1;
+
+    return 0;
+}
+
+void cache_dealloc(Cache* self)
+{
+    Node* node;
+    Node* delete_node;
+
+    if (!self->factory) {
+        /* constructor failed, just get out of here */
+        return;
+    }
+
+    node = self->first;
+    while (node) {
+        delete_node = node;
+        node = node->next;
+        Py_DECREF(delete_node);
+    }
+
+    if (self->decref_factory) {
+        Py_DECREF(self->factory);
+    }
+    Py_DECREF(self->mapping);
+
+    self->ob_type->tp_free((PyObject*)self);
+}
+
+PyObject* cache_get(Cache* self, PyObject* args)
+{
+    PyObject* key = args;
+    Node* node;
+    Node* ptr;
+    PyObject* data;
+
+    node = (Node*)PyDict_GetItem(self->mapping, key);
+    if (node) {
+        node->count++;
+        if (node->prev && node->count > node->prev->count) {
+            ptr = node->prev;
+
+            while (ptr->prev && node->count > ptr->prev->count) {
+                ptr = ptr->prev;
+            }
+
+            if (node->next) {
+                node->next->prev = node->prev;
+            } else {
+                self->last = node->prev;
+            }
+            if (node->prev) {
+                node->prev->next = node->next;
+            }
+            if (ptr->prev) {
+                ptr->prev->next = node;
+            } else {
+                self->first = node;
+            }
+
+            node->next = ptr;
+            node->prev = ptr->prev;
+            if (!node->prev) {
+                self->first = node;
+            }
+            ptr->prev = node;
+        }
+    } else {
+        if (PyDict_Size(self->mapping) == self->size) {
+            if (self->last) {
+                node = self->last;
+
+                if (PyDict_DelItem(self->mapping, self->last->key) != 0) {
+                    return NULL;
+                }
+
+                if (node->prev) {
+                    node->prev->next = NULL;
+                }
+                self->last = node->prev;
+                node->prev = NULL;
+
+                Py_DECREF(node);
+            }
+        }
+
+        data = PyObject_CallFunction(self->factory, "O", key);
+
+        if (!data) {
+            return NULL;
+        }
+
+        node = new_node(key, data);
+        if (!node) {
+            return NULL;
+        }
+        node->prev = self->last;
+
+        Py_DECREF(data);
+
+        if (PyDict_SetItem(self->mapping, key, (PyObject*)node) != 0) {
+            Py_DECREF(node);
+            return NULL;
+        }
+
+        if (self->last) {
+            self->last->next = node;
+        } else {
+            self->first = node;
+        }
+        self->last = node;
+    }
+
+    Py_INCREF(node->data);
+    return node->data;
+}
+
+PyObject* cache_display(Cache* self, PyObject* args)
+{
+    Node* ptr;
+    PyObject* prevkey;
+    PyObject* nextkey;
+    PyObject* fmt_args;
+    PyObject* template;
+    PyObject* display_str;
+
+    ptr = self->first;
+
+    while (ptr) {
+        if (ptr->prev) {
+            prevkey = ptr->prev->key;
+        } else {
+            prevkey = Py_None;
+        }
+        Py_INCREF(prevkey);
+
+        if (ptr->next) {
+            nextkey = ptr->next->key;
+        } else {
+            nextkey = Py_None;
+        }
+        Py_INCREF(nextkey);
+
+        fmt_args = Py_BuildValue("OOO", prevkey, ptr->key, nextkey);
+        if (!fmt_args) {
+            return NULL;
+        }
+        template = PyString_FromString("%s <- %s ->%s\n");
+        if (!template) {
+            return NULL;
+        }
+        display_str = PyString_Format(template, fmt_args);
+        Py_DECREF(template);
+        Py_DECREF(fmt_args);
+        if (!display_str) {
+            return NULL;
+        }
+        PyObject_Print(display_str, stdout, Py_PRINT_RAW);
+        Py_DECREF(display_str);
+
+        Py_DECREF(prevkey);
+        Py_DECREF(nextkey);
+
+        ptr = ptr->next;
+    }
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+static PyMethodDef cache_methods[] = {
+    {"get", (PyCFunction)cache_get, METH_O,
+        PyDoc_STR("Gets an entry from the cache.")},
+    {"display", (PyCFunction)cache_display, METH_NOARGS,
+        PyDoc_STR("For debugging only.")},
+    {NULL, NULL}
+};
+
+PyTypeObject NodeType = {
+        PyObject_HEAD_INIT(NULL)
+        0,                                              /* ob_size */
+        MODULE_NAME "Node",                             /* tp_name */
+        sizeof(Node),                                   /* tp_basicsize */
+        0,                                              /* tp_itemsize */
+        (destructor)node_dealloc,                       /* tp_dealloc */
+        0,                                              /* tp_print */
+        0,                                              /* tp_getattr */
+        0,                                              /* tp_setattr */
+        0,                                              /* tp_compare */
+        0,                                              /* tp_repr */
+        0,                                              /* tp_as_number */
+        0,                                              /* tp_as_sequence */
+        0,                                              /* tp_as_mapping */
+        0,                                              /* tp_hash */
+        0,                                              /* tp_call */
+        0,                                              /* tp_str */
+        0,                                              /* tp_getattro */
+        0,                                              /* tp_setattro */
+        0,                                              /* tp_as_buffer */
+        Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE,         /* tp_flags */
+        0,                                              /* tp_doc */
+        0,                                              /* tp_traverse */
+        0,                                              /* tp_clear */
+        0,                                              /* tp_richcompare */
+        0,                                              /* tp_weaklistoffset */
+        0,                                              /* tp_iter */
+        0,                                              /* tp_iternext */
+        0,                                              /* tp_methods */
+        0,                                              /* tp_members */
+        0,                                              /* tp_getset */
+        0,                                              /* tp_base */
+        0,                                              /* tp_dict */
+        0,                                              /* tp_descr_get */
+        0,                                              /* tp_descr_set */
+        0,                                              /* tp_dictoffset */
+        (initproc)0,                                    /* tp_init */
+        0,                                              /* tp_alloc */
+        0,                                              /* tp_new */
+        0                                               /* tp_free */
+};
+
+PyTypeObject CacheType = {
+        PyObject_HEAD_INIT(NULL)
+        0,                                              /* ob_size */
+        MODULE_NAME ".Cache",                           /* tp_name */
+        sizeof(Cache),                                  /* tp_basicsize */
+        0,                                              /* tp_itemsize */
+        (destructor)cache_dealloc,                      /* tp_dealloc */
+        0,                                              /* tp_print */
+        0,                                              /* tp_getattr */
+        0,                                              /* tp_setattr */
+        0,                                              /* tp_compare */
+        0,                                              /* tp_repr */
+        0,                                              /* tp_as_number */
+        0,                                              /* tp_as_sequence */
+        0,                                              /* tp_as_mapping */
+        0,                                              /* tp_hash */
+        0,                                              /* tp_call */
+        0,                                              /* tp_str */
+        0,                                              /* tp_getattro */
+        0,                                              /* tp_setattro */
+        0,                                              /* tp_as_buffer */
+        Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE,         /* tp_flags */
+        0,                                              /* tp_doc */
+        0,                                              /* tp_traverse */
+        0,                                              /* tp_clear */
+        0,                                              /* tp_richcompare */
+        0,                                              /* tp_weaklistoffset */
+        0,                                              /* tp_iter */
+        0,                                              /* tp_iternext */
+        cache_methods,                                  /* tp_methods */
+        0,                                              /* tp_members */
+        0,                                              /* tp_getset */
+        0,                                              /* tp_base */
+        0,                                              /* tp_dict */
+        0,                                              /* tp_descr_get */
+        0,                                              /* tp_descr_set */
+        0,                                              /* tp_dictoffset */
+        (initproc)cache_init,                           /* tp_init */
+        0,                                              /* tp_alloc */
+        0,                                              /* tp_new */
+        0                                               /* tp_free */
+};
+
+extern int cache_setup_types(void)
+{
+    int rc;
+
+    NodeType.tp_new = PyType_GenericNew;
+    CacheType.tp_new = PyType_GenericNew;
+
+    rc = PyType_Ready(&NodeType);
+    if (rc < 0) {
+        return rc;
+    }
+
+    rc = PyType_Ready(&CacheType);
+    return rc;
+}
diff --git a/Modules/_sqlite/cache.h b/Modules/_sqlite/cache.h
new file mode 100644
index 0000000..5cc16f3
--- /dev/null
+++ b/Modules/_sqlite/cache.h
@@ -0,0 +1,61 @@
+/* cache.h - definitions for the LRU cache
+ *
+ * Copyright (C) 2004-2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_CACHE_H
+#define PYSQLITE_CACHE_H
+#include "Python.h"
+
+typedef struct _Node
+{
+    PyObject_HEAD
+    PyObject* key;
+    PyObject* data;
+    long count;
+    struct _Node* prev;
+    struct _Node* next;
+} Node;
+
+typedef struct
+{
+    PyObject_HEAD
+    int size;
+    PyObject* mapping;
+    PyObject* factory;
+    Node* first;
+    Node* last;
+    int decref_factory;
+} Cache;
+
+extern PyTypeObject NodeType;
+extern PyTypeObject CacheType;
+
+int node_init(Node* self, PyObject* args, PyObject* kwargs);
+void node_dealloc(Node* self);
+
+int cache_init(Cache* self, PyObject* args, PyObject* kwargs);
+void cache_dealloc(Cache* self);
+PyObject* cache_get(Cache* self, PyObject* args);
+
+int cache_setup_types(void);
+
+#endif
diff --git a/Modules/_sqlite/connection.c b/Modules/_sqlite/connection.c
new file mode 100644
index 0000000..78aad37
--- /dev/null
+++ b/Modules/_sqlite/connection.c
@@ -0,0 +1,1082 @@
+/* connection.c - the connection type
+ *
+ * Copyright (C) 2004-2006 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ * 
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include "cache.h"
+#include "module.h"
+#include "connection.h"
+#include "statement.h"
+#include "cursor.h"
+#include "prepare_protocol.h"
+#include "util.h"
+#include "sqlitecompat.h"
+
+#include "pythread.h"
+
+static int connection_set_isolation_level(Connection* self, PyObject* isolation_level);
+
+int connection_init(Connection* self, PyObject* args, PyObject* kwargs)
+{
+    static char *kwlist[] = {"database", "timeout", "detect_types", "isolation_level", "check_same_thread", "factory", "cached_statements", NULL, NULL};
+
+    char* database;
+    int detect_types = 0;
+    PyObject* isolation_level = NULL;
+    PyObject* factory = NULL;
+    int check_same_thread = 1;
+    int cached_statements = 100;
+    double timeout = 5.0;
+    int rc;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s|diOiOi", kwlist,
+                                     &database, &timeout, &detect_types, &isolation_level, &check_same_thread, &factory, &cached_statements))
+    {
+        return -1; 
+    }
+
+    self->begin_statement = NULL;
+
+    self->statement_cache = NULL;
+
+    Py_INCREF(Py_None);
+    self->row_factory = Py_None;
+
+    Py_INCREF(&PyUnicode_Type);
+    self->text_factory = (PyObject*)&PyUnicode_Type;
+
+    Py_BEGIN_ALLOW_THREADS
+    rc = sqlite3_open(database, &self->db);
+    Py_END_ALLOW_THREADS
+
+    if (rc != SQLITE_OK) {
+        _seterror(self->db);
+        return -1;
+    }
+
+    if (!isolation_level) {
+        isolation_level = PyString_FromString("");
+    } else {
+        Py_INCREF(isolation_level);
+    }
+    self->isolation_level = NULL;
+    connection_set_isolation_level(self, isolation_level);
+    Py_DECREF(isolation_level);
+
+    self->statement_cache = (Cache*)PyObject_CallFunction((PyObject*)&CacheType, "Oi", self, cached_statements);
+    if (PyErr_Occurred()) {
+        return -1;
+    }
+
+    /* By default, the Cache class INCREFs the factory in its initializer, and
+     * decrefs it in its deallocator method. Since this would create a circular
+     * reference here, we're breaking it by decrementing self, and telling the
+     * cache class to not decref the factory (self) in its deallocator.
+     */
+    self->statement_cache->decref_factory = 0;
+    Py_DECREF(self);
+
+    self->inTransaction = 0;
+    self->detect_types = detect_types;
+    self->timeout = timeout;
+    (void)sqlite3_busy_timeout(self->db, (int)(timeout*1000));
+
+    self->thread_ident = PyThread_get_thread_ident();
+    self->check_same_thread = check_same_thread;
+
+    self->function_pinboard = PyDict_New();
+    if (!self->function_pinboard) {
+        return -1;
+    }
+
+    self->collations = PyDict_New();
+    if (!self->collations) {
+        return -1;
+    }
+
+    self->Warning = Warning;
+    self->Error = Error;
+    self->InterfaceError = InterfaceError;
+    self->DatabaseError = DatabaseError;
+    self->DataError = DataError;
+    self->OperationalError = OperationalError;
+    self->IntegrityError = IntegrityError;
+    self->InternalError = InternalError;
+    self->ProgrammingError = ProgrammingError;
+    self->NotSupportedError = NotSupportedError;
+
+    return 0;
+}
+
+void flush_statement_cache(Connection* self)
+{
+    Node* node;
+    Statement* statement;
+
+    node = self->statement_cache->first;
+
+    while (node) {
+        statement = (Statement*)(node->data);
+        (void)statement_finalize(statement);
+        node = node->next;
+    }
+
+    Py_DECREF(self->statement_cache);
+    self->statement_cache = (Cache*)PyObject_CallFunction((PyObject*)&CacheType, "O", self);
+    Py_DECREF(self);
+    self->statement_cache->decref_factory = 0;
+}
+
+void reset_all_statements(Connection* self)
+{
+    Node* node;
+    Statement* statement;
+
+    node = self->statement_cache->first;
+
+    while (node) {
+        statement = (Statement*)(node->data);
+        (void)statement_reset(statement);
+        node = node->next;
+    }
+}
+
+void connection_dealloc(Connection* self)
+{
+    Py_XDECREF(self->statement_cache);
+
+    /* Clean up if user has not called .close() explicitly. */
+    if (self->db) {
+        Py_BEGIN_ALLOW_THREADS
+        sqlite3_close(self->db);
+        Py_END_ALLOW_THREADS
+    }
+
+    if (self->begin_statement) {
+        PyMem_Free(self->begin_statement);
+    }
+    Py_XDECREF(self->isolation_level);
+    Py_XDECREF(self->function_pinboard);
+    Py_XDECREF(self->row_factory);
+    Py_XDECREF(self->text_factory);
+    Py_XDECREF(self->collations);
+
+    self->ob_type->tp_free((PyObject*)self);
+}
+
+PyObject* connection_cursor(Connection* self, PyObject* args, PyObject* kwargs)
+{
+    static char *kwlist[] = {"factory", NULL, NULL};
+    PyObject* factory = NULL;
+    PyObject* cursor;
+
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|O", kwlist,
+                                     &factory)) {
+        return NULL;
+    }
+
+    if (!check_thread(self) || !check_connection(self)) {
+        return NULL;
+    }
+
+    if (factory == NULL) {
+        factory = (PyObject*)&CursorType;
+    }
+
+    cursor = PyObject_CallFunction(factory, "O", self);
+
+    if (cursor && self->row_factory != Py_None) {
+        Py_XDECREF(((Cursor*)cursor)->row_factory);
+        Py_INCREF(self->row_factory);
+        ((Cursor*)cursor)->row_factory = self->row_factory;
+    }
+
+    return cursor;
+}
+
+PyObject* connection_close(Connection* self, PyObject* args)
+{
+    int rc;
+
+    if (!check_thread(self)) {
+        return NULL;
+    }
+
+    flush_statement_cache(self);
+
+    if (self->db) {
+        Py_BEGIN_ALLOW_THREADS
+        rc = sqlite3_close(self->db);
+        Py_END_ALLOW_THREADS
+
+        if (rc != SQLITE_OK) {
+            _seterror(self->db);
+            return NULL;
+        } else {
+            self->db = NULL;
+        }
+    }
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+/*
+ * Checks if a connection object is usable (i. e. not closed).
+ *
+ * 0 => error; 1 => ok
+ */
+int check_connection(Connection* con)
+{
+    if (!con->db) {
+        PyErr_SetString(ProgrammingError, "Cannot operate on a closed database.");
+        return 0;
+    } else {
+        return 1;
+    }
+}
+
+PyObject* _connection_begin(Connection* self)
+{
+    int rc;
+    const char* tail;
+    sqlite3_stmt* statement;
+
+    Py_BEGIN_ALLOW_THREADS
+    rc = sqlite3_prepare(self->db, self->begin_statement, -1, &statement, &tail);
+    Py_END_ALLOW_THREADS
+
+    if (rc != SQLITE_OK) {
+        _seterror(self->db);
+        goto error;
+    }
+
+    rc = _sqlite_step_with_busyhandler(statement, self);
+    if (rc == SQLITE_DONE) {
+        self->inTransaction = 1;
+    } else {
+        _seterror(self->db);
+    }
+
+    Py_BEGIN_ALLOW_THREADS
+    rc = sqlite3_finalize(statement);
+    Py_END_ALLOW_THREADS
+
+    if (rc != SQLITE_OK && !PyErr_Occurred()) {
+        _seterror(self->db);
+    }
+
+error:
+    if (PyErr_Occurred()) {
+        return NULL;
+    } else {
+        Py_INCREF(Py_None);
+        return Py_None;
+    }
+}
+
+PyObject* connection_commit(Connection* self, PyObject* args)
+{
+    int rc;
+    const char* tail;
+    sqlite3_stmt* statement;
+
+    if (!check_thread(self) || !check_connection(self)) {
+        return NULL;
+    }
+
+    if (self->inTransaction) {
+        Py_BEGIN_ALLOW_THREADS
+        rc = sqlite3_prepare(self->db, "COMMIT", -1, &statement, &tail);
+        Py_END_ALLOW_THREADS
+        if (rc != SQLITE_OK) {
+            _seterror(self->db);
+            goto error;
+        }
+
+        rc = _sqlite_step_with_busyhandler(statement, self);
+        if (rc == SQLITE_DONE) {
+            self->inTransaction = 0;
+        } else {
+            _seterror(self->db);
+        }
+
+        Py_BEGIN_ALLOW_THREADS
+        rc = sqlite3_finalize(statement);
+        Py_END_ALLOW_THREADS
+        if (rc != SQLITE_OK && !PyErr_Occurred()) {
+            _seterror(self->db);
+        }
+
+    }
+
+error:
+    if (PyErr_Occurred()) {
+        return NULL;
+    } else {
+        Py_INCREF(Py_None);
+        return Py_None;
+    }
+}
+
+PyObject* connection_rollback(Connection* self, PyObject* args)
+{
+    int rc;
+    const char* tail;
+    sqlite3_stmt* statement;
+
+    if (!check_thread(self) || !check_connection(self)) {
+        return NULL;
+    }
+
+    if (self->inTransaction) {
+        reset_all_statements(self);
+
+        Py_BEGIN_ALLOW_THREADS
+        rc = sqlite3_prepare(self->db, "ROLLBACK", -1, &statement, &tail);
+        Py_END_ALLOW_THREADS
+        if (rc != SQLITE_OK) {
+            _seterror(self->db);
+            goto error;
+        }
+
+        rc = _sqlite_step_with_busyhandler(statement, self);
+        if (rc == SQLITE_DONE) {
+            self->inTransaction = 0;
+        } else {
+            _seterror(self->db);
+        }
+
+        Py_BEGIN_ALLOW_THREADS
+        rc = sqlite3_finalize(statement);
+        Py_END_ALLOW_THREADS
+        if (rc != SQLITE_OK && !PyErr_Occurred()) {
+            _seterror(self->db);
+        }
+
+    }
+
+error:
+    if (PyErr_Occurred()) {
+        return NULL;
+    } else {
+        Py_INCREF(Py_None);
+        return Py_None;
+    }
+}
+
+void _set_result(sqlite3_context* context, PyObject* py_val)
+{
+    long longval;
+    const char* buffer;
+    Py_ssize_t buflen;
+    PyObject* stringval;
+
+    if (PyErr_Occurred()) {
+        /* Errors in callbacks are ignored, and we return NULL */
+        PyErr_Clear();
+        sqlite3_result_null(context);
+    } else if (py_val == Py_None) {
+        sqlite3_result_null(context);
+    } else if (PyInt_Check(py_val)) {
+        longval = PyInt_AsLong(py_val);
+        /* TODO: investigate what to do with range overflows - long vs. long long */
+        sqlite3_result_int64(context, (PY_LONG_LONG)longval);
+    } else if (PyFloat_Check(py_val)) {
+        sqlite3_result_double(context, PyFloat_AsDouble(py_val));
+    } else if (PyBuffer_Check(py_val)) {
+        if (PyObject_AsCharBuffer(py_val, &buffer, &buflen) != 0) {
+            PyErr_SetString(PyExc_ValueError, "could not convert BLOB to buffer");
+        }
+        sqlite3_result_blob(context, buffer, buflen, SQLITE_TRANSIENT);
+    } else if (PyString_Check(py_val)) {
+        sqlite3_result_text(context, PyString_AsString(py_val), -1, SQLITE_TRANSIENT);
+    } else if (PyUnicode_Check(py_val)) {
+        stringval = PyUnicode_AsUTF8String(py_val);
+        sqlite3_result_text(context, PyString_AsString(stringval), -1, SQLITE_TRANSIENT);
+        Py_DECREF(stringval);
+    } else {
+        /* TODO: raise error */
+    }
+}
+
+PyObject* _build_py_params(sqlite3_context *context, int argc, sqlite3_value** argv)
+{
+    PyObject* args;
+    int i;
+    sqlite3_value* cur_value;
+    PyObject* cur_py_value;
+    const char* val_str;
+    PY_LONG_LONG val_int;
+    Py_ssize_t buflen;
+    void* raw_buffer;
+
+    args = PyTuple_New(argc);
+    if (!args) {
+        return NULL;
+    }
+
+    for (i = 0; i < argc; i++) {
+        cur_value = argv[i];
+        switch (sqlite3_value_type(argv[i])) {
+            case SQLITE_INTEGER:
+                val_int = sqlite3_value_int64(cur_value);
+                cur_py_value = PyInt_FromLong((long)val_int);
+                break;
+            case SQLITE_FLOAT:
+                cur_py_value = PyFloat_FromDouble(sqlite3_value_double(cur_value));
+                break;
+            case SQLITE_TEXT:
+                val_str = (const char*)sqlite3_value_text(cur_value);
+                cur_py_value = PyUnicode_DecodeUTF8(val_str, strlen(val_str), NULL);
+                /* TODO: have a way to show errors here */
+                if (!cur_py_value) {
+                    Py_INCREF(Py_None);
+                    cur_py_value = Py_None;
+                }
+                break;
+            case SQLITE_BLOB:
+                buflen = sqlite3_value_bytes(cur_value);
+                cur_py_value = PyBuffer_New(buflen);
+                if (!cur_py_value) {
+                    /* TODO: error */
+                }
+                if (PyObject_AsWriteBuffer(cur_py_value, &raw_buffer, &buflen)) {
+                    /* TODO: error */
+                }
+                memcpy(raw_buffer, sqlite3_value_blob(cur_value), buflen);
+                break;
+            case SQLITE_NULL:
+            default:
+                Py_INCREF(Py_None);
+                cur_py_value = Py_None;
+        }
+        PyTuple_SetItem(args, i, cur_py_value);
+
+    }
+
+    return args;
+}
+
+void _func_callback(sqlite3_context* context, int argc, sqlite3_value** argv)
+{
+    PyObject* args;
+    PyObject* py_func;
+    PyObject* py_retval;
+
+
+    PyGILState_STATE threadstate;
+
+    threadstate = PyGILState_Ensure();
+
+    py_func = (PyObject*)sqlite3_user_data(context);
+
+    args = _build_py_params(context, argc, argv);
+
+    py_retval = PyObject_CallObject(py_func, args);
+    Py_DECREF(args);
+
+    _set_result(context, py_retval);
+    Py_XDECREF(py_retval);
+
+    PyGILState_Release(threadstate);
+}
+
+static void _step_callback(sqlite3_context *context, int argc, sqlite3_value** params)
+{
+    PyObject* args;
+    PyObject* function_result;
+    PyObject* aggregate_class;
+    PyObject** aggregate_instance;
+    PyObject* stepmethod;
+
+    PyGILState_STATE threadstate;
+
+    threadstate = PyGILState_Ensure();
+
+    aggregate_class = (PyObject*)sqlite3_user_data(context);
+
+    aggregate_instance = (PyObject**)sqlite3_aggregate_context(context, sizeof(PyObject*));
+
+    if (*aggregate_instance == 0) {
+        *aggregate_instance = PyObject_CallFunction(aggregate_class, "");
+
+        if (PyErr_Occurred())
+        {
+            PyErr_Clear();
+            *aggregate_instance = 0;
+            PyGILState_Release(threadstate);
+            return;
+        }
+    }
+
+    stepmethod = PyObject_GetAttrString(*aggregate_instance, "step");
+    if (!stepmethod)
+    {
+        PyGILState_Release(threadstate);
+        return;
+    }
+
+    args = _build_py_params(context, argc, params);
+
+    function_result = PyObject_CallObject(stepmethod, args);
+    Py_DECREF(args);
+    Py_DECREF(stepmethod);
+
+    if (function_result == NULL) {
+        PyErr_Clear();
+    } else {
+        Py_DECREF(function_result);
+    }
+
+    PyGILState_Release(threadstate);
+}
+
+void _final_callback(sqlite3_context* context)
+{
+    PyObject* args;
+    PyObject* function_result;
+    PyObject** aggregate_instance;
+    PyObject* aggregate_class;
+    PyObject* finalizemethod;
+
+    PyGILState_STATE threadstate;
+
+    threadstate = PyGILState_Ensure();
+
+    aggregate_class = (PyObject*)sqlite3_user_data(context);
+
+    aggregate_instance = (PyObject**)sqlite3_aggregate_context(context, sizeof(PyObject*));
+    if (!*aggregate_instance) {
+        /* this branch is executed if there was an exception in the aggregate's
+         * __init__ */
+
+        PyGILState_Release(threadstate);
+        return;
+    }
+
+    finalizemethod = PyObject_GetAttrString(*aggregate_instance, "finalize");
+
+    if (!finalizemethod) {
+        /*
+        PyErr_SetString(ProgrammingError, "finalize method missing");
+        goto error;
+        */
+        Py_INCREF(Py_None);
+        function_result = Py_None;
+    } else {
+        args = PyTuple_New(0);
+        if (!args)
+                return;
+        function_result = PyObject_CallObject(finalizemethod, args);
+        Py_DECREF(args);
+        Py_DECREF(finalizemethod);
+    }
+
+    _set_result(context, function_result);
+    Py_XDECREF(*aggregate_instance);
+    Py_XDECREF(function_result);
+
+    PyGILState_Release(threadstate);
+}
+
+
+PyObject* connection_create_function(Connection* self, PyObject* args, PyObject* kwargs)
+{
+    static char *kwlist[] = {"name", "narg", "func", NULL, NULL};
+
+    PyObject* func;
+    char* name;
+    int narg;
+    int rc;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "siO", kwlist,
+                                     &name, &narg, &func))
+    {
+        return NULL;
+    }
+
+    rc = sqlite3_create_function(self->db, name, narg, SQLITE_UTF8, (void*)func, _func_callback, NULL, NULL);
+
+    PyDict_SetItem(self->function_pinboard, func, Py_None);
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+PyObject* connection_create_aggregate(Connection* self, PyObject* args, PyObject* kwargs)
+{
+    PyObject* aggregate_class;
+
+    int n_arg;
+    char* name;
+    static char *kwlist[] = { "name", "n_arg", "aggregate_class", NULL };
+    int rc;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "siO:create_aggregate",
+                                      kwlist, &name, &n_arg, &aggregate_class)) {
+        return NULL;
+    }
+
+    rc = sqlite3_create_function(self->db, name, n_arg, SQLITE_UTF8, (void*)aggregate_class, 0, &_step_callback, &_final_callback);
+    if (rc != SQLITE_OK) {
+        _seterror(self->db);
+        return NULL;
+    } else {
+        PyDict_SetItem(self->function_pinboard, aggregate_class, Py_None);
+
+        Py_INCREF(Py_None);
+        return Py_None;
+    }
+}
+
+int check_thread(Connection* self)
+{
+    if (self->check_same_thread) {
+        if (PyThread_get_thread_ident() != self->thread_ident) {
+            PyErr_Format(ProgrammingError,
+                        "SQLite objects created in a thread can only be used in that same thread."
+                        "The object was created in thread id %ld and this is thread id %ld",
+                        self->thread_ident, PyThread_get_thread_ident());
+            return 0;
+        }
+
+    }
+
+    return 1;
+}
+
+static PyObject* connection_get_isolation_level(Connection* self, void* unused)
+{
+    Py_INCREF(self->isolation_level);
+    return self->isolation_level;
+}
+
+static PyObject* connection_get_total_changes(Connection* self, void* unused)
+{
+    if (!check_connection(self)) {
+        return NULL;
+    } else {
+        return Py_BuildValue("i", sqlite3_total_changes(self->db));
+    }
+}
+
+static int connection_set_isolation_level(Connection* self, PyObject* isolation_level)
+{
+    PyObject* empty;
+    PyObject* res;
+    PyObject* begin_statement;
+
+    Py_XDECREF(self->isolation_level);
+
+    if (self->begin_statement) {
+        PyMem_Free(self->begin_statement);
+        self->begin_statement = NULL;
+    }
+
+    if (isolation_level == Py_None) {
+        Py_INCREF(Py_None);
+        self->isolation_level = Py_None;
+
+        empty = PyTuple_New(0);
+        if (!empty) {
+            return -1;
+        }
+        res = connection_commit(self, empty);
+        if (!res) {
+            return -1;
+        }
+        Py_DECREF(empty);
+        Py_DECREF(res);
+
+        self->inTransaction = 0;
+    } else {
+        Py_INCREF(isolation_level);
+        self->isolation_level = isolation_level;
+
+        begin_statement = PyString_FromString("BEGIN ");
+        if (!begin_statement) {
+            return -1;
+        }
+        PyString_Concat(&begin_statement, isolation_level);
+        if (!begin_statement) {
+            return -1;
+        }
+
+        self->begin_statement = PyMem_Malloc(PyString_Size(begin_statement) + 2);
+        if (!self->begin_statement) {
+            return -1;
+        }
+
+        strcpy(self->begin_statement, PyString_AsString(begin_statement));
+        Py_DECREF(begin_statement);
+    }
+
+    return 0;
+}
+
+PyObject* connection_call(Connection* self, PyObject* args, PyObject* kwargs)
+{
+    PyObject* sql;
+    Statement* statement;
+    int rc;
+
+    if (!PyArg_ParseTuple(args, "O", &sql)) {
+        return NULL;
+    }
+
+    statement = PyObject_New(Statement, &StatementType);
+    if (!statement) {
+        return NULL;
+    }
+
+    rc = statement_create(statement, self, sql);
+
+    if (rc != SQLITE_OK) {
+        if (rc == PYSQLITE_TOO_MUCH_SQL) {
+            PyErr_SetString(Warning, "You can only execute one statement at a time.");
+        } else if (rc == PYSQLITE_SQL_WRONG_TYPE) {
+            PyErr_SetString(Warning, "SQL is of wrong type. Must be string or unicode.");
+        } else {
+            _seterror(self->db);
+        }
+
+        Py_DECREF(statement);
+        statement = 0;
+    }
+
+    return (PyObject*)statement;
+}
+
+PyObject* connection_execute(Connection* self, PyObject* args, PyObject* kwargs)
+{
+    PyObject* cursor = 0;
+    PyObject* result = 0;
+    PyObject* method = 0;
+
+    cursor = PyObject_CallMethod((PyObject*)self, "cursor", "");
+    if (!cursor) {
+        goto error;
+    }
+
+    method = PyObject_GetAttrString(cursor, "execute");
+    if (!method) {
+        Py_DECREF(cursor);
+        cursor = 0;
+        goto error;
+    }
+
+    result = PyObject_CallObject(method, args);
+    if (!result) {
+        Py_DECREF(cursor);
+        cursor = 0;
+    }
+
+error:
+    Py_XDECREF(result);
+    Py_XDECREF(method);
+
+    return cursor;
+}
+
+PyObject* connection_executemany(Connection* self, PyObject* args, PyObject* kwargs)
+{
+    PyObject* cursor = 0;
+    PyObject* result = 0;
+    PyObject* method = 0;
+
+    cursor = PyObject_CallMethod((PyObject*)self, "cursor", "");
+    if (!cursor) {
+        goto error;
+    }
+
+    method = PyObject_GetAttrString(cursor, "executemany");
+    if (!method) {
+        Py_DECREF(cursor);
+        cursor = 0;
+        goto error;
+    }
+
+    result = PyObject_CallObject(method, args);
+    if (!result) {
+        Py_DECREF(cursor);
+        cursor = 0;
+    }
+
+error:
+    Py_XDECREF(result);
+    Py_XDECREF(method);
+
+    return cursor;
+}
+
+PyObject* connection_executescript(Connection* self, PyObject* args, PyObject* kwargs)
+{
+    PyObject* cursor = 0;
+    PyObject* result = 0;
+    PyObject* method = 0;
+
+    cursor = PyObject_CallMethod((PyObject*)self, "cursor", "");
+    if (!cursor) {
+        goto error;
+    }
+
+    method = PyObject_GetAttrString(cursor, "executescript");
+    if (!method) {
+        Py_DECREF(cursor);
+        cursor = 0;
+        goto error;
+    }
+
+    result = PyObject_CallObject(method, args);
+    if (!result) {
+        Py_DECREF(cursor);
+        cursor = 0;
+    }
+
+error:
+    Py_XDECREF(result);
+    Py_XDECREF(method);
+
+    return cursor;
+}
+
+/* ------------------------- COLLATION CODE ------------------------ */
+
+static int
+collation_callback(
+        void* context,
+        int text1_length, const void* text1_data,
+        int text2_length, const void* text2_data)
+{
+    PyObject* callback = (PyObject*)context;
+    PyObject* string1 = 0;
+    PyObject* string2 = 0;
+    PyGILState_STATE gilstate;
+
+    PyObject* retval = NULL;
+    int result = 0;
+
+    gilstate = PyGILState_Ensure();
+
+    if (PyErr_Occurred()) {
+        goto finally;
+    }
+
+    string1 = PyString_FromStringAndSize((const char*)text1_data, text1_length);
+    string2 = PyString_FromStringAndSize((const char*)text2_data, text2_length);
+
+    if (!string1 || !string2) {
+        goto finally; /* failed to allocate strings */
+    }
+
+    retval = PyObject_CallFunctionObjArgs(callback, string1, string2, NULL);
+
+    if (!retval) {
+        /* execution failed */
+        goto finally;
+    }
+
+    result = PyInt_AsLong(retval);
+    if (PyErr_Occurred()) {
+        result = 0;
+    }
+
+finally:
+    Py_XDECREF(string1);
+    Py_XDECREF(string2);
+    Py_XDECREF(retval);
+
+    PyGILState_Release(gilstate);
+
+    return result;
+}
+
+static PyObject *
+connection_create_collation(Connection* self, PyObject* args)
+{
+    PyObject* callable;
+    PyObject* uppercase_name = 0;
+    PyObject* name;
+    PyObject* retval;
+    char* chk;
+    int rc;
+
+    if (!check_thread(self) || !check_connection(self)) {
+        goto finally;
+    }
+
+    if (!PyArg_ParseTuple(args, "O!O:create_collation(name, callback)", &PyString_Type, &name, &callable)) {
+        goto finally;
+    }
+
+    uppercase_name = PyObject_CallMethod(name, "upper", "");
+    if (!uppercase_name) {
+        goto finally;
+    }
+
+    chk = PyString_AsString(uppercase_name);
+    while (*chk) {
+        if ((*chk >= '0' && *chk <= '9')
+         || (*chk >= 'A' && *chk <= 'Z')
+         || (*chk == '_'))
+        {
+            chk++;
+        } else {
+            PyErr_SetString(ProgrammingError, "invalid character in collation name");
+            goto finally;
+        }
+    }
+
+    if (callable != Py_None && !PyCallable_Check(callable)) {
+        PyErr_SetString(PyExc_TypeError, "parameter must be callable");
+        goto finally;
+    }
+
+    if (callable != Py_None) {
+        PyDict_SetItem(self->collations, uppercase_name, callable);
+    } else {
+        PyDict_DelItem(self->collations, uppercase_name);
+    }
+
+    rc = sqlite3_create_collation(self->db,
+                                  PyString_AsString(uppercase_name),
+                                  SQLITE_UTF8,
+                                  (callable != Py_None) ? callable : NULL,
+                                  (callable != Py_None) ? collation_callback : NULL);
+    if (rc != SQLITE_OK) {
+        PyDict_DelItem(self->collations, uppercase_name);
+        _seterror(self->db);
+        goto finally;
+    }
+
+finally:
+    Py_XDECREF(uppercase_name);
+
+    if (PyErr_Occurred()) {
+        retval = NULL;
+    } else {
+        Py_INCREF(Py_None);
+        retval = Py_None;
+    }
+
+    return retval;
+}
+
+static char connection_doc[] =
+PyDoc_STR("<missing docstring>");
+
+static PyGetSetDef connection_getset[] = {
+    {"isolation_level",  (getter)connection_get_isolation_level, (setter)connection_set_isolation_level},
+    {"total_changes",  (getter)connection_get_total_changes, (setter)0},
+    {NULL}
+};
+
+static PyMethodDef connection_methods[] = {
+    {"cursor", (PyCFunction)connection_cursor, METH_VARARGS|METH_KEYWORDS,
+        PyDoc_STR("Return a cursor for the connection.")},
+    {"close", (PyCFunction)connection_close, METH_NOARGS,
+        PyDoc_STR("Closes the connection.")},
+    {"commit", (PyCFunction)connection_commit, METH_NOARGS,
+        PyDoc_STR("Commit the current transaction.")},
+    {"rollback", (PyCFunction)connection_rollback, METH_NOARGS,
+        PyDoc_STR("Roll back the current transaction.")},
+    {"create_function", (PyCFunction)connection_create_function, METH_VARARGS|METH_KEYWORDS,
+        PyDoc_STR("Creates a new function. Non-standard.")},
+    {"create_aggregate", (PyCFunction)connection_create_aggregate, METH_VARARGS|METH_KEYWORDS,
+        PyDoc_STR("Creates a new aggregate. Non-standard.")},
+    {"execute", (PyCFunction)connection_execute, METH_VARARGS,
+        PyDoc_STR("Executes a SQL statement. Non-standard.")},
+    {"executemany", (PyCFunction)connection_executemany, METH_VARARGS,
+        PyDoc_STR("Repeatedly executes a SQL statement. Non-standard.")},
+    {"executescript", (PyCFunction)connection_executescript, METH_VARARGS,
+        PyDoc_STR("Executes a multiple SQL statements at once. Non-standard.")},
+    {"create_collation", (PyCFunction)connection_create_collation, METH_VARARGS,
+        PyDoc_STR("Creates a collation function.")},
+    {NULL, NULL}
+};
+
+static struct PyMemberDef connection_members[] =
+{
+    {"Warning", T_OBJECT, offsetof(Connection, Warning), RO},
+    {"Error", T_OBJECT, offsetof(Connection, Error), RO},
+    {"InterfaceError", T_OBJECT, offsetof(Connection, InterfaceError), RO},
+    {"DatabaseError", T_OBJECT, offsetof(Connection, DatabaseError), RO},
+    {"DataError", T_OBJECT, offsetof(Connection, DataError), RO},
+    {"OperationalError", T_OBJECT, offsetof(Connection, OperationalError), RO},
+    {"IntegrityError", T_OBJECT, offsetof(Connection, IntegrityError), RO},
+    {"InternalError", T_OBJECT, offsetof(Connection, InternalError), RO},
+    {"ProgrammingError", T_OBJECT, offsetof(Connection, ProgrammingError), RO},
+    {"NotSupportedError", T_OBJECT, offsetof(Connection, NotSupportedError), RO},
+    {"row_factory", T_OBJECT, offsetof(Connection, row_factory)},
+    {"text_factory", T_OBJECT, offsetof(Connection, text_factory)},
+    {NULL}
+};
+
+PyTypeObject ConnectionType = {
+        PyObject_HEAD_INIT(NULL)
+        0,                                              /* ob_size */
+        MODULE_NAME ".Connection",                      /* tp_name */
+        sizeof(Connection),                             /* tp_basicsize */
+        0,                                              /* tp_itemsize */
+        (destructor)connection_dealloc,                 /* tp_dealloc */
+        0,                                              /* tp_print */
+        0,                                              /* tp_getattr */
+        0,                                              /* tp_setattr */
+        0,                                              /* tp_compare */
+        0,                                              /* tp_repr */
+        0,                                              /* tp_as_number */
+        0,                                              /* tp_as_sequence */
+        0,                                              /* tp_as_mapping */
+        0,                                              /* tp_hash */
+        (ternaryfunc)connection_call,                   /* tp_call */
+        0,                                              /* tp_str */
+        0,                                              /* tp_getattro */
+        0,                                              /* tp_setattro */
+        0,                                              /* tp_as_buffer */
+        Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE,         /* tp_flags */
+        connection_doc,                                 /* tp_doc */
+        0,                                              /* tp_traverse */
+        0,                                              /* tp_clear */
+        0,                                              /* tp_richcompare */
+        0,                                              /* tp_weaklistoffset */
+        0,                                              /* tp_iter */
+        0,                                              /* tp_iternext */
+        connection_methods,                             /* tp_methods */
+        connection_members,                             /* tp_members */
+        connection_getset,                              /* tp_getset */
+        0,                                              /* tp_base */
+        0,                                              /* tp_dict */
+        0,                                              /* tp_descr_get */
+        0,                                              /* tp_descr_set */
+        0,                                              /* tp_dictoffset */
+        (initproc)connection_init,                      /* tp_init */
+        0,                                              /* tp_alloc */
+        0,                                              /* tp_new */
+        0                                               /* tp_free */
+};
+
+extern int connection_setup_types(void)
+{
+    ConnectionType.tp_new = PyType_GenericNew;
+    return PyType_Ready(&ConnectionType);
+}
diff --git a/Modules/_sqlite/connection.h b/Modules/_sqlite/connection.h
new file mode 100644
index 0000000..faae6e4
--- /dev/null
+++ b/Modules/_sqlite/connection.h
@@ -0,0 +1,106 @@
+/* connection.h - definitions for the connection type
+ *
+ * Copyright (C) 2004-2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_CONNECTION_H
+#define PYSQLITE_CONNECTION_H
+#include "Python.h"
+#include "pythread.h"
+#include "structmember.h"
+
+#include "cache.h"
+#include "module.h"
+
+#include "sqlite3.h"
+
+typedef struct
+{
+    PyObject_HEAD
+    sqlite3* db;
+
+    int inTransaction;
+    int detect_types;
+
+    /* the timeout value in seconds for database locks */
+    double timeout;
+
+    /* for internal use in the timeout handler: when did the timeout handler
+     * first get called with count=0? */
+    double timeout_started;
+
+    /* None for autocommit, otherwise a PyString with the isolation level */
+    PyObject* isolation_level;
+
+    /* NULL for autocommit, otherwise a string with the BEGIN statment; will be
+     * freed in connection destructor */
+    char* begin_statement;
+
+    int check_same_thread;
+    long thread_ident;
+
+    Cache* statement_cache;
+
+    PyObject* row_factory;
+
+    PyObject* text_factory;
+
+    /* remember references to functions/classes used in
+     * create_function/create/aggregate, use these as dictionary keys, so we
+     * can keep the total system refcount constant by clearing that dictionary
+     * in connection_dealloc */
+    PyObject* function_pinboard;
+
+    /* a dictionary of registered collation name => collation callable mappings */
+    PyObject* collations;
+
+    /* Exception objects */
+    PyObject* Warning;
+    PyObject* Error;
+    PyObject* InterfaceError;
+    PyObject* DatabaseError;
+    PyObject* DataError;
+    PyObject* OperationalError;
+    PyObject* IntegrityError;
+    PyObject* InternalError;
+    PyObject* ProgrammingError;
+    PyObject* NotSupportedError;
+} Connection;
+
+extern PyTypeObject ConnectionType;
+
+PyObject* connection_alloc(PyTypeObject* type, int aware);
+void connection_dealloc(Connection* self);
+PyObject* connection_cursor(Connection* self, PyObject* args, PyObject* kwargs);
+PyObject* connection_close(Connection* self, PyObject* args);
+PyObject* _connection_begin(Connection* self);
+PyObject* connection_begin(Connection* self, PyObject* args);
+PyObject* connection_commit(Connection* self, PyObject* args);
+PyObject* connection_rollback(Connection* self, PyObject* args);
+PyObject* connection_new(PyTypeObject* type, PyObject* args, PyObject* kw);
+int connection_init(Connection* self, PyObject* args, PyObject* kwargs);
+
+int check_thread(Connection* self);
+int check_connection(Connection* con);
+
+int connection_setup_types(void);
+
+#endif
diff --git a/Modules/_sqlite/converters.c b/Modules/_sqlite/converters.c
new file mode 100644
index 0000000..018063a
--- /dev/null
+++ b/Modules/_sqlite/converters.c
@@ -0,0 +1,40 @@
+/* converters.c - default converters
+ *
+ * Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include "util.h"
+#include "module.h"
+#include "adapters.h"
+
+/* dummy, will be implemented in a later version */
+
+PyObject* convert_date(PyObject* self, PyObject* args, PyObject* kwargs)
+{
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+PyObject* convert_timestamp(PyObject* self, PyObject* args, PyObject* kwargs)
+{
+    Py_INCREF(Py_None);
+    return Py_None;
+}
diff --git a/Modules/_sqlite/converters.h b/Modules/_sqlite/converters.h
new file mode 100644
index 0000000..df3768a
--- /dev/null
+++ b/Modules/_sqlite/converters.h
@@ -0,0 +1,33 @@
+/* converters.h - default converters
+ *
+ * Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_CONVERTERS_H
+#define PYSQLITE_CONVERTERS_H
+#include "Python.h"
+#include "pythread.h"
+#include "sqlite3.h"
+
+PyObject* convert_date(PyObject* self, PyObject* args, PyObject* kwargs);
+PyObject* convert_timestamp(PyObject* self, PyObject* args, PyObject* kwargs);
+
+#endif
diff --git a/Modules/_sqlite/cursor.c b/Modules/_sqlite/cursor.c
new file mode 100644
index 0000000..c6b8c77
--- /dev/null
+++ b/Modules/_sqlite/cursor.c
@@ -0,0 +1,1027 @@
+/* cursor.c - the cursor type
+ *
+ * Copyright (C) 2004-2006 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include "cursor.h"
+#include "module.h"
+#include "util.h"
+#include "sqlitecompat.h"
+
+/* used to decide wether to call PyInt_FromLong or PyLong_FromLongLong */
+#define INT32_MIN (-2147483647 - 1)
+#define INT32_MAX 2147483647
+
+PyObject* cursor_iternext(Cursor *self);
+
+static StatementKind detect_statement_type(char* statement)
+{
+    char buf[20];
+    char* src;
+    char* dst;
+
+    src = statement;
+    /* skip over whitepace */
+    while (*src == '\r' || *src == '\n' || *src == ' ' || *src == '\t') {
+        src++;
+    }
+
+    if (*src == 0)
+        return STATEMENT_INVALID;
+
+    dst = buf;
+    *dst = 0;
+    while (isalpha(*src) && dst - buf < sizeof(buf) - 2) {
+        *dst++ = tolower(*src++);
+    }
+
+    *dst = 0;
+
+    if (!strcmp(buf, "select")) {
+        return STATEMENT_SELECT;
+    } else if (!strcmp(buf, "insert")) {
+        return STATEMENT_INSERT;
+    } else if (!strcmp(buf, "update")) {
+        return STATEMENT_UPDATE;
+    } else if (!strcmp(buf, "delete")) {
+        return STATEMENT_DELETE;
+    } else if (!strcmp(buf, "replace")) {
+        return STATEMENT_REPLACE;
+    } else {
+        return STATEMENT_OTHER;
+    }
+}
+
+int cursor_init(Cursor* self, PyObject* args, PyObject* kwargs)
+{
+    Connection* connection;
+
+    if (!PyArg_ParseTuple(args, "O!", &ConnectionType, &connection))
+    {
+        return -1; 
+    }
+
+    Py_INCREF(connection);
+    self->connection = connection;
+    self->statement = NULL;
+    self->next_row = NULL;
+
+    self->row_cast_map = PyList_New(0);
+    if (!self->row_cast_map) {
+        return -1;
+    }
+
+    Py_INCREF(Py_None);
+    self->description = Py_None;
+
+    Py_INCREF(Py_None);
+    self->lastrowid= Py_None;
+
+    self->arraysize = 1;
+
+    self->rowcount = PyInt_FromLong(-1L);
+    if (!self->rowcount) {
+        return -1;
+    }
+
+    Py_INCREF(Py_None);
+    self->row_factory = Py_None;
+
+    if (!check_thread(self->connection)) {
+        return -1;
+    }
+
+    return 0;
+}
+
+void cursor_dealloc(Cursor* self)
+{
+    int rc;
+
+    /* Reset the statement if the user has not closed the cursor */
+    if (self->statement) {
+        rc = statement_reset(self->statement);
+        Py_DECREF(self->statement);
+    }
+
+    Py_XDECREF(self->connection);
+    Py_XDECREF(self->row_cast_map);
+    Py_XDECREF(self->description);
+    Py_XDECREF(self->lastrowid);
+    Py_XDECREF(self->rowcount);
+    Py_XDECREF(self->row_factory);
+    Py_XDECREF(self->next_row);
+
+    self->ob_type->tp_free((PyObject*)self);
+}
+
+int build_row_cast_map(Cursor* self)
+{
+    int i;
+    const char* type_start = (const char*)-1;
+    const char* pos;
+
+    const char* colname;
+    const char* decltype;
+    PyObject* py_decltype;
+    PyObject* converter;
+    PyObject* key;
+
+    if (!self->connection->detect_types) {
+        return 0;
+    }
+
+    Py_XDECREF(self->row_cast_map);
+    self->row_cast_map = PyList_New(0);
+
+    for (i = 0; i < sqlite3_column_count(self->statement->st); i++) {
+        converter = NULL;
+
+        if (self->connection->detect_types | PARSE_COLNAMES) {
+            colname = sqlite3_column_name(self->statement->st, i);
+
+            for (pos = colname; *pos != 0; pos++) {
+                if (*pos == '[') {
+                    type_start = pos + 1;
+                } else if (*pos == ']' && type_start != (const char*)-1) {
+                    key = PyString_FromStringAndSize(type_start, pos - type_start);
+                    if (!key) {
+                        /* creating a string failed, but it is too complicated
+                         * to propagate the error here, we just assume there is
+                         * no converter and proceed */
+                        break;
+                    }
+
+                    converter = PyDict_GetItem(converters, key);
+                    Py_DECREF(key);
+                    break;
+                }
+
+            }
+        }
+
+        if (!converter && self->connection->detect_types | PARSE_DECLTYPES) {
+            decltype = sqlite3_column_decltype(self->statement->st, i);
+            if (decltype) {
+                for (pos = decltype;;pos++) {
+                    if (*pos == ' ' || *pos == 0) {
+                        py_decltype = PyString_FromStringAndSize(decltype, pos - decltype);
+                        if (!py_decltype) {
+                            return -1;
+                        }
+                        break;
+                    }
+                }
+
+                converter = PyDict_GetItem(converters, py_decltype);
+                Py_DECREF(py_decltype);
+            }
+        }
+
+        if (!converter) {
+            converter = Py_None;
+        }
+
+        if (PyList_Append(self->row_cast_map, converter) != 0) {
+            if (converter != Py_None) {
+                Py_DECREF(converter);
+            }
+            Py_XDECREF(self->row_cast_map);
+            self->row_cast_map = NULL;
+
+            return -1;
+        }
+    }
+
+    return 0;
+}
+
+PyObject* _build_column_name(const char* colname)
+{
+    const char* pos;
+
+    if (!colname) {
+        Py_INCREF(Py_None);
+        return Py_None;
+    }
+
+    for (pos = colname;; pos++) {
+        if (*pos == 0 || *pos == ' ') {
+            return PyString_FromStringAndSize(colname, pos - colname);
+        }
+    }
+}
+
+PyObject* unicode_from_string(const char* val_str, int optimize)
+{
+    const char* check;
+    int is_ascii = 0;
+
+    if (optimize) {
+        is_ascii = 1;
+
+        check = val_str;
+        while (*check) {
+            if (*check & 0x80) {
+                is_ascii = 0;
+                break;
+            }
+
+            check++;
+        }
+    }
+
+    if (is_ascii) {
+        return PyString_FromString(val_str);
+    } else {
+        return PyUnicode_DecodeUTF8(val_str, strlen(val_str), NULL);
+    }
+}
+
+/*
+ * Returns a row from the currently active SQLite statement
+ *
+ * Precondidition:
+ * - sqlite3_step() has been called before and it returned SQLITE_ROW.
+ */
+PyObject* _fetch_one_row(Cursor* self)
+{
+    int i, numcols;
+    PyObject* row;
+    PyObject* item = NULL;
+    int coltype;
+    PY_LONG_LONG intval;
+    PyObject* converter;
+    PyObject* converted;
+    Py_ssize_t nbytes;
+    PyObject* buffer;
+    void* raw_buffer;
+    const char* val_str;
+    char buf[200];
+
+    Py_BEGIN_ALLOW_THREADS
+    numcols = sqlite3_data_count(self->statement->st);
+    Py_END_ALLOW_THREADS
+
+    row = PyTuple_New(numcols);
+    if (!row) {
+        return NULL;
+    }
+
+    for (i = 0; i < numcols; i++) {
+        if (self->connection->detect_types) {
+            converter = PyList_GetItem(self->row_cast_map, i);
+            if (!converter) {
+                converter = Py_None;
+            }
+        } else {
+            converter = Py_None;
+        }
+
+        if (converter != Py_None) {
+            val_str = (const char*)sqlite3_column_text(self->statement->st, i);
+            if (!val_str) {
+                Py_INCREF(Py_None);
+                converted = Py_None;
+            } else {
+                item = PyString_FromString(val_str);
+                if (!item) {
+                    return NULL;
+                }
+                converted = PyObject_CallFunction(converter, "O", item);
+                if (!converted) {
+                    /* TODO: have a way to log these errors */
+                    Py_INCREF(Py_None);
+                    converted = Py_None;
+                    PyErr_Clear();
+                }
+                Py_DECREF(item);
+            }
+        } else {
+            Py_BEGIN_ALLOW_THREADS
+            coltype = sqlite3_column_type(self->statement->st, i);
+            Py_END_ALLOW_THREADS
+            if (coltype == SQLITE_NULL) {
+                Py_INCREF(Py_None);
+                converted = Py_None;
+            } else if (coltype == SQLITE_INTEGER) {
+                intval = sqlite3_column_int64(self->statement->st, i);
+                if (intval < INT32_MIN || intval > INT32_MAX) {
+                    converted = PyLong_FromLongLong(intval);
+                } else {
+                    converted = PyInt_FromLong((long)intval);
+                }
+            } else if (coltype == SQLITE_FLOAT) {
+                converted = PyFloat_FromDouble(sqlite3_column_double(self->statement->st, i));
+            } else if (coltype == SQLITE_TEXT) {
+                val_str = (const char*)sqlite3_column_text(self->statement->st, i);
+                if ((self->connection->text_factory == (PyObject*)&PyUnicode_Type)
+                    || (self->connection->text_factory == OptimizedUnicode)) {
+
+                    converted = unicode_from_string(val_str,
+                        self->connection->text_factory == OptimizedUnicode ? 1 : 0);
+
+                    if (!converted) {
+                        PyOS_snprintf(buf, sizeof(buf) - 1, "Could not decode to UTF-8 column %s with text %s",
+                                    sqlite3_column_name(self->statement->st, i), val_str);
+                        PyErr_SetString(OperationalError, buf);
+                    }
+                } else if (self->connection->text_factory == (PyObject*)&PyString_Type) {
+                    converted = PyString_FromString(val_str);
+                } else {
+                    converted = PyObject_CallFunction(self->connection->text_factory, "s", val_str);
+                }
+            } else {
+                /* coltype == SQLITE_BLOB */
+                nbytes = sqlite3_column_bytes(self->statement->st, i);
+                buffer = PyBuffer_New(nbytes);
+                if (!buffer) {
+                    break;
+                }
+                if (PyObject_AsWriteBuffer(buffer, &raw_buffer, &nbytes)) {
+                    break;
+                }
+                memcpy(raw_buffer, sqlite3_column_blob(self->statement->st, i), nbytes);
+                converted = buffer;
+            }
+        }
+
+        PyTuple_SetItem(row, i, converted);
+    }
+
+    if (PyErr_Occurred()) {
+        Py_DECREF(row);
+        row = NULL;
+    }
+
+    return row;
+}
+
+PyObject* _query_execute(Cursor* self, int multiple, PyObject* args)
+{
+    PyObject* operation;
+    PyObject* operation_bytestr = NULL;
+    char* operation_cstr;
+    PyObject* parameters_list = NULL;
+    PyObject* parameters_iter = NULL;
+    PyObject* parameters = NULL;
+    int i;
+    int rc;
+    PyObject* func_args;
+    PyObject* result;
+    int numcols;
+    PY_LONG_LONG lastrowid;
+    int statement_type;
+    PyObject* descriptor;
+    PyObject* second_argument = NULL;
+    long rowcount = 0;
+
+    if (!check_thread(self->connection) || !check_connection(self->connection)) {
+        return NULL;
+    }
+
+    Py_XDECREF(self->next_row);
+    self->next_row = NULL;
+
+    if (multiple) {
+        /* executemany() */
+        if (!PyArg_ParseTuple(args, "OO", &operation, &second_argument)) {
+            return NULL; 
+        }
+
+        if (!PyString_Check(operation) && !PyUnicode_Check(operation)) {
+            PyErr_SetString(PyExc_ValueError, "operation parameter must be str or unicode");
+            return NULL;
+        }
+
+        if (PyIter_Check(second_argument)) {
+            /* iterator */
+            Py_INCREF(second_argument);
+            parameters_iter = second_argument;
+        } else {
+            /* sequence */
+            parameters_iter = PyObject_GetIter(second_argument);
+            if (!parameters_iter)
+            {
+                return NULL;
+            }
+        }
+    } else {
+        /* execute() */
+        if (!PyArg_ParseTuple(args, "O|O", &operation, &second_argument)) {
+            return NULL; 
+        }
+
+        if (!PyString_Check(operation) && !PyUnicode_Check(operation)) {
+            PyErr_SetString(PyExc_ValueError, "operation parameter must be str or unicode");
+            return NULL;
+        }
+
+        parameters_list = PyList_New(0);
+        if (!parameters_list) {
+            return NULL;
+        }
+
+        if (second_argument == NULL) {
+            second_argument = PyTuple_New(0);
+            if (!second_argument) {
+                goto error;
+            }
+        } else {
+            Py_INCREF(second_argument);
+        }
+        if (PyList_Append(parameters_list, second_argument) != 0) {
+            Py_DECREF(second_argument);
+            goto error;
+        }
+        Py_DECREF(second_argument);
+
+        parameters_iter = PyObject_GetIter(parameters_list);
+        if (!parameters_iter) {
+            goto error;
+        }
+    }
+
+    if (self->statement != NULL) {
+        /* There is an active statement */
+        rc = statement_reset(self->statement);
+    }
+
+    if (PyString_Check(operation)) {
+        operation_cstr = PyString_AsString(operation);
+    } else {
+        operation_bytestr = PyUnicode_AsUTF8String(operation);
+        if (!operation_bytestr) {
+            goto error;
+        }
+
+        operation_cstr = PyString_AsString(operation_bytestr);
+    }
+
+    /* reset description and rowcount */
+    Py_DECREF(self->description);
+    Py_INCREF(Py_None);
+    self->description = Py_None;
+
+    Py_DECREF(self->rowcount);
+    self->rowcount = PyInt_FromLong(-1L);
+    if (!self->rowcount) {
+        goto error;
+    }
+
+    statement_type = detect_statement_type(operation_cstr);
+    if (self->connection->begin_statement) {
+        switch (statement_type) {
+            case STATEMENT_UPDATE:
+            case STATEMENT_DELETE:
+            case STATEMENT_INSERT:
+            case STATEMENT_REPLACE:
+                if (!self->connection->inTransaction) {
+                    result = _connection_begin(self->connection);
+                    if (!result) {
+                        goto error;
+                    }
+                    Py_DECREF(result);
+                }
+                break;
+            case STATEMENT_OTHER:
+                /* it's a DDL statement or something similar
+                   - we better COMMIT first so it works for all cases */
+                if (self->connection->inTransaction) {
+                    func_args = PyTuple_New(0);
+                    if (!func_args) {
+                        goto error;
+                    }
+                    result = connection_commit(self->connection, func_args);
+                    Py_DECREF(func_args);
+                    if (!result) {
+                        goto error;
+                    }
+                    Py_DECREF(result);
+                }
+                break;
+            case STATEMENT_SELECT:
+                if (multiple) {
+                    PyErr_SetString(ProgrammingError,
+                                "You cannot execute SELECT statements in executemany().");
+                    goto error;
+                }
+                break;
+        }
+    }
+
+    func_args = PyTuple_New(1);
+    if (!func_args) {
+        goto error;
+    }
+    Py_INCREF(operation);
+    if (PyTuple_SetItem(func_args, 0, operation) != 0) {
+        goto error;
+    }
+
+    if (self->statement) {
+        (void)statement_reset(self->statement);
+        Py_DECREF(self->statement);
+    }
+
+    self->statement = (Statement*)cache_get(self->connection->statement_cache, func_args);
+    Py_DECREF(func_args);
+
+    if (!self->statement) {
+        goto error;
+    }
+
+    if (self->statement->in_use) {
+        Py_DECREF(self->statement);
+        self->statement = PyObject_New(Statement, &StatementType);
+        if (!self->statement) {
+            goto error;
+        }
+        rc = statement_create(self->statement, self->connection, operation);
+        if (rc != SQLITE_OK) {
+            self->statement = 0;
+            goto error;
+        }
+    }
+
+    statement_reset(self->statement);
+    statement_mark_dirty(self->statement);
+
+    while (1) {
+        parameters = PyIter_Next(parameters_iter);
+        if (!parameters) {
+            break;
+        }
+
+        statement_mark_dirty(self->statement);
+
+        statement_bind_parameters(self->statement, parameters);
+        if (PyErr_Occurred()) {
+            goto error;
+        }
+
+        if (build_row_cast_map(self) != 0) {
+            PyErr_SetString(OperationalError, "Error while building row_cast_map");
+            goto error;
+        }
+
+        rc = _sqlite_step_with_busyhandler(self->statement->st, self->connection);
+        if (rc != SQLITE_DONE && rc != SQLITE_ROW) {
+            rc = statement_reset(self->statement);
+            if (rc == SQLITE_SCHEMA) {
+                rc = statement_recompile(self->statement, parameters);
+                if (rc == SQLITE_OK) {
+                    rc = _sqlite_step_with_busyhandler(self->statement->st, self->connection);
+                } else {
+                    _seterror(self->connection->db);
+                    goto error;
+                }
+            } else {
+                _seterror(self->connection->db);
+                goto error;
+            }
+        }
+
+        if (rc == SQLITE_ROW || (rc == SQLITE_DONE && statement_type == STATEMENT_SELECT)) {
+            Py_BEGIN_ALLOW_THREADS
+            numcols = sqlite3_column_count(self->statement->st);
+            Py_END_ALLOW_THREADS
+
+            if (self->description == Py_None) {
+                Py_DECREF(self->description);
+                self->description = PyTuple_New(numcols);
+                if (!self->description) {
+                    goto error;
+                }
+                for (i = 0; i < numcols; i++) {
+                    descriptor = PyTuple_New(7);
+                    if (!descriptor) {
+                        goto error;
+                    }
+                    PyTuple_SetItem(descriptor, 0, _build_column_name(sqlite3_column_name(self->statement->st, i)));
+                    Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 1, Py_None);
+                    Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 2, Py_None);
+                    Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 3, Py_None);
+                    Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 4, Py_None);
+                    Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 5, Py_None);
+                    Py_INCREF(Py_None); PyTuple_SetItem(descriptor, 6, Py_None);
+                    PyTuple_SetItem(self->description, i, descriptor);
+                }
+            }
+        }
+
+        if (rc == SQLITE_ROW) {
+            if (multiple) {
+                PyErr_SetString(ProgrammingError, "executemany() can only execute DML statements.");
+                goto error;
+            }
+
+            self->next_row = _fetch_one_row(self);
+        } else if (rc == SQLITE_DONE && !multiple) {
+            statement_reset(self->statement);
+            Py_DECREF(self->statement);
+            self->statement = 0;
+        }
+
+        switch (statement_type) {
+            case STATEMENT_UPDATE:
+            case STATEMENT_DELETE:
+            case STATEMENT_INSERT:
+            case STATEMENT_REPLACE:
+                Py_BEGIN_ALLOW_THREADS
+                rowcount += (long)sqlite3_changes(self->connection->db);
+                Py_END_ALLOW_THREADS
+                Py_DECREF(self->rowcount);
+                self->rowcount = PyInt_FromLong(rowcount);
+        }
+
+        Py_DECREF(self->lastrowid);
+        if (statement_type == STATEMENT_INSERT) {
+            Py_BEGIN_ALLOW_THREADS
+            lastrowid = sqlite3_last_insert_rowid(self->connection->db);
+            Py_END_ALLOW_THREADS
+            self->lastrowid = PyInt_FromLong((long)lastrowid);
+        } else {
+            Py_INCREF(Py_None);
+            self->lastrowid = Py_None;
+        }
+
+        if (multiple) {
+            rc = statement_reset(self->statement);
+        }
+        Py_XDECREF(parameters);
+    }
+
+error:
+    Py_XDECREF(operation_bytestr);
+    Py_XDECREF(parameters);
+    Py_XDECREF(parameters_iter);
+    Py_XDECREF(parameters_list);
+
+    if (PyErr_Occurred()) {
+        return NULL;
+    } else {
+        Py_INCREF(self);
+        return (PyObject*)self;
+    }
+}
+
+PyObject* cursor_execute(Cursor* self, PyObject* args)
+{
+    return _query_execute(self, 0, args);
+}
+
+PyObject* cursor_executemany(Cursor* self, PyObject* args)
+{
+    return _query_execute(self, 1, args);
+}
+
+PyObject* cursor_executescript(Cursor* self, PyObject* args)
+{
+    PyObject* script_obj;
+    PyObject* script_str = NULL;
+    const char* script_cstr;
+    sqlite3_stmt* statement;
+    int rc;
+    PyObject* func_args;
+    PyObject* result;
+    int statement_completed = 0;
+
+    if (!PyArg_ParseTuple(args, "O", &script_obj)) {
+        return NULL; 
+    }
+
+    if (!check_thread(self->connection) || !check_connection(self->connection)) {
+        return NULL;
+    }
+
+    if (PyString_Check(script_obj)) {
+        script_cstr = PyString_AsString(script_obj);
+    } else if (PyUnicode_Check(script_obj)) {
+        script_str = PyUnicode_AsUTF8String(script_obj);
+        if (!script_str) {
+            return NULL;
+        }
+
+        script_cstr = PyString_AsString(script_str);
+    } else {
+        PyErr_SetString(PyExc_ValueError, "script argument must be unicode or string.");
+        return NULL;
+    }
+
+    /* commit first */
+    func_args = PyTuple_New(0);
+    if (!func_args) {
+        goto error;
+    }
+    result = connection_commit(self->connection, func_args);
+    Py_DECREF(func_args);
+    if (!result) {
+        goto error;
+    }
+    Py_DECREF(result);
+
+    while (1) {
+        if (!sqlite3_complete(script_cstr)) {
+            break;
+        }
+        statement_completed = 1;
+
+        rc = sqlite3_prepare(self->connection->db,
+                             script_cstr,
+                             -1,
+                             &statement,
+                             &script_cstr);
+        if (rc != SQLITE_OK) {
+            _seterror(self->connection->db);
+            goto error;
+        }
+
+        /* execute statement, and ignore results of SELECT statements */
+        rc = SQLITE_ROW;
+        while (rc == SQLITE_ROW) {
+            rc = _sqlite_step_with_busyhandler(statement, self->connection);
+        }
+
+        if (rc != SQLITE_DONE) {
+            (void)sqlite3_finalize(statement);
+            _seterror(self->connection->db);
+            goto error;
+        }
+
+        rc = sqlite3_finalize(statement);
+        if (rc != SQLITE_OK) {
+            _seterror(self->connection->db);
+            goto error;
+        }
+    }
+
+error:
+    Py_XDECREF(script_str);
+
+    if (!statement_completed) {
+        PyErr_SetString(ProgrammingError, "you did not provide a complete SQL statement");
+    }
+
+    if (PyErr_Occurred()) {
+        return NULL;
+    } else {
+        Py_INCREF(self);
+        return (PyObject*)self;
+    }
+}
+
+PyObject* cursor_getiter(Cursor *self)
+{
+    Py_INCREF(self);
+    return (PyObject*)self;
+}
+
+PyObject* cursor_iternext(Cursor *self)
+{
+    PyObject* next_row_tuple;
+    PyObject* next_row;
+    int rc;
+
+    if (!check_thread(self->connection) || !check_connection(self->connection)) {
+        return NULL;
+    }
+
+    if (!self->next_row) {
+         if (self->statement) {
+            (void)statement_reset(self->statement);
+            Py_DECREF(self->statement);
+            self->statement = NULL;
+        }
+        return NULL;
+    }
+
+    next_row_tuple = self->next_row;
+    self->next_row = NULL;
+
+    if (self->row_factory != Py_None) {
+        next_row = PyObject_CallFunction(self->row_factory, "OO", self, next_row_tuple);
+        Py_DECREF(next_row_tuple);
+    } else {
+        next_row = next_row_tuple;
+    }
+
+    rc = _sqlite_step_with_busyhandler(self->statement->st, self->connection);
+    if (rc != SQLITE_DONE && rc != SQLITE_ROW) {
+        Py_DECREF(next_row);
+        _seterror(self->connection->db);
+        return NULL;
+    }
+
+    if (rc == SQLITE_ROW) {
+        self->next_row = _fetch_one_row(self);
+    }
+
+    return next_row;
+}
+
+PyObject* cursor_fetchone(Cursor* self, PyObject* args)
+{
+    PyObject* row;
+
+    row = cursor_iternext(self);
+    if (!row && !PyErr_Occurred()) {
+        Py_INCREF(Py_None);
+        return Py_None;
+    }
+
+    return row;
+}
+
+PyObject* cursor_fetchmany(Cursor* self, PyObject* args)
+{
+    PyObject* row;
+    PyObject* list;
+    int maxrows = self->arraysize;
+    int counter = 0;
+
+    if (!PyArg_ParseTuple(args, "|i", &maxrows)) {
+        return NULL; 
+    }
+
+    list = PyList_New(0);
+    if (!list) {
+        return NULL;
+    }
+
+    /* just make sure we enter the loop */
+    row = Py_None;
+
+    while (row) {
+        row = cursor_iternext(self);
+        if (row) {
+            PyList_Append(list, row);
+            Py_DECREF(row);
+        } else {
+            break;
+        }
+
+        if (++counter == maxrows) {
+            break;
+        }
+    }
+
+    if (PyErr_Occurred()) {
+        Py_DECREF(list);
+        return NULL;
+    } else {
+        return list;
+    }
+}
+
+PyObject* cursor_fetchall(Cursor* self, PyObject* args)
+{
+    PyObject* row;
+    PyObject* list;
+
+    list = PyList_New(0);
+    if (!list) {
+        return NULL;
+    }
+
+    /* just make sure we enter the loop */
+    row = (PyObject*)Py_None;
+
+    while (row) {
+        row = cursor_iternext(self);
+        if (row) {
+            PyList_Append(list, row);
+            Py_DECREF(row);
+        }
+    }
+
+    if (PyErr_Occurred()) {
+        Py_DECREF(list);
+        return NULL;
+    } else {
+        return list;
+    }
+}
+
+PyObject* pysqlite_noop(Connection* self, PyObject* args)
+{
+    /* don't care, return None */
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+PyObject* cursor_close(Cursor* self, PyObject* args)
+{
+    if (!check_thread(self->connection) || !check_connection(self->connection)) {
+        return NULL;
+    }
+
+    if (self->statement) {
+        (void)statement_reset(self->statement);
+        Py_DECREF(self->statement);
+        self->statement = 0;
+    }
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+static PyMethodDef cursor_methods[] = {
+    {"execute", (PyCFunction)cursor_execute, METH_VARARGS,
+        PyDoc_STR("Executes a SQL statement.")},
+    {"executemany", (PyCFunction)cursor_executemany, METH_VARARGS,
+        PyDoc_STR("Repeatedly executes a SQL statement.")},
+    {"executescript", (PyCFunction)cursor_executescript, METH_VARARGS,
+        PyDoc_STR("Executes a multiple SQL statements at once. Non-standard.")},
+    {"fetchone", (PyCFunction)cursor_fetchone, METH_NOARGS,
+        PyDoc_STR("Fetches several rows from the resultset.")},
+    {"fetchmany", (PyCFunction)cursor_fetchmany, METH_VARARGS,
+        PyDoc_STR("Fetches all rows from the resultset.")},
+    {"fetchall", (PyCFunction)cursor_fetchall, METH_NOARGS,
+        PyDoc_STR("Fetches one row from the resultset.")},
+    {"close", (PyCFunction)cursor_close, METH_NOARGS,
+        PyDoc_STR("Closes the cursor.")},
+    {"setinputsizes", (PyCFunction)pysqlite_noop, METH_VARARGS,
+        PyDoc_STR("Required by DB-API. Does nothing in pysqlite.")},
+    {"setoutputsize", (PyCFunction)pysqlite_noop, METH_VARARGS,
+        PyDoc_STR("Required by DB-API. Does nothing in pysqlite.")},
+    {NULL, NULL}
+};
+
+static struct PyMemberDef cursor_members[] =
+{
+    {"connection", T_OBJECT, offsetof(Cursor, connection), RO},
+    {"description", T_OBJECT, offsetof(Cursor, description), RO},
+    {"arraysize", T_INT, offsetof(Cursor, arraysize), 0},
+    {"lastrowid", T_OBJECT, offsetof(Cursor, lastrowid), RO},
+    {"rowcount", T_OBJECT, offsetof(Cursor, rowcount), RO},
+    {"row_factory", T_OBJECT, offsetof(Cursor, row_factory), 0},
+    {NULL}
+};
+
+PyTypeObject CursorType = {
+        PyObject_HEAD_INIT(NULL)
+        0,                                              /* ob_size */
+        MODULE_NAME ".Cursor",                          /* tp_name */
+        sizeof(Cursor),                                 /* tp_basicsize */
+        0,                                              /* tp_itemsize */
+        (destructor)cursor_dealloc,                     /* tp_dealloc */
+        0,                                              /* tp_print */
+        0,                                              /* tp_getattr */
+        0,                                              /* tp_setattr */
+        0,                                              /* tp_compare */
+        0,                                              /* tp_repr */
+        0,                                              /* tp_as_number */
+        0,                                              /* tp_as_sequence */
+        0,                                              /* tp_as_mapping */
+        0,                                              /* tp_hash */
+        0,                                              /* tp_call */
+        0,                                              /* tp_str */
+        0,                                              /* tp_getattro */
+        0,                                              /* tp_setattro */
+        0,                                              /* tp_as_buffer */
+        Py_TPFLAGS_DEFAULT|Py_TPFLAGS_HAVE_ITER|Py_TPFLAGS_BASETYPE, /* tp_flags */
+        0,                                              /* tp_doc */
+        0,                                              /* tp_traverse */
+        0,                                              /* tp_clear */
+        0,                                              /* tp_richcompare */
+        0,                                              /* tp_weaklistoffset */
+        (getiterfunc)cursor_getiter,                    /* tp_iter */
+        (iternextfunc)cursor_iternext,                  /* tp_iternext */
+        cursor_methods,                                 /* tp_methods */
+        cursor_members,                                 /* tp_members */
+        0,                                              /* tp_getset */
+        0,                                              /* tp_base */
+        0,                                              /* tp_dict */
+        0,                                              /* tp_descr_get */
+        0,                                              /* tp_descr_set */
+        0,                                              /* tp_dictoffset */
+        (initproc)cursor_init,                          /* tp_init */
+        0,                                              /* tp_alloc */
+        0,                                              /* tp_new */
+        0                                               /* tp_free */
+};
+
+extern int cursor_setup_types(void)
+{
+    CursorType.tp_new = PyType_GenericNew;
+    return PyType_Ready(&CursorType);
+}
diff --git a/Modules/_sqlite/cursor.h b/Modules/_sqlite/cursor.h
new file mode 100644
index 0000000..7f56799
--- /dev/null
+++ b/Modules/_sqlite/cursor.h
@@ -0,0 +1,71 @@
+/* cursor.h - definitions for the cursor type
+ *
+ * Copyright (C) 2004-2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_CURSOR_H
+#define PYSQLITE_CURSOR_H
+#include "Python.h"
+
+#include "statement.h"
+#include "connection.h"
+#include "module.h"
+
+typedef struct
+{
+    PyObject_HEAD
+    Connection* connection;
+    PyObject* description;
+    PyObject* row_cast_map;
+    int arraysize;
+    PyObject* lastrowid;
+    PyObject* rowcount;
+    PyObject* row_factory;
+    Statement* statement;
+
+    /* the next row to be returned, NULL if no next row available */
+    PyObject* next_row;
+} Cursor;
+
+typedef enum {
+    STATEMENT_INVALID, STATEMENT_INSERT, STATEMENT_DELETE,
+    STATEMENT_UPDATE, STATEMENT_REPLACE, STATEMENT_SELECT,
+    STATEMENT_OTHER
+} StatementKind;
+
+extern PyTypeObject CursorType;
+
+int cursor_init(Cursor* self, PyObject* args, PyObject* kwargs);
+void cursor_dealloc(Cursor* self);
+PyObject* cursor_execute(Cursor* self, PyObject* args);
+PyObject* cursor_executemany(Cursor* self, PyObject* args);
+PyObject* cursor_getiter(Cursor *self);
+PyObject* cursor_iternext(Cursor *self);
+PyObject* cursor_fetchone(Cursor* self, PyObject* args);
+PyObject* cursor_fetchmany(Cursor* self, PyObject* args);
+PyObject* cursor_fetchall(Cursor* self, PyObject* args);
+PyObject* pysqlite_noop(Connection* self, PyObject* args);
+PyObject* cursor_close(Cursor* self, PyObject* args);
+
+int cursor_setup_types(void);
+
+#define UNKNOWN (-1)
+#endif
diff --git a/Modules/_sqlite/microprotocols.c b/Modules/_sqlite/microprotocols.c
new file mode 100644
index 0000000..4956ac0
--- /dev/null
+++ b/Modules/_sqlite/microprotocols.c
@@ -0,0 +1,142 @@
+/* microprotocols.c - minimalist and non-validating protocols implementation
+ *
+ * Copyright (C) 2003-2004 Federico Di Gregorio <fog@debian.org>
+ *
+ * This file is part of psycopg and was adapted for pysqlite. Federico Di
+ * Gregorio gave the permission to use it within pysqlite under the following
+ * license:
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include <Python.h>
+#include <structmember.h>
+
+#include "cursor.h"
+#include "microprotocols.h"
+#include "prepare_protocol.h"
+
+
+/** the adapters registry **/
+
+PyObject *psyco_adapters;
+
+/* microprotocols_init - initialize the adapters dictionary */
+
+int
+microprotocols_init(PyObject *dict)
+{
+    /* create adapters dictionary and put it in module namespace */
+    if ((psyco_adapters = PyDict_New()) == NULL) {
+        return -1;
+    }
+
+    return PyDict_SetItemString(dict, "adapters", psyco_adapters);
+}
+
+
+/* microprotocols_add - add a reverse type-caster to the dictionary */
+
+int
+microprotocols_add(PyTypeObject *type, PyObject *proto, PyObject *cast)
+{
+    PyObject* key;
+    int rc;
+
+    if (proto == NULL) proto = (PyObject*)&SQLitePrepareProtocolType;
+
+    key = Py_BuildValue("(OO)", (PyObject*)type, proto);
+    if (!key) {
+        return -1;
+    }
+
+    rc = PyDict_SetItem(psyco_adapters, key, cast);
+    Py_DECREF(key);
+
+    return rc;
+}
+
+/* microprotocols_adapt - adapt an object to the built-in protocol */
+
+PyObject *
+microprotocols_adapt(PyObject *obj, PyObject *proto, PyObject *alt)
+{
+    PyObject *adapter, *key;
+
+    /* we don't check for exact type conformance as specified in PEP 246
+       because the SQLitePrepareProtocolType type is abstract and there is no
+       way to get a quotable object to be its instance */
+
+    /* look for an adapter in the registry */
+    key = Py_BuildValue("(OO)", (PyObject*)obj->ob_type, proto);
+    if (!key) {
+        return NULL;
+    }
+    adapter = PyDict_GetItem(psyco_adapters, key);
+    Py_DECREF(key);
+    if (adapter) {
+        PyObject *adapted = PyObject_CallFunctionObjArgs(adapter, obj, NULL);
+        return adapted;
+    }
+
+    /* try to have the protocol adapt this object*/
+    if (PyObject_HasAttrString(proto, "__adapt__")) {
+        PyObject *adapted = PyObject_CallMethod(proto, "__adapt__", "O", obj);
+        if (adapted) {
+            if (adapted != Py_None) {
+                return adapted;
+            } else {
+                Py_DECREF(adapted);
+            }
+        }
+
+        if (PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_TypeError))
+            return NULL;
+    }
+
+    /* and finally try to have the object adapt itself */
+    if (PyObject_HasAttrString(obj, "__conform__")) {
+        PyObject *adapted = PyObject_CallMethod(obj, "__conform__","O", proto);
+        if (adapted) {
+            if (adapted != Py_None) {
+                return adapted;
+            } else {
+                Py_DECREF(adapted);
+            }
+        }
+
+        if (PyErr_Occurred() && !PyErr_ExceptionMatches(PyExc_TypeError)) {
+            return NULL;
+        }
+    }
+
+    /* else set the right exception and return NULL */
+    PyErr_SetString(ProgrammingError, "can't adapt");
+    return NULL;
+}
+
+/** module-level functions **/
+
+PyObject *
+psyco_microprotocols_adapt(Cursor *self, PyObject *args)
+{
+    PyObject *obj, *alt = NULL;
+    PyObject *proto = (PyObject*)&SQLitePrepareProtocolType;
+
+    if (!PyArg_ParseTuple(args, "O|OO", &obj, &proto, &alt)) return NULL;
+    return microprotocols_adapt(obj, proto, alt);
+}
diff --git a/Modules/_sqlite/microprotocols.h b/Modules/_sqlite/microprotocols.h
new file mode 100644
index 0000000..d2d9b65
--- /dev/null
+++ b/Modules/_sqlite/microprotocols.h
@@ -0,0 +1,59 @@
+/* microprotocols.c - definitions for minimalist and non-validating protocols
+ *
+ * Copyright (C) 2003-2004 Federico Di Gregorio <fog@debian.org>
+ *
+ * This file is part of psycopg and was adapted for pysqlite. Federico Di
+ * Gregorio gave the permission to use it within pysqlite under the following
+ * license:
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PSYCOPG_MICROPROTOCOLS_H
+#define PSYCOPG_MICROPROTOCOLS_H 1
+
+#include <Python.h>
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+/** adapters registry **/
+
+extern PyObject *psyco_adapters;
+
+/** the names of the three mandatory methods **/
+
+#define MICROPROTOCOLS_GETQUOTED_NAME "getquoted"
+#define MICROPROTOCOLS_GETSTRING_NAME "getstring"
+#define MICROPROTOCOLS_GETBINARY_NAME "getbinary"
+
+/** exported functions **/
+
+/* used by module.c to init the microprotocols system */
+extern int microprotocols_init(PyObject *dict);
+extern int microprotocols_add(
+    PyTypeObject *type, PyObject *proto, PyObject *cast);
+extern PyObject *microprotocols_adapt(
+    PyObject *obj, PyObject *proto, PyObject *alt);
+
+extern PyObject *
+    psyco_microprotocols_adapt(Cursor* self, PyObject *args);   
+#define psyco_microprotocols_adapt_doc \
+    "adapt(obj, protocol, alternate) -> adapt obj to given protocol"
+    
+#endif /* !defined(PSYCOPG_MICROPROTOCOLS_H) */
diff --git a/Modules/_sqlite/module.c b/Modules/_sqlite/module.c
new file mode 100644
index 0000000..1537e79
--- /dev/null
+++ b/Modules/_sqlite/module.c
@@ -0,0 +1,325 @@
+/* module.c - the module itself
+ *
+ * Copyright (C) 2004-2006 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include "connection.h"
+#include "statement.h"
+#include "cursor.h"
+#include "cache.h"
+#include "prepare_protocol.h"
+#include "microprotocols.h"
+#include "row.h"
+
+#if SQLITE_VERSION_NUMBER >= 3003003
+#define HAVE_SHARED_CACHE
+#endif
+
+/* static objects at module-level */
+
+PyObject* Error, *Warning, *InterfaceError, *DatabaseError, *InternalError,
+    *OperationalError, *ProgrammingError, *IntegrityError, *DataError,
+    *NotSupportedError, *OptimizedUnicode;
+
+PyObject* converters;
+
+static PyObject* module_connect(PyObject* self, PyObject* args, PyObject*
+        kwargs)
+{
+    /* Python seems to have no way of extracting a single keyword-arg at
+     * C-level, so this code is redundant with the one in connection_init in
+     * connection.c and must always be copied from there ... */
+
+    static char *kwlist[] = {"database", "timeout", "detect_types", "isolation_level", "check_same_thread", "factory", "cached_statements", NULL, NULL};
+    char* database;
+    int detect_types = 0;
+    PyObject* isolation_level;
+    PyObject* factory = NULL;
+    int check_same_thread = 1;
+    int cached_statements;
+    double timeout = 5.0;
+
+    PyObject* result;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s|diOiOi", kwlist,
+                                     &database, &timeout, &detect_types, &isolation_level, &check_same_thread, &factory, &cached_statements))
+    {
+        return NULL; 
+    }
+
+    if (factory == NULL) {
+        factory = (PyObject*)&ConnectionType;
+    }
+
+    result = PyObject_Call(factory, args, kwargs);
+
+    return result;
+}
+
+static PyObject* module_complete(PyObject* self, PyObject* args, PyObject*
+        kwargs)
+{
+    static char *kwlist[] = {"statement", NULL, NULL};
+    char* statement;
+
+    PyObject* result;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s", kwlist, &statement))
+    {
+        return NULL; 
+    }
+
+    if (sqlite3_complete(statement)) {
+        result = Py_True;
+    } else {
+        result = Py_False;
+    }
+
+    Py_INCREF(result);
+
+    return result;
+}
+
+#ifdef HAVE_SHARED_CACHE
+static PyObject* module_enable_shared_cache(PyObject* self, PyObject* args, PyObject*
+        kwargs)
+{
+    static char *kwlist[] = {"do_enable", NULL, NULL};
+    int do_enable;
+    int rc;
+
+    if (!PyArg_ParseTupleAndKeywords(args, kwargs, "i", kwlist, &do_enable))
+    {
+        return NULL; 
+    }
+
+    rc = sqlite3_enable_shared_cache(do_enable);
+
+    if (rc != SQLITE_OK) {
+        PyErr_SetString(OperationalError, "Changing the shared_cache flag failed");
+        return NULL;
+    } else {
+        Py_INCREF(Py_None);
+        return Py_None;
+    }
+}
+#endif /* HAVE_SHARED_CACHE */
+
+static PyObject* module_register_adapter(PyObject* self, PyObject* args, PyObject* kwargs)
+{
+    PyTypeObject* type;
+    PyObject* caster;
+
+    if (!PyArg_ParseTuple(args, "OO", &type, &caster)) {
+        return NULL;
+    }
+
+    microprotocols_add(type, (PyObject*)&SQLitePrepareProtocolType, caster);
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+static PyObject* module_register_converter(PyObject* self, PyObject* args, PyObject* kwargs)
+{
+    PyObject* name;
+    PyObject* callable;
+
+    if (!PyArg_ParseTuple(args, "OO", &name, &callable)) {
+        return NULL;
+    }
+
+    if (PyDict_SetItem(converters, name, callable) != 0) {
+        return NULL;
+    }
+
+    Py_INCREF(Py_None);
+    return Py_None;
+}
+
+void converters_init(PyObject* dict)
+{
+    converters = PyDict_New();
+    if (!converters) {
+        return;
+    }
+
+    PyDict_SetItemString(dict, "converters", converters);
+}
+
+static PyMethodDef module_methods[] = {
+    {"connect",  (PyCFunction)module_connect,  METH_VARARGS|METH_KEYWORDS, PyDoc_STR("Creates a connection.")},
+    {"complete_statement",  (PyCFunction)module_complete,  METH_VARARGS|METH_KEYWORDS, PyDoc_STR("Checks if a string contains a complete SQL statement.")},
+#ifdef HAVE_SHARED_CACHE
+    {"enable_shared_cache",  (PyCFunction)module_enable_shared_cache,  METH_VARARGS|METH_KEYWORDS, PyDoc_STR("Enable or disable shared cache mode for the calling thread.")},
+#endif
+    {"register_adapter", (PyCFunction)module_register_adapter, METH_VARARGS, PyDoc_STR("Registers an adapter with sqlite's adapter registry.")},
+    {"register_converter", (PyCFunction)module_register_converter, METH_VARARGS, PyDoc_STR("Registers a converter with sqlite.")},
+    {"adapt",  (PyCFunction)psyco_microprotocols_adapt, METH_VARARGS, psyco_microprotocols_adapt_doc},
+    {NULL, NULL}
+};
+
+PyMODINIT_FUNC init_sqlite3(void)
+{
+    PyObject *module, *dict;
+    PyObject *tmp_obj;
+
+    module = Py_InitModule("_sqlite3", module_methods);
+
+    if (!module ||
+        (row_setup_types() < 0) ||
+        (cursor_setup_types() < 0) ||
+        (connection_setup_types() < 0) ||
+        (cache_setup_types() < 0) ||
+        (statement_setup_types() < 0) ||
+        (prepare_protocol_setup_types() < 0)
+       ) {
+        return;
+    }
+
+    Py_INCREF(&ConnectionType);
+    PyModule_AddObject(module, "Connection", (PyObject*) &ConnectionType);
+    Py_INCREF(&CursorType);
+    PyModule_AddObject(module, "Cursor", (PyObject*) &CursorType);
+    Py_INCREF(&CacheType);
+    PyModule_AddObject(module, "Statement", (PyObject*)&StatementType);
+    Py_INCREF(&StatementType);
+    PyModule_AddObject(module, "Cache", (PyObject*) &CacheType);
+    Py_INCREF(&SQLitePrepareProtocolType);
+    PyModule_AddObject(module, "PrepareProtocol", (PyObject*) &SQLitePrepareProtocolType);
+    Py_INCREF(&RowType);
+    PyModule_AddObject(module, "Row", (PyObject*) &RowType);
+
+    if (!(dict = PyModule_GetDict(module))) {
+        goto error;
+    }
+
+    /*** Create DB-API Exception hierarchy */
+
+    if (!(Error = PyErr_NewException(MODULE_NAME ".Error", PyExc_StandardError, NULL))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "Error", Error);
+
+    if (!(Warning = PyErr_NewException(MODULE_NAME ".Warning", PyExc_StandardError, NULL))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "Warning", Warning);
+
+    /* Error subclasses */
+
+    if (!(InterfaceError = PyErr_NewException(MODULE_NAME ".InterfaceError", Error, NULL))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "InterfaceError", InterfaceError);
+
+    if (!(DatabaseError = PyErr_NewException(MODULE_NAME ".DatabaseError", Error, NULL))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "DatabaseError", DatabaseError);
+
+    /* DatabaseError subclasses */
+
+    if (!(InternalError = PyErr_NewException(MODULE_NAME ".InternalError", DatabaseError, NULL))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "InternalError", InternalError);
+
+    if (!(OperationalError = PyErr_NewException(MODULE_NAME ".OperationalError", DatabaseError, NULL))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "OperationalError", OperationalError);
+
+    if (!(ProgrammingError = PyErr_NewException(MODULE_NAME ".ProgrammingError", DatabaseError, NULL))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "ProgrammingError", ProgrammingError);
+
+    if (!(IntegrityError = PyErr_NewException(MODULE_NAME ".IntegrityError", DatabaseError,NULL))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "IntegrityError", IntegrityError);
+
+    if (!(DataError = PyErr_NewException(MODULE_NAME ".DataError", DatabaseError, NULL))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "DataError", DataError);
+
+    if (!(NotSupportedError = PyErr_NewException(MODULE_NAME ".NotSupportedError", DatabaseError, NULL))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "NotSupportedError", NotSupportedError);
+
+    /* We just need "something" unique for OptimizedUnicode. It does not really
+     * need to be a string subclass. Just anything that can act as a special
+     * marker for us. So I pulled PyCell_Type out of my magic hat.
+     */
+    Py_INCREF((PyObject*)&PyCell_Type);
+    OptimizedUnicode = (PyObject*)&PyCell_Type;
+    PyDict_SetItemString(dict, "OptimizedUnicode", OptimizedUnicode);
+
+    if (!(tmp_obj = PyInt_FromLong(PARSE_DECLTYPES))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "PARSE_DECLTYPES", tmp_obj);
+
+    if (!(tmp_obj = PyInt_FromLong(PARSE_COLNAMES))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "PARSE_COLNAMES", tmp_obj);
+
+    if (!(tmp_obj = PyString_FromString(PYSQLITE_VERSION))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "version", tmp_obj);
+
+    if (!(tmp_obj = PyString_FromString(sqlite3_libversion()))) {
+        goto error;
+    }
+    PyDict_SetItemString(dict, "sqlite_version", tmp_obj);
+
+    /* initialize microprotocols layer */
+    microprotocols_init(dict);
+
+    /* initialize the default converters */
+    converters_init(dict);
+
+    /* Original comment form _bsddb.c in the Python core. This is also still
+     * needed nowadays for Python 2.3/2.4.
+     * 
+     * PyEval_InitThreads is called here due to a quirk in python 1.5
+     * - 2.2.1 (at least) according to Russell Williamson <merel@wt.net>:
+     * The global interepreter lock is not initialized until the first
+     * thread is created using thread.start_new_thread() or fork() is
+     * called.  that would cause the ALLOW_THREADS here to segfault due
+     * to a null pointer reference if no threads or child processes
+     * have been created.  This works around that and is a no-op if
+     * threads have already been initialized.
+     *  (see pybsddb-users mailing list post on 2002-08-07)
+     */
+    PyEval_InitThreads();
+
+error:
+    if (PyErr_Occurred())
+    {
+        PyErr_SetString(PyExc_ImportError, MODULE_NAME ": init failed");
+    }
+}
diff --git a/Modules/_sqlite/module.h b/Modules/_sqlite/module.h
new file mode 100644
index 0000000..6694735
--- /dev/null
+++ b/Modules/_sqlite/module.h
@@ -0,0 +1,55 @@
+/* module.h - definitions for the module
+ *
+ * Copyright (C) 2004-2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_MODULE_H
+#define PYSQLITE_MODULE_H
+#include "Python.h"
+
+#define PYSQLITE_VERSION "2.2.0"
+
+extern PyObject* Error;
+extern PyObject* Warning;
+extern PyObject* InterfaceError;
+extern PyObject* DatabaseError;
+extern PyObject* InternalError;
+extern PyObject* OperationalError;
+extern PyObject* ProgrammingError;
+extern PyObject* IntegrityError;
+extern PyObject* DataError;
+extern PyObject* NotSupportedError;
+
+extern PyObject* OptimizedUnicode;
+
+/* the functions time.time() and time.sleep() */
+extern PyObject* time_time;
+extern PyObject* time_sleep;
+
+/* A dictionary, mapping colum types (INTEGER, VARCHAR, etc.) to converter
+ * functions, that convert the SQL value to the appropriate Python value.
+ * The key is uppercase.
+ */
+extern PyObject* converters;
+
+#define PARSE_DECLTYPES 1
+#define PARSE_COLNAMES 2
+#endif
diff --git a/Modules/_sqlite/prepare_protocol.c b/Modules/_sqlite/prepare_protocol.c
new file mode 100644
index 0000000..26b663b
--- /dev/null
+++ b/Modules/_sqlite/prepare_protocol.c
@@ -0,0 +1,84 @@
+/* prepare_protocol.c - the protocol for preparing values for SQLite
+ *
+ * Copyright (C) 2005-2006 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include "prepare_protocol.h"
+
+int prepare_protocol_init(SQLitePrepareProtocol* self, PyObject* args, PyObject* kwargs)
+{
+    return 0;
+}
+
+void prepare_protocol_dealloc(SQLitePrepareProtocol* self)
+{
+    self->ob_type->tp_free((PyObject*)self);
+}
+
+PyTypeObject SQLitePrepareProtocolType= {
+        PyObject_HEAD_INIT(NULL)
+        0,                                              /* ob_size */
+        MODULE_NAME ".PrepareProtocol",                 /* tp_name */
+        sizeof(SQLitePrepareProtocol),                  /* tp_basicsize */
+        0,                                              /* tp_itemsize */
+        (destructor)prepare_protocol_dealloc,           /* tp_dealloc */
+        0,                                              /* tp_print */
+        0,                                              /* tp_getattr */
+        0,                                              /* tp_setattr */
+        0,                                              /* tp_compare */
+        0,                                              /* tp_repr */
+        0,                                              /* tp_as_number */
+        0,                                              /* tp_as_sequence */
+        0,                                              /* tp_as_mapping */
+        0,                                              /* tp_hash */
+        0,                                              /* tp_call */
+        0,                                              /* tp_str */
+        0,                                              /* tp_getattro */
+        0,                                              /* tp_setattro */
+        0,                                              /* tp_as_buffer */
+        Py_TPFLAGS_DEFAULT,                             /* tp_flags */
+        0,                                              /* tp_doc */
+        0,                                              /* tp_traverse */
+        0,                                              /* tp_clear */
+        0,                                              /* tp_richcompare */
+        0,                                              /* tp_weaklistoffset */
+        0,                                              /* tp_iter */
+        0,                                              /* tp_iternext */
+        0,                                              /* tp_methods */
+        0,                                              /* tp_members */
+        0,                                              /* tp_getset */
+        0,                                              /* tp_base */
+        0,                                              /* tp_dict */
+        0,                                              /* tp_descr_get */
+        0,                                              /* tp_descr_set */
+        0,                                              /* tp_dictoffset */
+        (initproc)prepare_protocol_init,                /* tp_init */
+        0,                                              /* tp_alloc */
+        0,                                              /* tp_new */
+        0                                               /* tp_free */
+};
+
+extern int prepare_protocol_setup_types(void)
+{
+    SQLitePrepareProtocolType.tp_new = PyType_GenericNew;
+    SQLitePrepareProtocolType.ob_type= &PyType_Type;
+    return PyType_Ready(&SQLitePrepareProtocolType);
+}
diff --git a/Modules/_sqlite/prepare_protocol.h b/Modules/_sqlite/prepare_protocol.h
new file mode 100644
index 0000000..2fc4f61
--- /dev/null
+++ b/Modules/_sqlite/prepare_protocol.h
@@ -0,0 +1,41 @@
+/* prepare_protocol.h - the protocol for preparing values for SQLite
+ *
+ * Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_PREPARE_PROTOCOL_H
+#define PYSQLITE_PREPARE_PROTOCOL_H
+#include "Python.h"
+
+typedef struct
+{
+    PyObject_HEAD
+} SQLitePrepareProtocol;
+
+extern PyTypeObject SQLitePrepareProtocolType;
+
+int prepare_protocol_init(SQLitePrepareProtocol* self, PyObject* args, PyObject* kwargs);
+void prepare_protocol_dealloc(SQLitePrepareProtocol* self);
+
+int prepare_protocol_setup_types(void);
+
+#define UNKNOWN (-1)
+#endif
diff --git a/Modules/_sqlite/row.c b/Modules/_sqlite/row.c
new file mode 100644
index 0000000..80b6135
--- /dev/null
+++ b/Modules/_sqlite/row.c
@@ -0,0 +1,202 @@
+/* row.c - an enhanced tuple for database rows
+ *
+ * Copyright (C) 2005-2006 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include "row.h"
+#include "cursor.h"
+#include "sqlitecompat.h"
+
+void row_dealloc(Row* self)
+{
+    Py_XDECREF(self->data);
+    Py_XDECREF(self->description);
+
+    self->ob_type->tp_free((PyObject*)self);
+}
+
+int row_init(Row* self, PyObject* args, PyObject* kwargs)
+{
+    PyObject* data;
+    Cursor* cursor;
+
+    self->data = 0;
+    self->description = 0;
+
+    if (!PyArg_ParseTuple(args, "OO", &cursor, &data)) {
+        return -1;
+    }
+
+    if (!PyObject_IsInstance((PyObject*)cursor, (PyObject*)&CursorType)) {
+        PyErr_SetString(PyExc_TypeError, "instance of cursor required for first argument");
+        return -1;
+    }
+
+    if (!PyTuple_Check(data)) {
+        PyErr_SetString(PyExc_TypeError, "tuple required for second argument");
+        return -1;
+    }
+
+    Py_INCREF(data);
+    self->data = data;
+
+    Py_INCREF(cursor->description);
+    self->description = cursor->description;
+
+    return 0;
+}
+
+PyObject* row_subscript(Row* self, PyObject* idx)
+{
+    long _idx;
+    char* key;
+    int nitems, i;
+    char* compare_key;
+
+    char* p1;
+    char* p2;
+
+    PyObject* item;
+
+    if (PyInt_Check(idx)) {
+        _idx = PyInt_AsLong(idx);
+        item = PyTuple_GetItem(self->data, _idx);
+        Py_XINCREF(item);
+        return item;
+    } else if (PyLong_Check(idx)) {
+        _idx = PyLong_AsLong(idx);
+        item = PyTuple_GetItem(self->data, _idx);
+        Py_XINCREF(item);
+        return item;
+    } else if (PyString_Check(idx)) {
+        key = PyString_AsString(idx);
+
+        nitems = PyTuple_Size(self->description);
+
+        for (i = 0; i < nitems; i++) {
+            compare_key = PyString_AsString(PyTuple_GET_ITEM(PyTuple_GET_ITEM(self->description, i), 0));
+            if (!compare_key) {
+                return NULL;
+            }
+
+            p1 = key;
+            p2 = compare_key;
+
+            while (1) {
+                if ((*p1 == (char)0) || (*p2 == (char)0)) {
+                    break;
+                }
+
+                if ((*p1 | 0x20) != (*p2 | 0x20)) {
+                    break;
+                }
+
+                p1++;
+                p2++;
+            }
+
+            if ((*p1 == (char)0) && (*p2 == (char)0)) {
+                /* found item */
+                item = PyTuple_GetItem(self->data, i);
+                Py_INCREF(item);
+                return item;
+            }
+
+        }
+
+        PyErr_SetString(PyExc_IndexError, "No item with that key");
+        return NULL;
+    } else if (PySlice_Check(idx)) {
+        PyErr_SetString(PyExc_ValueError, "slices not implemented, yet");
+        return NULL;
+    } else {
+        PyErr_SetString(PyExc_IndexError, "Index must be int or string");
+        return NULL;
+    }
+}
+
+Py_ssize_t row_length(Row* self, PyObject* args, PyObject* kwargs)
+{
+    return PyTuple_GET_SIZE(self->data);
+}
+
+static int row_print(Row* self, FILE *fp, int flags)
+{
+    return (&PyTuple_Type)->tp_print(self->data, fp, flags);
+}
+
+
+PyMappingMethods row_as_mapping = {
+    /* mp_length        */ (lenfunc)row_length,
+    /* mp_subscript     */ (binaryfunc)row_subscript,
+    /* mp_ass_subscript */ (objobjargproc)0,
+};
+
+
+PyTypeObject RowType = {
+        PyObject_HEAD_INIT(NULL)
+        0,                                              /* ob_size */
+        MODULE_NAME ".Row",                             /* tp_name */
+        sizeof(Row),                                    /* tp_basicsize */
+        0,                                              /* tp_itemsize */
+        (destructor)row_dealloc,                        /* tp_dealloc */
+        (printfunc)row_print,                           /* tp_print */
+        0,                                              /* tp_getattr */
+        0,                                              /* tp_setattr */
+        0,                                              /* tp_compare */
+        0,                                              /* tp_repr */
+        0,                                              /* tp_as_number */
+        0,                                              /* tp_as_sequence */
+        0,                                              /* tp_as_mapping */
+        0,                                              /* tp_hash */
+        0,                                              /* tp_call */
+        0,                                              /* tp_str */
+        0,                                              /* tp_getattro */
+        0,                                              /* tp_setattro */
+        0,                                              /* tp_as_buffer */
+        Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE,         /* tp_flags */
+        0,                                              /* tp_doc */
+        0,                                              /* tp_traverse */
+        0,                                              /* tp_clear */
+        0,                                              /* tp_richcompare */
+        0,                                              /* tp_weaklistoffset */
+        0,                                              /* tp_iter */
+        0,                                              /* tp_iternext */
+        0,                                              /* tp_methods */
+        0,                                              /* tp_members */
+        0,                                              /* tp_getset */
+        0,                                              /* tp_base */
+        0,                                              /* tp_dict */
+        0,                                              /* tp_descr_get */
+        0,                                              /* tp_descr_set */
+        0,                                              /* tp_dictoffset */
+        (initproc)row_init,                             /* tp_init */
+        0,                                              /* tp_alloc */
+        0,                                              /* tp_new */
+        0                                               /* tp_free */
+};
+
+extern int row_setup_types(void)
+{
+    RowType.tp_new = PyType_GenericNew;
+    RowType.tp_as_mapping = &row_as_mapping;
+    return PyType_Ready(&RowType);
+}
diff --git a/Modules/_sqlite/row.h b/Modules/_sqlite/row.h
new file mode 100644
index 0000000..c6e083c
--- /dev/null
+++ b/Modules/_sqlite/row.h
@@ -0,0 +1,39 @@
+/* row.h - an enhanced tuple for database rows
+ *
+ * Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_ROW_H
+#define PYSQLITE_ROW_H
+#include "Python.h"
+
+typedef struct _Row
+{
+    PyObject_HEAD
+    PyObject* data;
+    PyObject* description;
+} Row;
+
+extern PyTypeObject RowType;
+
+int row_setup_types(void);
+
+#endif
diff --git a/Modules/_sqlite/sqlitecompat.h b/Modules/_sqlite/sqlitecompat.h
new file mode 100644
index 0000000..c379825
--- /dev/null
+++ b/Modules/_sqlite/sqlitecompat.h
@@ -0,0 +1,34 @@
+/* sqlitecompat.h - compatibility macros
+ *
+ * Copyright (C) 2006 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_COMPAT_H
+#define PYSQLITE_COMPAT_H
+
+/* define Py_ssize_t for pre-2.5 versions of Python */
+
+#if PY_VERSION_HEX < 0x02050000
+typedef int Py_ssize_t;
+typedef int (*lenfunc)(PyObject*);
+#endif
+
+#endif
diff --git a/Modules/_sqlite/statement.c b/Modules/_sqlite/statement.c
new file mode 100644
index 0000000..0c93651
--- /dev/null
+++ b/Modules/_sqlite/statement.c
@@ -0,0 +1,427 @@
+/* statement.c - the statement type
+ *
+ * Copyright (C) 2005-2006 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include "statement.h"
+#include "cursor.h"
+#include "connection.h"
+#include "microprotocols.h"
+#include "prepare_protocol.h"
+#include "sqlitecompat.h"
+
+/* prototypes */
+int check_remaining_sql(const char* tail);
+
+typedef enum {
+    LINECOMMENT_1,
+    IN_LINECOMMENT,
+    COMMENTSTART_1,
+    IN_COMMENT,
+    COMMENTEND_1,
+    NORMAL
+} parse_remaining_sql_state;
+
+int statement_create(Statement* self, Connection* connection, PyObject* sql)
+{
+    const char* tail;
+    int rc;
+    PyObject* sql_str;
+    char* sql_cstr;
+
+    self->st = NULL;
+    self->in_use = 0;
+
+    if (PyString_Check(sql)) {
+        sql_str = sql;
+        Py_INCREF(sql_str);
+    } else if (PyUnicode_Check(sql)) {
+        sql_str = PyUnicode_AsUTF8String(sql);
+        if (!sql_str) {
+            rc = PYSQLITE_SQL_WRONG_TYPE;
+            return rc;
+        }
+    } else {
+        rc = PYSQLITE_SQL_WRONG_TYPE;
+        return rc;
+    }
+
+    self->sql = sql_str;
+
+    sql_cstr = PyString_AsString(sql_str);
+
+    rc = sqlite3_prepare(connection->db,
+                         sql_cstr,
+                         -1,
+                         &self->st,
+                         &tail);
+
+    self->db = connection->db;
+
+    if (rc == SQLITE_OK && check_remaining_sql(tail)) {
+        (void)sqlite3_finalize(self->st);
+        self->st = NULL;
+        rc = PYSQLITE_TOO_MUCH_SQL;
+    }
+
+    return rc;
+}
+
+int statement_bind_parameter(Statement* self, int pos, PyObject* parameter)
+{
+    int rc = SQLITE_OK;
+    long longval;
+#ifdef HAVE_LONG_LONG
+    PY_LONG_LONG longlongval;
+#endif
+    const char* buffer;
+    char* string;
+    Py_ssize_t buflen;
+    PyObject* stringval;
+
+    if (parameter == Py_None) {
+        rc = sqlite3_bind_null(self->st, pos);
+    } else if (PyInt_Check(parameter)) {
+        longval = PyInt_AsLong(parameter);
+        rc = sqlite3_bind_int64(self->st, pos, (sqlite_int64)longval);
+#ifdef HAVE_LONG_LONG
+    } else if (PyLong_Check(parameter)) {
+        longlongval = PyLong_AsLongLong(parameter);
+        /* in the overflow error case, longlongval is -1, and an exception is set */
+        rc = sqlite3_bind_int64(self->st, pos, (sqlite_int64)longlongval);
+#endif
+    } else if (PyFloat_Check(parameter)) {
+        rc = sqlite3_bind_double(self->st, pos, PyFloat_AsDouble(parameter));
+    } else if (PyBuffer_Check(parameter)) {
+        if (PyObject_AsCharBuffer(parameter, &buffer, &buflen) == 0) {
+            rc = sqlite3_bind_blob(self->st, pos, buffer, buflen, SQLITE_TRANSIENT);
+        } else {
+            PyErr_SetString(PyExc_ValueError, "could not convert BLOB to buffer");
+            rc = -1;
+        }
+    } else if PyString_Check(parameter) {
+        string = PyString_AsString(parameter);
+        rc = sqlite3_bind_text(self->st, pos, string, -1, SQLITE_TRANSIENT);
+    } else if PyUnicode_Check(parameter) {
+        stringval = PyUnicode_AsUTF8String(parameter);
+        string = PyString_AsString(stringval);
+        rc = sqlite3_bind_text(self->st, pos, string, -1, SQLITE_TRANSIENT);
+        Py_DECREF(stringval);
+    } else {
+        rc = -1;
+    }
+
+    return rc;
+}
+
+void statement_bind_parameters(Statement* self, PyObject* parameters)
+{
+    PyObject* current_param;
+    PyObject* adapted;
+    const char* binding_name;
+    int i;
+    int rc;
+    int num_params_needed;
+    int num_params;
+
+    Py_BEGIN_ALLOW_THREADS
+    num_params_needed = sqlite3_bind_parameter_count(self->st);
+    Py_END_ALLOW_THREADS
+
+    if (PyDict_Check(parameters)) {
+        /* parameters passed as dictionary */
+        for (i = 1; i <= num_params_needed; i++) {
+            Py_BEGIN_ALLOW_THREADS
+            binding_name = sqlite3_bind_parameter_name(self->st, i);
+            Py_END_ALLOW_THREADS
+            if (!binding_name) {
+                PyErr_Format(ProgrammingError, "Binding %d has no name, but you supplied a dictionary (which has only names).", i);
+                return;
+            }
+
+            binding_name++; /* skip first char (the colon) */
+            current_param = PyDict_GetItemString(parameters, binding_name);
+            if (!current_param) {
+                PyErr_Format(ProgrammingError, "You did not supply a value for binding %d.", i);
+                return;
+            }
+
+            Py_INCREF(current_param);
+            adapted = microprotocols_adapt(current_param, (PyObject*)&SQLitePrepareProtocolType, NULL);
+            if (adapted) {
+                Py_DECREF(current_param);
+            } else {
+                PyErr_Clear();
+                adapted = current_param;
+            }
+
+            rc = statement_bind_parameter(self, i, adapted);
+            Py_DECREF(adapted);
+
+            if (rc != SQLITE_OK) {
+                PyErr_Format(InterfaceError, "Error binding parameter :%s - probably unsupported type.", binding_name);
+                return;
+           }
+        }
+    } else {
+        /* parameters passed as sequence */
+        num_params = PySequence_Length(parameters);
+        if (num_params != num_params_needed) {
+            PyErr_Format(ProgrammingError, "Incorrect number of bindings supplied. The current statement uses %d, and there are %d supplied.",
+                         num_params_needed, num_params);
+            return;
+        }
+        for (i = 0; i < num_params; i++) {
+            current_param = PySequence_GetItem(parameters, i);
+            if (!current_param) {
+                return;
+            }
+            adapted = microprotocols_adapt(current_param, (PyObject*)&SQLitePrepareProtocolType, NULL);
+
+            if (adapted) {
+                Py_DECREF(current_param);
+            } else {
+                PyErr_Clear();
+                adapted = current_param;
+            }
+
+            rc = statement_bind_parameter(self, i + 1, adapted);
+            Py_DECREF(adapted);
+
+            if (rc != SQLITE_OK) {
+                PyErr_Format(InterfaceError, "Error binding parameter %d - probably unsupported type.", i);
+                return;
+            }
+        }
+    }
+}
+
+int statement_recompile(Statement* self, PyObject* params)
+{
+    const char* tail;
+    int rc;
+    char* sql_cstr;
+    sqlite3_stmt* new_st;
+
+    sql_cstr = PyString_AsString(self->sql);
+
+    rc = sqlite3_prepare(self->db,
+                         sql_cstr,
+                         -1,
+                         &new_st,
+                         &tail);
+
+    if (rc == SQLITE_OK) {
+        /* The efficient sqlite3_transfer_bindings is only available in SQLite
+         * version 3.2.2 or later. For older SQLite releases, that might not
+         * even define SQLITE_VERSION_NUMBER, we do it the manual way.
+         */
+        #ifdef SQLITE_VERSION_NUMBER
+        #if SQLITE_VERSION_NUMBER >= 3002002
+        (void)sqlite3_transfer_bindings(self->st, new_st);
+        #endif
+        #else
+        statement_bind_parameters(self, params);
+        #endif
+
+        (void)sqlite3_finalize(self->st);
+        self->st = new_st;
+    }
+
+    return rc;
+}
+
+int statement_finalize(Statement* self)
+{
+    int rc;
+
+    rc = SQLITE_OK;
+    if (self->st) {
+        Py_BEGIN_ALLOW_THREADS
+        rc = sqlite3_finalize(self->st);
+        Py_END_ALLOW_THREADS
+        self->st = NULL;
+    }
+
+    self->in_use = 0;
+
+    return rc;
+}
+
+int statement_reset(Statement* self)
+{
+    int rc;
+
+    rc = SQLITE_OK;
+
+    if (self->in_use && self->st) {
+        Py_BEGIN_ALLOW_THREADS
+        rc = sqlite3_reset(self->st);
+        Py_END_ALLOW_THREADS
+
+        if (rc == SQLITE_OK) {
+            self->in_use = 0;
+        }
+    }
+
+    return rc;
+}
+
+void statement_mark_dirty(Statement* self)
+{
+    self->in_use = 1;
+}
+
+void statement_dealloc(Statement* self)
+{
+    int rc;
+
+    if (self->st) {
+        Py_BEGIN_ALLOW_THREADS
+        rc = sqlite3_finalize(self->st);
+        Py_END_ALLOW_THREADS
+    }
+
+    self->st = NULL;
+
+    Py_XDECREF(self->sql);
+
+    self->ob_type->tp_free((PyObject*)self);
+}
+
+/*
+ * Checks if there is anything left in an SQL string after SQLite compiled it.
+ * This is used to check if somebody tried to execute more than one SQL command
+ * with one execute()/executemany() command, which the DB-API and we don't
+ * allow.
+ *
+ * Returns 1 if there is more left than should be. 0 if ok.
+ */
+int check_remaining_sql(const char* tail)
+{
+    const char* pos = tail;
+
+    parse_remaining_sql_state state = NORMAL;
+
+    for (;;) {
+        switch (*pos) {
+            case 0:
+                return 0;
+            case '-':
+                if (state == NORMAL) {
+                    state  = LINECOMMENT_1;
+                } else if (state == LINECOMMENT_1) {
+                    state = IN_LINECOMMENT;
+                }
+                break;
+            case ' ':
+            case '\t':
+                break;
+            case '\n':
+            case 13:
+                if (state == IN_LINECOMMENT) {
+                    state = NORMAL;
+                }
+                break;
+            case '/':
+                if (state == NORMAL) {
+                    state = COMMENTSTART_1;
+                } else if (state == COMMENTEND_1) {
+                    state = NORMAL;
+                } else if (state == COMMENTSTART_1) {
+                    return 1;
+                }
+                break;
+            case '*':
+                if (state == NORMAL) {
+                    return 1;
+                } else if (state == LINECOMMENT_1) {
+                    return 1;
+                } else if (state == COMMENTSTART_1) {
+                    state = IN_COMMENT;
+                } else if (state == IN_COMMENT) {
+                    state = COMMENTEND_1;
+                }
+                break;
+            default:
+                if (state == COMMENTEND_1) {
+                    state = IN_COMMENT;
+                } else if (state == IN_LINECOMMENT) {
+                } else if (state == IN_COMMENT) {
+                } else {
+                    return 1;
+                }
+        }
+
+        pos++;
+    }
+
+    return 0;
+}
+
+PyTypeObject StatementType = {
+        PyObject_HEAD_INIT(NULL)
+        0,                                              /* ob_size */
+        MODULE_NAME ".Statement",                       /* tp_name */
+        sizeof(Statement),                              /* tp_basicsize */
+        0,                                              /* tp_itemsize */
+        (destructor)statement_dealloc,                  /* tp_dealloc */
+        0,                                              /* tp_print */
+        0,                                              /* tp_getattr */
+        0,                                              /* tp_setattr */
+        0,                                              /* tp_compare */
+        0,                                              /* tp_repr */
+        0,                                              /* tp_as_number */
+        0,                                              /* tp_as_sequence */
+        0,                                              /* tp_as_mapping */
+        0,                                              /* tp_hash */
+        0,                                              /* tp_call */
+        0,                                              /* tp_str */
+        0,                                              /* tp_getattro */
+        0,                                              /* tp_setattro */
+        0,                                              /* tp_as_buffer */
+        Py_TPFLAGS_DEFAULT,                             /* tp_flags */
+        0,                                              /* tp_doc */
+        0,                                              /* tp_traverse */
+        0,                                              /* tp_clear */
+        0,                                              /* tp_richcompare */
+        0,                                              /* tp_weaklistoffset */
+        0,                                              /* tp_iter */
+        0,                                              /* tp_iternext */
+        0,                                              /* tp_methods */
+        0,                                              /* tp_members */
+        0,                                              /* tp_getset */
+        0,                                              /* tp_base */
+        0,                                              /* tp_dict */
+        0,                                              /* tp_descr_get */
+        0,                                              /* tp_descr_set */
+        0,                                              /* tp_dictoffset */
+        (initproc)0,                                    /* tp_init */
+        0,                                              /* tp_alloc */
+        0,                                              /* tp_new */
+        0                                               /* tp_free */
+};
+
+extern int statement_setup_types(void)
+{
+    StatementType.tp_new = PyType_GenericNew;
+    return PyType_Ready(&StatementType);
+}
diff --git a/Modules/_sqlite/statement.h b/Modules/_sqlite/statement.h
new file mode 100644
index 0000000..e45a0fc
--- /dev/null
+++ b/Modules/_sqlite/statement.h
@@ -0,0 +1,58 @@
+/* statement.h - definitions for the statement type
+ *
+ * Copyright (C) 2005 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_STATEMENT_H
+#define PYSQLITE_STATEMENT_H
+#include "Python.h"
+
+#include "connection.h"
+#include "sqlite3.h"
+
+#define PYSQLITE_TOO_MUCH_SQL (-100)
+#define PYSQLITE_SQL_WRONG_TYPE (-101)
+
+typedef struct
+{
+    PyObject_HEAD
+    sqlite3* db;
+    sqlite3_stmt* st;
+    PyObject* sql;
+    int in_use;
+} Statement;
+
+extern PyTypeObject StatementType;
+
+int statement_create(Statement* self, Connection* connection, PyObject* sql);
+void statement_dealloc(Statement* self);
+
+int statement_bind_parameter(Statement* self, int pos, PyObject* parameter);
+void statement_bind_parameters(Statement* self, PyObject* parameters);
+
+int statement_recompile(Statement* self, PyObject* parameters);
+int statement_finalize(Statement* self);
+int statement_reset(Statement* self);
+void statement_mark_dirty(Statement* self);
+
+int statement_setup_types(void);
+
+#endif
diff --git a/Modules/_sqlite/util.c b/Modules/_sqlite/util.c
new file mode 100644
index 0000000..33748a6
--- /dev/null
+++ b/Modules/_sqlite/util.c
@@ -0,0 +1,96 @@
+/* util.c - various utility functions
+ *
+ * Copyright (C) 2005-2006 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#include "module.h"
+#include "connection.h"
+
+int _sqlite_step_with_busyhandler(sqlite3_stmt* statement, Connection* connection
+)
+{
+    int rc;
+
+    Py_BEGIN_ALLOW_THREADS
+    rc = sqlite3_step(statement);
+    Py_END_ALLOW_THREADS
+
+    return rc;
+}
+
+/**
+ * Checks the SQLite error code and sets the appropriate DB-API exception.
+ * Returns the error code (0 means no error occured).
+ */
+int _seterror(sqlite3* db)
+{
+    int errorcode;
+
+    errorcode = sqlite3_errcode(db);
+
+    switch (errorcode)
+    {
+        case SQLITE_OK:
+            PyErr_Clear();
+            break;
+        case SQLITE_INTERNAL:
+        case SQLITE_NOTFOUND:
+            PyErr_SetString(InternalError, sqlite3_errmsg(db));
+            break;
+        case SQLITE_NOMEM:
+            (void)PyErr_NoMemory();
+            break;
+        case SQLITE_ERROR:
+        case SQLITE_PERM:
+        case SQLITE_ABORT:
+        case SQLITE_BUSY:
+        case SQLITE_LOCKED:
+        case SQLITE_READONLY:
+        case SQLITE_INTERRUPT:
+        case SQLITE_IOERR:
+        case SQLITE_FULL:
+        case SQLITE_CANTOPEN:
+        case SQLITE_PROTOCOL:
+        case SQLITE_EMPTY:
+        case SQLITE_SCHEMA:
+            PyErr_SetString(OperationalError, sqlite3_errmsg(db));
+            break;
+        case SQLITE_CORRUPT:
+            PyErr_SetString(DatabaseError, sqlite3_errmsg(db));
+            break;
+        case SQLITE_TOOBIG:
+            PyErr_SetString(DataError, sqlite3_errmsg(db));
+            break;
+        case SQLITE_CONSTRAINT:
+        case SQLITE_MISMATCH:
+            PyErr_SetString(IntegrityError, sqlite3_errmsg(db));
+            break;
+        case SQLITE_MISUSE:
+            PyErr_SetString(ProgrammingError, sqlite3_errmsg(db));
+            break;
+        default:
+            PyErr_SetString(DatabaseError, sqlite3_errmsg(db));
+            break;
+    }
+
+    return errorcode;
+}
+
diff --git a/Modules/_sqlite/util.h b/Modules/_sqlite/util.h
new file mode 100644
index 0000000..e99a4dd
--- /dev/null
+++ b/Modules/_sqlite/util.h
@@ -0,0 +1,38 @@
+/* util.h - various utility functions
+ *
+ * Copyright (C) 2005-2006 Gerhard Häring <gh@ghaering.de>
+ *
+ * This file is part of pysqlite.
+ *
+ * This software is provided 'as-is', without any express or implied
+ * warranty.  In no event will the authors be held liable for any damages
+ * arising from the use of this software.
+ *
+ * Permission is granted to anyone to use this software for any purpose,
+ * including commercial applications, and to alter it and redistribute it
+ * freely, subject to the following restrictions:
+ *
+ * 1. The origin of this software must not be misrepresented; you must not
+ *    claim that you wrote the original software. If you use this software
+ *    in a product, an acknowledgment in the product documentation would be
+ *    appreciated but is not required.
+ * 2. Altered source versions must be plainly marked as such, and must not be
+ *    misrepresented as being the original software.
+ * 3. This notice may not be removed or altered from any source distribution.
+ */
+
+#ifndef PYSQLITE_UTIL_H
+#define PYSQLITE_UTIL_H
+#include "Python.h"
+#include "pythread.h"
+#include "sqlite3.h"
+#include "connection.h"
+
+int _sqlite_step_with_busyhandler(sqlite3_stmt* statement, Connection* connection);
+
+/**
+ * Checks the SQLite error code and sets the appropriate DB-API exception.
+ * Returns the error code (0 means no error occured).
+ */
+int _seterror(sqlite3* db);
+#endif
diff --git a/Modules/_sre.c b/Modules/_sre.c
index 81223d7..4af08ed 100644
--- a/Modules/_sre.c
+++ b/Modules/_sre.c
@@ -275,7 +275,7 @@
             data_stack_dealloc(state);
             return SRE_ERROR_MEMORY;
         }
-        state->data_stack = stack;
+        state->data_stack = (char *)stack;
         state->data_stack_size = cursize;
     }
     return 0;
@@ -335,7 +335,7 @@
 {
     /* check if pointer is at given position */
 
-    int this, that;
+    int thisp, thatp;
 
     switch (at) {
 
@@ -362,57 +362,57 @@
     case SRE_AT_BOUNDARY:
         if (state->beginning == state->end)
             return 0;
-        that = ((void*) ptr > state->beginning) ?
+        thatp = ((void*) ptr > state->beginning) ?
             SRE_IS_WORD((int) ptr[-1]) : 0;
-        this = ((void*) ptr < state->end) ?
+        thisp = ((void*) ptr < state->end) ?
             SRE_IS_WORD((int) ptr[0]) : 0;
-        return this != that;
+        return thisp != thatp;
 
     case SRE_AT_NON_BOUNDARY:
         if (state->beginning == state->end)
             return 0;
-        that = ((void*) ptr > state->beginning) ?
+        thatp = ((void*) ptr > state->beginning) ?
             SRE_IS_WORD((int) ptr[-1]) : 0;
-        this = ((void*) ptr < state->end) ?
+        thisp = ((void*) ptr < state->end) ?
             SRE_IS_WORD((int) ptr[0]) : 0;
-        return this == that;
+        return thisp == thatp;
 
     case SRE_AT_LOC_BOUNDARY:
         if (state->beginning == state->end)
             return 0;
-        that = ((void*) ptr > state->beginning) ?
+        thatp = ((void*) ptr > state->beginning) ?
             SRE_LOC_IS_WORD((int) ptr[-1]) : 0;
-        this = ((void*) ptr < state->end) ?
+        thisp = ((void*) ptr < state->end) ?
             SRE_LOC_IS_WORD((int) ptr[0]) : 0;
-        return this != that;
+        return thisp != thatp;
 
     case SRE_AT_LOC_NON_BOUNDARY:
         if (state->beginning == state->end)
             return 0;
-        that = ((void*) ptr > state->beginning) ?
+        thatp = ((void*) ptr > state->beginning) ?
             SRE_LOC_IS_WORD((int) ptr[-1]) : 0;
-        this = ((void*) ptr < state->end) ?
+        thisp = ((void*) ptr < state->end) ?
             SRE_LOC_IS_WORD((int) ptr[0]) : 0;
-        return this == that;
+        return thisp == thatp;
 
 #if defined(HAVE_UNICODE)
     case SRE_AT_UNI_BOUNDARY:
         if (state->beginning == state->end)
             return 0;
-        that = ((void*) ptr > state->beginning) ?
+        thatp = ((void*) ptr > state->beginning) ?
             SRE_UNI_IS_WORD((int) ptr[-1]) : 0;
-        this = ((void*) ptr < state->end) ?
+        thisp = ((void*) ptr < state->end) ?
             SRE_UNI_IS_WORD((int) ptr[0]) : 0;
-        return this != that;
+        return thisp != thatp;
 
     case SRE_AT_UNI_NON_BOUNDARY:
         if (state->beginning == state->end)
             return 0;
-        that = ((void*) ptr > state->beginning) ?
+        thatp = ((void*) ptr > state->beginning) ?
             SRE_UNI_IS_WORD((int) ptr[-1]) : 0;
-        this = ((void*) ptr < state->end) ?
+        thisp = ((void*) ptr < state->end) ?
             SRE_UNI_IS_WORD((int) ptr[0]) : 0;
-        return this == that;
+        return thisp == thatp;
 #endif
 
     }
@@ -516,8 +516,8 @@
 SRE_COUNT(SRE_STATE* state, SRE_CODE* pattern, int maxcount)
 {
     SRE_CODE chr;
-    SRE_CHAR* ptr = state->ptr;
-    SRE_CHAR* end = state->end;
+    SRE_CHAR* ptr = (SRE_CHAR *)state->ptr;
+    SRE_CHAR* end = (SRE_CHAR *)state->end;
     int i;
 
     /* adjust end */
@@ -803,7 +803,7 @@
 LOCAL(int)
 SRE_MATCH(SRE_STATE* state, SRE_CODE* pattern)
 {
-    SRE_CHAR* end = state->end;
+    SRE_CHAR* end = (SRE_CHAR *)state->end;
     int alloc_pos, ctx_pos = -1;
     int i, ret = 0;
     int jump;
@@ -821,7 +821,7 @@
 
 entrance:
 
-    ctx->ptr = state->ptr;
+    ctx->ptr = (SRE_CHAR *)state->ptr;
 
     if (ctx->pattern[0] == SRE_OP_INFO) {
         /* optimization info block */
@@ -1477,8 +1477,8 @@
 LOCAL(int)
 SRE_SEARCH(SRE_STATE* state, SRE_CODE* pattern)
 {
-    SRE_CHAR* ptr = state->start;
-    SRE_CHAR* end = state->end;
+    SRE_CHAR* ptr = (SRE_CHAR *)state->start;
+    SRE_CHAR* end = (SRE_CHAR *)state->end;
     int status = 0;
     int prefix_len = 0;
     int prefix_skip = 0;
@@ -1524,7 +1524,7 @@
         /* pattern starts with a known prefix.  use the overlap
            table to skip forward as fast as we possibly can */
         int i = 0;
-        end = state->end;
+        end = (SRE_CHAR *)state->end;
         while (ptr < end) {
             for (;;) {
                 if ((SRE_CODE) ptr[0] != prefix[i]) {
@@ -1559,7 +1559,7 @@
         /* pattern starts with a literal character.  this is used
            for short prefixes, and if fast search is disabled */
         SRE_CODE chr = pattern[1];
-        end = state->end;
+        end = (SRE_CHAR *)state->end;
         for (;;) {
             while (ptr < end && (SRE_CODE) ptr[0] != chr)
                 ptr++;
@@ -1576,7 +1576,7 @@
         }
     } else if (charset) {
         /* pattern starts with a character from a known set */
-        end = state->end;
+        end = (SRE_CHAR *)state->end;
         for (;;) {
             while (ptr < end && !SRE_CHARSET(charset, ptr[0]))
                 ptr++;
@@ -1619,72 +1619,8 @@
 /* factories and destructors */
 
 /* see sre.h for object declarations */
-
-static PyTypeObject Pattern_Type;
-static PyTypeObject Match_Type;
-static PyTypeObject Scanner_Type;
-
-static PyObject *
-_compile(PyObject* self_, PyObject* args)
-{
-    /* "compile" pattern descriptor to pattern object */
-
-    PatternObject* self;
-    int i, n;
-
-    PyObject* pattern;
-    int flags = 0;
-    PyObject* code;
-    int groups = 0;
-    PyObject* groupindex = NULL;
-    PyObject* indexgroup = NULL;
-    if (!PyArg_ParseTuple(args, "OiO!|iOO", &pattern, &flags,
-                          &PyList_Type, &code, &groups,
-                          &groupindex, &indexgroup))
-        return NULL;
-
-    n = PyList_GET_SIZE(code);
-
-    self = PyObject_NEW_VAR(PatternObject, &Pattern_Type, n);
-    if (!self)
-        return NULL;
-
-    self->codesize = n;
-
-    for (i = 0; i < n; i++) {
-        PyObject *o = PyList_GET_ITEM(code, i);
-        unsigned long value = PyInt_Check(o) ? (unsigned long)PyInt_AsLong(o)
-                                              : PyLong_AsUnsignedLong(o);
-        self->code[i] = (SRE_CODE) value;
-        if ((unsigned long) self->code[i] != value) {
-            PyErr_SetString(PyExc_OverflowError,
-                            "regular expression code size limit exceeded");
-            break;
-        }
-    }
-
-    if (PyErr_Occurred()) {
-        PyObject_DEL(self);
-        return NULL;
-    }
-
-    Py_INCREF(pattern);
-    self->pattern = pattern;
-
-    self->flags = flags;
-
-    self->groups = groups;
-
-    Py_XINCREF(groupindex);
-    self->groupindex = groupindex;
-
-    Py_XINCREF(indexgroup);
-    self->indexgroup = indexgroup;
-
-    self->weakreflist = NULL;
-
-    return (PyObject*) self;
-}
+static PyObject*pattern_new_match(PatternObject*, SRE_STATE*, int);
+static PyObject*pattern_scanner(PatternObject*, PyObject*);
 
 static PyObject *
 sre_codesize(PyObject* self, PyObject* args)
@@ -1900,98 +1836,6 @@
     }
 }
 
-static PyObject*
-pattern_new_match(PatternObject* pattern, SRE_STATE* state, int status)
-{
-    /* create match object (from state object) */
-
-    MatchObject* match;
-    int i, j;
-    char* base;
-    int n;
-
-    if (status > 0) {
-
-        /* create match object (with room for extra group marks) */
-        match = PyObject_NEW_VAR(MatchObject, &Match_Type,
-                                 2*(pattern->groups+1));
-        if (!match)
-            return NULL;
-
-        Py_INCREF(pattern);
-        match->pattern = pattern;
-
-        Py_INCREF(state->string);
-        match->string = state->string;
-
-        match->regs = NULL;
-        match->groups = pattern->groups+1;
-
-        /* fill in group slices */
-
-        base = (char*) state->beginning;
-        n = state->charsize;
-
-        match->mark[0] = ((char*) state->start - base) / n;
-        match->mark[1] = ((char*) state->ptr - base) / n;
-
-        for (i = j = 0; i < pattern->groups; i++, j+=2)
-            if (j+1 <= state->lastmark && state->mark[j] && state->mark[j+1]) {
-                match->mark[j+2] = ((char*) state->mark[j] - base) / n;
-                match->mark[j+3] = ((char*) state->mark[j+1] - base) / n;
-            } else
-                match->mark[j+2] = match->mark[j+3] = -1; /* undefined */
-
-        match->pos = state->pos;
-        match->endpos = state->endpos;
-
-        match->lastindex = state->lastindex;
-
-        return (PyObject*) match;
-
-    } else if (status == 0) {
-
-        /* no match */
-        Py_INCREF(Py_None);
-        return Py_None;
-
-    }
-
-    /* internal error */
-    pattern_error(status);
-    return NULL;
-}
-
-static PyObject*
-pattern_scanner(PatternObject* pattern, PyObject* args)
-{
-    /* create search state object */
-
-    ScannerObject* self;
-
-    PyObject* string;
-    int start = 0;
-    int end = INT_MAX;
-    if (!PyArg_ParseTuple(args, "O|ii:scanner", &string, &start, &end))
-        return NULL;
-
-    /* create scanner object */
-    self = PyObject_NEW(ScannerObject, &Scanner_Type);
-    if (!self)
-        return NULL;
-
-    string = state_init(&self->state, pattern, string, start, end);
-    if (!string) {
-        PyObject_DEL(self);
-        return NULL;
-    }
-
-    Py_INCREF(pattern);
-    self->pattern = (PyObject*) pattern;
-
-    return (PyObject*) self;
-}
-
 static void
 pattern_dealloc(PatternObject* self)
 {
@@ -2414,7 +2258,7 @@
 }
 
 static PyObject*
-pattern_subx(PatternObject* self, PyObject* template, PyObject* string,
+pattern_subx(PatternObject* self, PyObject* ptemplate, PyObject* string,
              int count, int subn)
 {
     SRE_STATE state;
@@ -2429,21 +2273,21 @@
     int i, b, e;
     int filter_is_callable;
 
-    if (PyCallable_Check(template)) {
+    if (PyCallable_Check(ptemplate)) {
         /* sub/subn takes either a function or a template */
-        filter = template;
+        filter = ptemplate;
         Py_INCREF(filter);
         filter_is_callable = 1;
     } else {
         /* if not callable, check if it's a literal string */
         int literal;
-        ptr = getstring(template, &n, &b);
+        ptr = getstring(ptemplate, &n, &b);
         if (ptr) {
             if (b == 1) {
-                literal = sre_literal_template(ptr, n);
+		    literal = sre_literal_template((unsigned char *)ptr, n);
             } else {
 #if defined(HAVE_UNICODE)
-                literal = sre_uliteral_template(ptr, n);
+		    literal = sre_uliteral_template((Py_UNICODE *)ptr, n);
 #endif
             }
         } else {
@@ -2451,14 +2295,14 @@
             literal = 0;
         }
         if (literal) {
-            filter = template;
+            filter = ptemplate;
             Py_INCREF(filter);
             filter_is_callable = 0;
         } else {
             /* not a literal; hand it over to the template compiler */
             filter = call(
                 SRE_PY_MODULE, "_subx",
-                PyTuple_Pack(2, self, template)
+                PyTuple_Pack(2, self, ptemplate)
                 );
             if (!filter)
                 return NULL;
@@ -2597,29 +2441,29 @@
 static PyObject*
 pattern_sub(PatternObject* self, PyObject* args, PyObject* kw)
 {
-    PyObject* template;
+    PyObject* ptemplate;
     PyObject* string;
     int count = 0;
     static char* kwlist[] = { "repl", "string", "count", NULL };
     if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|i:sub", kwlist,
-                                     &template, &string, &count))
+                                     &ptemplate, &string, &count))
         return NULL;
 
-    return pattern_subx(self, template, string, count, 0);
+    return pattern_subx(self, ptemplate, string, count, 0);
 }
 
 static PyObject*
 pattern_subn(PatternObject* self, PyObject* args, PyObject* kw)
 {
-    PyObject* template;
+    PyObject* ptemplate;
     PyObject* string;
     int count = 0;
     static char* kwlist[] = { "repl", "string", "count", NULL };
     if (!PyArg_ParseTupleAndKeywords(args, kw, "OO|i:subn", kwlist,
-                                     &template, &string, &count))
+                                     &ptemplate, &string, &count))
         return NULL;
 
-    return pattern_subx(self, template, string, count, 1);
+    return pattern_subx(self, ptemplate, string, count, 1);
 }
 
 static PyObject*
@@ -2799,6 +2643,68 @@
     offsetof(PatternObject, weakreflist),	/* tp_weaklistoffset */
 };
 
+static PyObject *
+_compile(PyObject* self_, PyObject* args)
+{
+    /* "compile" pattern descriptor to pattern object */
+
+    PatternObject* self;
+    int i, n;
+
+    PyObject* pattern;
+    int flags = 0;
+    PyObject* code;
+    int groups = 0;
+    PyObject* groupindex = NULL;
+    PyObject* indexgroup = NULL;
+    if (!PyArg_ParseTuple(args, "OiO!|iOO", &pattern, &flags,
+                          &PyList_Type, &code, &groups,
+                          &groupindex, &indexgroup))
+        return NULL;
+
+    n = PyList_GET_SIZE(code);
+
+    self = PyObject_NEW_VAR(PatternObject, &Pattern_Type, n);
+    if (!self)
+        return NULL;
+
+    self->codesize = n;
+
+    for (i = 0; i < n; i++) {
+        PyObject *o = PyList_GET_ITEM(code, i);
+        unsigned long value = PyInt_Check(o) ? (unsigned long)PyInt_AsLong(o)
+                                              : PyLong_AsUnsignedLong(o);
+        self->code[i] = (SRE_CODE) value;
+        if ((unsigned long) self->code[i] != value) {
+            PyErr_SetString(PyExc_OverflowError,
+                            "regular expression code size limit exceeded");
+            break;
+        }
+    }
+
+    if (PyErr_Occurred()) {
+        PyObject_DEL(self);
+        return NULL;
+    }
+
+    Py_INCREF(pattern);
+    self->pattern = pattern;
+
+    self->flags = flags;
+
+    self->groups = groups;
+
+    Py_XINCREF(groupindex);
+    self->groupindex = groupindex;
+
+    Py_XINCREF(indexgroup);
+    self->indexgroup = indexgroup;
+
+    self->weakreflist = NULL;
+
+    return (PyObject*) self;
+}
+
 /* -------------------------------------------------------------------- */
 /* match methods */
 
@@ -2868,14 +2774,14 @@
 static PyObject*
 match_expand(MatchObject* self, PyObject* args)
 {
-    PyObject* template;
-    if (!PyArg_ParseTuple(args, "O:expand", &template))
+    PyObject* ptemplate;
+    if (!PyArg_ParseTuple(args, "O:expand", &ptemplate))
         return NULL;
 
     /* delegate to Python code */
     return call(
         SRE_PY_MODULE, "_expand",
-        PyTuple_Pack(3, self->pattern, self, template)
+        PyTuple_Pack(3, self->pattern, self, ptemplate)
         );
 }
 
@@ -3262,6 +3168,69 @@
     (getattrfunc)match_getattr /*tp_getattr*/
 };
 
+static PyObject*
+pattern_new_match(PatternObject* pattern, SRE_STATE* state, int status)
+{
+    /* create match object (from state object) */
+
+    MatchObject* match;
+    int i, j;
+    char* base;
+    int n;
+
+    if (status > 0) {
+
+        /* create match object (with room for extra group marks) */
+        match = PyObject_NEW_VAR(MatchObject, &Match_Type,
+                                 2*(pattern->groups+1));
+        if (!match)
+            return NULL;
+
+        Py_INCREF(pattern);
+        match->pattern = pattern;
+
+        Py_INCREF(state->string);
+        match->string = state->string;
+
+        match->regs = NULL;
+        match->groups = pattern->groups+1;
+
+        /* fill in group slices */
+
+        base = (char*) state->beginning;
+        n = state->charsize;
+
+        match->mark[0] = ((char*) state->start - base) / n;
+        match->mark[1] = ((char*) state->ptr - base) / n;
+
+        for (i = j = 0; i < pattern->groups; i++, j+=2)
+            if (j+1 <= state->lastmark && state->mark[j] && state->mark[j+1]) {
+                match->mark[j+2] = ((char*) state->mark[j] - base) / n;
+                match->mark[j+3] = ((char*) state->mark[j+1] - base) / n;
+            } else
+                match->mark[j+2] = match->mark[j+3] = -1; /* undefined */
+
+        match->pos = state->pos;
+        match->endpos = state->endpos;
+
+        match->lastindex = state->lastindex;
+
+        return (PyObject*) match;
+
+    } else if (status == 0) {
+
+        /* no match */
+        Py_INCREF(Py_None);
+        return Py_None;
+
+    }
+
+    /* internal error */
+    pattern_error(status);
+    return NULL;
+}
+
+
 /* -------------------------------------------------------------------- */
 /* scanner methods (experimental) */
 
@@ -3372,6 +3341,36 @@
     (getattrfunc)scanner_getattr, /*tp_getattr*/
 };
 
+static PyObject*
+pattern_scanner(PatternObject* pattern, PyObject* args)
+{
+    /* create search state object */
+
+    ScannerObject* self;
+
+    PyObject* string;
+    int start = 0;
+    int end = INT_MAX;
+    if (!PyArg_ParseTuple(args, "O|ii:scanner", &string, &start, &end))
+        return NULL;
+
+    /* create scanner object */
+    self = PyObject_NEW(ScannerObject, &Scanner_Type);
+    if (!self)
+        return NULL;
+
+    string = state_init(&self->state, pattern, string, start, end);
+    if (!string) {
+        PyObject_DEL(self);
+        return NULL;
+    }
+
+    Py_INCREF(pattern);
+    self->pattern = (PyObject*) pattern;
+
+    return (PyObject*) self;
+}
+
 static PyMethodDef _functions[] = {
     {"compile", _compile, METH_VARARGS},
     {"getcodesize", sre_codesize, METH_VARARGS},
diff --git a/Modules/_ssl.c b/Modules/_ssl.c
index 5f541f5..4c0da6f 100644
--- a/Modules/_ssl.c
+++ b/Modules/_ssl.c
@@ -55,7 +55,6 @@
 	SSL_CTX* 	ctx;
 	SSL*     	ssl;
 	X509*    	server_cert;
-	BIO*		sbio;
 	char    	server[X509_NAME_MAXLEN];
 	char		issuer[X509_NAME_MAXLEN];
 
@@ -474,15 +473,22 @@
 
 	if (!(buf = PyString_FromStringAndSize((char *) 0, len)))
 		return NULL;
+	
+	/* first check if there are bytes ready to be read */
+	Py_BEGIN_ALLOW_THREADS
+	count = SSL_pending(self->ssl);
+	Py_END_ALLOW_THREADS
 
-	sockstate = check_socket_and_wait_for_timeout(self->Socket, 0);
-	if (sockstate == SOCKET_HAS_TIMED_OUT) {
-		PyErr_SetString(PySSLErrorObject, "The read operation timed out");
-		Py_DECREF(buf);
-		return NULL;
-	} else if (sockstate == SOCKET_TOO_LARGE_FOR_SELECT) {
-		PyErr_SetString(PySSLErrorObject, "Underlying socket too large for select().");
-		return NULL;
+	if (!count) {
+		sockstate = check_socket_and_wait_for_timeout(self->Socket, 0);
+		if (sockstate == SOCKET_HAS_TIMED_OUT) {
+			PyErr_SetString(PySSLErrorObject, "The read operation timed out");
+			Py_DECREF(buf);
+			return NULL;
+		} else if (sockstate == SOCKET_TOO_LARGE_FOR_SELECT) {
+			PyErr_SetString(PySSLErrorObject, "Underlying socket too large for select().");
+			return NULL;
+		}
 	}
 	do {
 		err = 0;
diff --git a/Modules/_testcapimodule.c b/Modules/_testcapimodule.c
index 6d8ea3c..e8881dc 100644
--- a/Modules/_testcapimodule.c
+++ b/Modules/_testcapimodule.c
@@ -10,7 +10,6 @@
 #ifdef WITH_THREAD
 #include "pythread.h"
 #endif /* WITH_THREAD */
-
 static PyObject *TestError;	/* set to exception object in init */
 
 /* Raise TestError with test_name + ": " + msg, and return NULL. */
@@ -235,7 +234,7 @@
 #include "testcapi_long.h"
 
 static PyObject *
-test_longlong_api(PyObject* self)
+test_longlong_api(PyObject* self, PyObject *args)
 {
 	return TESTNAME(raise_test_longlong_error);
 }
@@ -361,6 +360,15 @@
 	return PyLong_FromLong(value);
 }
 
+static PyObject *
+getargs_n(PyObject *self, PyObject *args)
+{
+	Py_ssize_t value;
+	if (!PyArg_ParseTuple(args, "n", &value))
+	return NULL;
+	return PyInt_FromSsize_t(value);
+}
+
 #ifdef HAVE_LONG_LONG
 static PyObject *
 getargs_L(PyObject *self, PyObject *args)
@@ -405,7 +413,7 @@
 
         PyTuple_SET_ITEM(tuple, 0, num);
 
-        value = -1;
+        value = 0;
         if (PyArg_ParseTuple(tuple, "k:test_k_code", &value) < 0)
         	return NULL;
         if (value != ULONG_MAX)
@@ -424,7 +432,7 @@
 
         PyTuple_SET_ITEM(tuple, 0, num);
 
-	value = -1;
+	value = 0;
         if (PyArg_ParseTuple(tuple, "k:test_k_code", &value) < 0)
         	return NULL;
         if (value != (unsigned long)-0x42)
@@ -478,6 +486,26 @@
 	return Py_None;
 }
 
+static
+PyObject *codec_incrementalencoder(PyObject *self, PyObject *args)
+{
+	const char *encoding, *errors = NULL;
+	if (!PyArg_ParseTuple(args, "s|s:test_incrementalencoder",
+			      &encoding, &errors))
+		return NULL;
+	return PyCodec_IncrementalEncoder(encoding, errors);
+}
+
+static
+PyObject *codec_incrementaldecoder(PyObject *self, PyObject *args)
+{
+	const char *encoding, *errors = NULL;
+	if (!PyArg_ParseTuple(args, "s|s:test_incrementaldecoder",
+			      &encoding, &errors))
+		return NULL;
+	return PyCodec_IncrementalDecoder(encoding, errors);
+}
+
 #endif
 
 /* Simple test of _PyLong_NumBits and _PyLong_Sign. */
@@ -563,7 +591,17 @@
 
 #ifdef WITH_THREAD
 
-void _make_call(void *callable)
+/* test_thread_state spawns a thread of its own, and that thread releases
+ * `thread_done` when it's finished.  The driver code has to know when the
+ * thread finishes, because the thread uses a PyObject (the callable) that
+ * may go away when the driver finishes.  The former lack of this explicit
+ * synchronization caused rare segfaults, so rare that they were seen only
+ * on a Mac buildbot (although they were possible on any box).
+ */
+static PyThread_type_lock thread_done = NULL;
+
+static void
+_make_call(void *callable)
 {
 	PyObject *rc;
 	PyGILState_STATE s = PyGILState_Ensure();
@@ -572,32 +610,53 @@
 	PyGILState_Release(s);
 }
 
+/* Same thing, but releases `thread_done` when it returns.  This variant
+ * should be called only from threads spawned by test_thread_state().
+ */
+static void
+_make_call_from_thread(void *callable)
+{
+	_make_call(callable);
+	PyThread_release_lock(thread_done);
+}
+
 static PyObject *
 test_thread_state(PyObject *self, PyObject *args)
 {
 	PyObject *fn;
+
 	if (!PyArg_ParseTuple(args, "O:test_thread_state", &fn))
 		return NULL;
-	/* Ensure Python is setup for threading */
+
+	/* Ensure Python is set up for threading */
 	PyEval_InitThreads();
-	/* Start a new thread for our callback. */
-	PyThread_start_new_thread( _make_call, fn);
+	thread_done = PyThread_allocate_lock();
+	if (thread_done == NULL)
+		return PyErr_NoMemory();
+	PyThread_acquire_lock(thread_done, 1);
+
+	/* Start a new thread with our callback. */
+	PyThread_start_new_thread(_make_call_from_thread, fn);
 	/* Make the callback with the thread lock held by this thread */
 	_make_call(fn);
 	/* Do it all again, but this time with the thread-lock released */
 	Py_BEGIN_ALLOW_THREADS
 	_make_call(fn);
+	PyThread_acquire_lock(thread_done, 1);  /* wait for thread to finish */
 	Py_END_ALLOW_THREADS
+
 	/* And once more with and without a thread
-	   XXX - should use a lock and work out exactly what we are trying 
-	   to test <wink> 
+	   XXX - should use a lock and work out exactly what we are trying
+	   to test <wink>
 	*/
 	Py_BEGIN_ALLOW_THREADS
-	PyThread_start_new_thread( _make_call, fn);
+	PyThread_start_new_thread(_make_call_from_thread, fn);
 	_make_call(fn);
+	PyThread_acquire_lock(thread_done, 1);  /* wait for thread to finish */
 	Py_END_ALLOW_THREADS
-	Py_INCREF(Py_None);
-	return Py_None;
+
+	PyThread_free_lock(thread_done);
+	Py_RETURN_NONE;
 }
 #endif
 
@@ -611,24 +670,29 @@
 	{"test_k_code",		(PyCFunction)test_k_code,	 METH_NOARGS},
 	{"test_null_strings",	(PyCFunction)test_null_strings,	 METH_NOARGS},
 
-	{"getargs_b",		(PyCFunction)getargs_b,		 METH_VARARGS},
-	{"getargs_B",		(PyCFunction)getargs_B,		 METH_VARARGS},
-	{"getargs_H",		(PyCFunction)getargs_H,		 METH_VARARGS},
-	{"getargs_I",		(PyCFunction)getargs_I,		 METH_VARARGS},
-	{"getargs_k",		(PyCFunction)getargs_k,		 METH_VARARGS},
-	{"getargs_i",		(PyCFunction)getargs_i,		 METH_VARARGS},
-	{"getargs_l",		(PyCFunction)getargs_l,		 METH_VARARGS},
+	{"getargs_b",		getargs_b,			 METH_VARARGS},
+	{"getargs_B",		getargs_B,			 METH_VARARGS},
+	{"getargs_H",		getargs_H,			 METH_VARARGS},
+	{"getargs_I",		getargs_I,			 METH_VARARGS},
+	{"getargs_k",		getargs_k,			 METH_VARARGS},
+	{"getargs_i",		getargs_i,			 METH_VARARGS},
+	{"getargs_l",		getargs_l,			 METH_VARARGS},
+	{"getargs_n",		getargs_n, 			 METH_VARARGS},
 #ifdef HAVE_LONG_LONG
-	{"getargs_L",		(PyCFunction)getargs_L,		 METH_VARARGS},
-	{"getargs_K",		(PyCFunction)getargs_K,		 METH_VARARGS},
-	{"test_longlong_api",	(PyCFunction)test_longlong_api,	 METH_NOARGS},
+	{"getargs_L",		getargs_L,			 METH_VARARGS},
+	{"getargs_K",		getargs_K,			 METH_VARARGS},
+	{"test_longlong_api",	test_longlong_api,		 METH_NOARGS},
 	{"test_L_code",		(PyCFunction)test_L_code,	 METH_NOARGS},
+	{"codec_incrementalencoder",
+	 (PyCFunction)codec_incrementalencoder,	 METH_VARARGS},
+	{"codec_incrementaldecoder",
+	 (PyCFunction)codec_incrementaldecoder,	 METH_VARARGS},
 #endif
 #ifdef Py_USING_UNICODE
 	{"test_u_code",		(PyCFunction)test_u_code,	 METH_NOARGS},
 #endif
 #ifdef WITH_THREAD
-	{"_test_thread_state", (PyCFunction)test_thread_state, METH_VARARGS},
+	{"_test_thread_state",  test_thread_state, 		 METH_VARARGS},
 #endif
 	{NULL, NULL} /* sentinel */
 };
@@ -650,8 +714,10 @@
 	PyModule_AddObject(m, "ULONG_MAX", PyLong_FromUnsignedLong(ULONG_MAX));
 	PyModule_AddObject(m, "INT_MIN", PyInt_FromLong(INT_MIN));
 	PyModule_AddObject(m, "LONG_MIN", PyInt_FromLong(LONG_MIN));
+	PyModule_AddObject(m, "PY_SSIZE_T_MIN", PyInt_FromSsize_t(PY_SSIZE_T_MIN));
 	PyModule_AddObject(m, "INT_MAX", PyInt_FromLong(INT_MAX));
 	PyModule_AddObject(m, "LONG_MAX", PyInt_FromLong(LONG_MAX));
+	PyModule_AddObject(m, "PY_SSIZE_T_MAX", PyInt_FromSsize_t(PY_SSIZE_T_MAX));
 
 	TestError = PyErr_NewException("_testcapi.error", NULL, NULL);
 	Py_INCREF(TestError);
diff --git a/Modules/_tkinter.c b/Modules/_tkinter.c
index e8efaa7..c17b6c6 100644
--- a/Modules/_tkinter.c
+++ b/Modules/_tkinter.c
@@ -647,7 +647,7 @@
 
 	strcpy(argv0, className);
 	if (isupper(Py_CHARMASK(argv0[0])))
-		argv0[0] = tolower(argv0[0]);
+		argv0[0] = tolower(Py_CHARMASK(argv0[0]));
 	Tcl_SetVar(v->interp, "argv0", argv0, TCL_GLOBAL_ONLY);
 	ckfree(argv0);
 
diff --git a/Modules/almodule.c b/Modules/almodule.c
index 5254fca..fbeb13a 100644
--- a/Modules/almodule.c
+++ b/Modules/almodule.c
@@ -1482,7 +1482,8 @@
 	}
 	if (alGetParams(resource, pvs, npvs) < 0)
 		goto error;
-	v = PyList_New(npvs);
+	if (!(v = PyList_New(npvs)))
+		goto error;
 	for (i = 0; i < npvs; i++) {
 		if (pvs[i].sizeOut < 0) {
 			char buf[32];
@@ -1692,6 +1693,7 @@
 	if (alGetParamInfo(res, param, &pinfo) < 0)
 		return NULL;
 	v = PyDict_New();
+	if (!v) return NULL;
 
 	item = PyInt_FromLong((long) pinfo.resource);
 	PyDict_SetItemString(v, "resource", item);
diff --git a/Modules/arraymodule.c b/Modules/arraymodule.c
index 1650ff2..52a7f5e 100644
--- a/Modules/arraymodule.c
+++ b/Modules/arraymodule.c
@@ -186,7 +186,8 @@
 	if (!PyArg_Parse(v, "u#;array item must be unicode character", &p, &len))
 		return -1;
 	if (len != 1) {
-		PyErr_SetString(PyExc_TypeError, "array item must be unicode character");
+		PyErr_SetString(PyExc_TypeError,
+				"array item must be unicode character");
 		return -1;
 	}
 	if (i >= 0)
@@ -1163,7 +1164,7 @@
 	register char *p, *q;
 	/* little buffer to hold items while swapping */
 	char tmp[256];	/* 8 is probably enough -- but why skimp */
-	assert(itemsize <= sizeof(tmp));
+	assert((size_t)itemsize <= sizeof(tmp));
 
 	if (self->ob_size > 1) {
 		for (p = self->ob_item,
@@ -1673,7 +1674,8 @@
 			}
 
 			self->ob_size -= slicelength;
-			self->ob_item = PyMem_REALLOC(self->ob_item, itemsize*self->ob_size);
+			self->ob_item = (char *)PyMem_REALLOC(self->ob_item,
+							      itemsize*self->ob_size);
 			self->allocated = self->ob_size;
 
 			return 0;
@@ -1865,7 +1867,7 @@
 				if (n > 0) {
 					arrayobject *self = (arrayobject *)a;
 					char *item = self->ob_item;
-					item = PyMem_Realloc(item, n);
+					item = (char *)PyMem_Realloc(item, n);
 					if (item == NULL) {
 						PyErr_NoMemory();
 						Py_DECREF(a);
@@ -2060,8 +2062,7 @@
 static int
 arrayiter_traverse(arrayiterobject *it, visitproc visit, void *arg)
 {
-	if (it->ao != NULL)
-		return visit((PyObject *)(it->ao), arg);
+	Py_VISIT(it->ao);
 	return 0;
 }
 
diff --git a/Modules/audioop.c b/Modules/audioop.c
index beeacd3..ed70cdf 100644
--- a/Modules/audioop.c
+++ b/Modules/audioop.c
@@ -15,6 +15,8 @@
 #endif
 #endif
 
+typedef short PyInt16;
+
 #if defined(__CHAR_UNSIGNED__)
 #if defined(signed)
 /* This module currently does not work on systems where only unsigned
@@ -22,122 +24,267 @@
 #endif
 #endif
 
-/* Code shamelessly stolen from sox,
+/* Code shamelessly stolen from sox, 12.17.7, g711.c
 ** (c) Craig Reese, Joe Campbell and Jeff Poskanzer 1989 */
 
-#define MINLIN -32768
-#define MAXLIN 32767
-#define LINCLIP(x) do { if ( x < MINLIN ) x = MINLIN ; \
-                        else if ( x > MAXLIN ) x = MAXLIN; \
-                      } while ( 0 )
-
-static unsigned char st_linear_to_ulaw(int sample);
-
-/*
-** This macro converts from ulaw to 16 bit linear, faster.
-**
-** Jef Poskanzer
-** 23 October 1989
-**
-** Input: 8 bit ulaw sample
-** Output: signed 16 bit linear sample
-*/
-#define st_ulaw_to_linear(ulawbyte) ulaw_table[ulawbyte]
-
-static int ulaw_table[256] = {
-	-32124, -31100, -30076, -29052, -28028, -27004, -25980, -24956,
-	-23932, -22908, -21884, -20860, -19836, -18812, -17788, -16764,
-	-15996, -15484, -14972, -14460, -13948, -13436, -12924, -12412,
-	-11900, -11388, -10876, -10364,  -9852,  -9340,  -8828,  -8316,
-	-7932,  -7676,  -7420,  -7164,  -6908,  -6652,  -6396,  -6140,
-	-5884,  -5628,  -5372,  -5116,  -4860,  -4604,  -4348,  -4092,
-	-3900,  -3772,  -3644,  -3516,  -3388,  -3260,  -3132,  -3004,
-	-2876,  -2748,  -2620,  -2492,  -2364,  -2236,  -2108,  -1980,
-	-1884,  -1820,  -1756,  -1692,  -1628,  -1564,  -1500,  -1436,
-	-1372,  -1308,  -1244,  -1180,  -1116,  -1052,   -988,   -924,
-	-876,   -844,   -812,   -780,   -748,   -716,   -684,   -652,
-	-620,   -588,   -556,   -524,   -492,   -460,   -428,   -396,
-	-372,   -356,   -340,   -324,   -308,   -292,   -276,   -260,
-	-244,   -228,   -212,   -196,   -180,   -164,   -148,   -132,
-	-120,   -112,   -104,    -96,    -88,    -80,    -72,    -64,
-	-56,    -48,    -40,    -32,    -24,    -16,     -8,      0,
-	32124,  31100,  30076,  29052,  28028,  27004,  25980,  24956,
-	23932,  22908,  21884,  20860,  19836,  18812,  17788,  16764,
-	15996,  15484,  14972,  14460,  13948,  13436,  12924,  12412,
-	11900,  11388,  10876,  10364,   9852,   9340,   8828,   8316,
-	7932,   7676,   7420,   7164,   6908,   6652,   6396,   6140,
-	5884,   5628,   5372,   5116,   4860,   4604,   4348,   4092,
-	3900,   3772,   3644,   3516,   3388,   3260,   3132,   3004,
-	2876,   2748,   2620,   2492,   2364,   2236,   2108,   1980,
-	1884,   1820,   1756,   1692,   1628,   1564,   1500,   1436,
-	1372,   1308,   1244,   1180,   1116,   1052,    988,    924,
-	876,    844,    812,    780,    748,    716,    684,    652,
-	620,    588,    556,    524,    492,    460,    428,    396,
-	372,    356,    340,    324,    308,    292,    276,    260,
-	244,    228,    212,    196,    180,    164,    148,    132,
-	120,    112,    104,     96,     88,     80,     72,     64,
-	56,     48,     40,     32,     24,     16,      8,      0 };
-
-/* #define ZEROTRAP */   /* turn on the trap as per the MIL-STD */
+/* From g711.c:
+ *
+ * December 30, 1994:
+ * Functions linear2alaw, linear2ulaw have been updated to correctly
+ * convert unquantized 16 bit values.
+ * Tables for direct u- to A-law and A- to u-law conversions have been
+ * corrected.
+ * Borge Lindberg, Center for PersonKommunikation, Aalborg University.
+ * bli@cpk.auc.dk
+ *
+ */
 #define BIAS 0x84   /* define the add-in bias for 16 bit samples */
 #define CLIP 32635
+#define SIGN_BIT        (0x80)          /* Sign bit for a A-law byte. */
+#define QUANT_MASK      (0xf)           /* Quantization field mask. */
+#define SEG_SHIFT       (4)             /* Left shift for segment number. */
+#define SEG_MASK        (0x70)          /* Segment field mask. */
 
-static unsigned char
-st_linear_to_ulaw(int sample)
+static PyInt16 seg_aend[8] = {0x1F, 0x3F, 0x7F, 0xFF,
+                              0x1FF, 0x3FF, 0x7FF, 0xFFF};
+static PyInt16 seg_uend[8] = {0x3F, 0x7F, 0xFF, 0x1FF,
+                              0x3FF, 0x7FF, 0xFFF, 0x1FFF};
+
+static PyInt16
+search(PyInt16 val, PyInt16 *table, int size)
 {
-	static int exp_lut[256] = {0,0,1,1,2,2,2,2,3,3,3,3,3,3,3,3,
-				   4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,4,
-				   5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-				   5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,
-				   6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-				   6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-				   6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-				   6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,6,
-				   7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
-				   7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
-				   7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
-				   7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
-				   7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
-				   7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
-				   7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,
-				   7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7};
-	int sign, exponent, mantissa;
-	unsigned char ulawbyte;
+        int i;
 
-	/* Get the sample into sign-magnitude. */
-	sign = (sample >> 8) & 0x80;		/* set aside the sign */
-	if ( sign != 0 ) sample = -sample;	/* get magnitude */
-	if ( sample > CLIP ) sample = CLIP;	/* clip the magnitude */
+        for (i = 0; i < size; i++) {
+                if (val <= *table++)
+                        return (i);
+        }
+        return (size);
+}
+#define st_ulaw2linear16(uc) (_st_ulaw2linear16[uc])
+#define st_alaw2linear16(uc) (_st_alaw2linear16[uc])
 
-	/* Convert from 16 bit linear to ulaw. */
-	sample = sample + BIAS;
-	exponent = exp_lut[( sample >> 7 ) & 0xFF];
-	mantissa = ( sample >> ( exponent + 3 ) ) & 0x0F;
-	ulawbyte = ~ ( sign | ( exponent << 4 ) | mantissa );
-#ifdef ZEROTRAP
-	if ( ulawbyte == 0 ) ulawbyte = 0x02;	/* optional CCITT trap */
-#endif
+static PyInt16 _st_ulaw2linear16[256] = {
+    -32124,  -31100,  -30076,  -29052,  -28028,  -27004,  -25980,
+    -24956,  -23932,  -22908,  -21884,  -20860,  -19836,  -18812,
+    -17788,  -16764,  -15996,  -15484,  -14972,  -14460,  -13948,
+    -13436,  -12924,  -12412,  -11900,  -11388,  -10876,  -10364,
+     -9852,   -9340,   -8828,   -8316,   -7932,   -7676,   -7420,
+     -7164,   -6908,   -6652,   -6396,   -6140,   -5884,   -5628,
+     -5372,   -5116,   -4860,   -4604,   -4348,   -4092,   -3900,
+     -3772,   -3644,   -3516,   -3388,   -3260,   -3132,   -3004,
+     -2876,   -2748,   -2620,   -2492,   -2364,   -2236,   -2108,
+     -1980,   -1884,   -1820,   -1756,   -1692,   -1628,   -1564,
+     -1500,   -1436,   -1372,   -1308,   -1244,   -1180,   -1116,
+     -1052,    -988,    -924,    -876,    -844,    -812,    -780,
+      -748,    -716,    -684,    -652,    -620,    -588,    -556,
+      -524,    -492,    -460,    -428,    -396,    -372,    -356,
+      -340,    -324,    -308,    -292,    -276,    -260,    -244,
+      -228,    -212,    -196,    -180,    -164,    -148,    -132,
+      -120,    -112,    -104,     -96,     -88,     -80,     -72,
+       -64,     -56,     -48,     -40,     -32,     -24,     -16,
+        -8,       0,   32124,   31100,   30076,   29052,   28028,
+     27004,   25980,   24956,   23932,   22908,   21884,   20860,
+     19836,   18812,   17788,   16764,   15996,   15484,   14972,
+     14460,   13948,   13436,   12924,   12412,   11900,   11388,
+     10876,   10364,    9852,    9340,    8828,    8316,    7932,
+      7676,    7420,    7164,    6908,    6652,    6396,    6140,
+      5884,    5628,    5372,    5116,    4860,    4604,    4348,
+      4092,    3900,    3772,    3644,    3516,    3388,    3260,
+      3132,    3004,    2876,    2748,    2620,    2492,    2364,
+      2236,    2108,    1980,    1884,    1820,    1756,    1692,
+      1628,    1564,    1500,    1436,    1372,    1308,    1244,
+      1180,    1116,    1052,     988,     924,     876,     844,
+       812,     780,     748,     716,     684,     652,     620,
+       588,     556,     524,     492,     460,     428,     396,
+       372,     356,     340,     324,     308,     292,     276,
+       260,     244,     228,     212,     196,     180,     164,
+       148,     132,     120,     112,     104,      96,      88,
+        80,      72,      64,      56,      48,      40,      32,
+        24,      16,       8,       0
+};
 
-	return ulawbyte;
+/*
+ * linear2ulaw() accepts a 14-bit signed integer and encodes it as u-law data
+ * stored in a unsigned char.  This function should only be called with
+ * the data shifted such that it only contains information in the lower
+ * 14-bits.
+ *
+ * In order to simplify the encoding process, the original linear magnitude
+ * is biased by adding 33 which shifts the encoding range from (0 - 8158) to
+ * (33 - 8191). The result can be seen in the following encoding table:
+ *
+ *      Biased Linear Input Code        Compressed Code
+ *      ------------------------        ---------------
+ *      00000001wxyza                   000wxyz
+ *      0000001wxyzab                   001wxyz
+ *      000001wxyzabc                   010wxyz
+ *      00001wxyzabcd                   011wxyz
+ *      0001wxyzabcde                   100wxyz
+ *      001wxyzabcdef                   101wxyz
+ *      01wxyzabcdefg                   110wxyz
+ *      1wxyzabcdefgh                   111wxyz
+ *
+ * Each biased linear code has a leading 1 which identifies the segment
+ * number. The value of the segment number is equal to 7 minus the number
+ * of leading 0's. The quantization interval is directly available as the
+ * four bits wxyz.  * The trailing bits (a - h) are ignored.
+ *
+ * Ordinarily the complement of the resulting code word is used for
+ * transmission, and so the code word is complemented before it is returned.
+ *
+ * For further information see John C. Bellamy's Digital Telephony, 1982,
+ * John Wiley & Sons, pps 98-111 and 472-476.
+ */
+static unsigned char
+st_14linear2ulaw(PyInt16 pcm_val)	/* 2's complement (14-bit range) */
+{
+        PyInt16         mask;
+        PyInt16         seg;
+        unsigned char   uval;
+
+        /* The original sox code does this in the calling function, not here */
+        pcm_val = pcm_val >> 2;
+
+        /* u-law inverts all bits */
+        /* Get the sign and the magnitude of the value. */
+        if (pcm_val < 0) {
+                pcm_val = -pcm_val;
+                mask = 0x7F;
+        } else {
+                mask = 0xFF;
+        }
+        if ( pcm_val > CLIP ) pcm_val = CLIP;           /* clip the magnitude */
+        pcm_val += (BIAS >> 2);
+
+        /* Convert the scaled magnitude to segment number. */
+        seg = search(pcm_val, seg_uend, 8);
+
+        /*
+         * Combine the sign, segment, quantization bits;
+         * and complement the code word.
+         */
+        if (seg >= 8)           /* out of range, return maximum value. */
+                return (unsigned char) (0x7F ^ mask);
+        else {
+                uval = (unsigned char) (seg << 4) | ((pcm_val >> (seg + 1)) & 0xF);
+                return (uval ^ mask);
+        }
+
+}
+
+static PyInt16 _st_alaw2linear16[256] = {
+     -5504,   -5248,   -6016,   -5760,   -4480,   -4224,   -4992,
+     -4736,   -7552,   -7296,   -8064,   -7808,   -6528,   -6272,
+     -7040,   -6784,   -2752,   -2624,   -3008,   -2880,   -2240,
+     -2112,   -2496,   -2368,   -3776,   -3648,   -4032,   -3904,
+     -3264,   -3136,   -3520,   -3392,  -22016,  -20992,  -24064,
+    -23040,  -17920,  -16896,  -19968,  -18944,  -30208,  -29184,
+    -32256,  -31232,  -26112,  -25088,  -28160,  -27136,  -11008,
+    -10496,  -12032,  -11520,   -8960,   -8448,   -9984,   -9472,
+    -15104,  -14592,  -16128,  -15616,  -13056,  -12544,  -14080,
+    -13568,    -344,    -328,    -376,    -360,    -280,    -264,
+      -312,    -296,    -472,    -456,    -504,    -488,    -408,
+      -392,    -440,    -424,     -88,     -72,    -120,    -104,
+       -24,      -8,     -56,     -40,    -216,    -200,    -248,
+      -232,    -152,    -136,    -184,    -168,   -1376,   -1312,
+     -1504,   -1440,   -1120,   -1056,   -1248,   -1184,   -1888,
+     -1824,   -2016,   -1952,   -1632,   -1568,   -1760,   -1696,
+      -688,    -656,    -752,    -720,    -560,    -528,    -624,
+      -592,    -944,    -912,   -1008,    -976,    -816,    -784,
+      -880,    -848,    5504,    5248,    6016,    5760,    4480,
+      4224,    4992,    4736,    7552,    7296,    8064,    7808,
+      6528,    6272,    7040,    6784,    2752,    2624,    3008,
+      2880,    2240,    2112,    2496,    2368,    3776,    3648,
+      4032,    3904,    3264,    3136,    3520,    3392,   22016,
+     20992,   24064,   23040,   17920,   16896,   19968,   18944,
+     30208,   29184,   32256,   31232,   26112,   25088,   28160,
+     27136,   11008,   10496,   12032,   11520,    8960,    8448,
+      9984,    9472,   15104,   14592,   16128,   15616,   13056,
+     12544,   14080,   13568,     344,     328,     376,     360,
+       280,     264,     312,     296,     472,     456,     504,
+       488,     408,     392,     440,     424,      88,      72,
+       120,     104,      24,       8,      56,      40,     216,
+       200,     248,     232,     152,     136,     184,     168,
+      1376,    1312,    1504,    1440,    1120,    1056,    1248,
+      1184,    1888,    1824,    2016,    1952,    1632,    1568,
+      1760,    1696,     688,     656,     752,     720,     560,
+       528,     624,     592,     944,     912,    1008,     976,
+       816,     784,     880,     848
+};
+
+/*
+ * linear2alaw() accepts an 13-bit signed integer and encodes it as A-law data
+ * stored in a unsigned char.  This function should only be called with
+ * the data shifted such that it only contains information in the lower
+ * 13-bits.
+ *
+ *              Linear Input Code       Compressed Code
+ *      ------------------------        ---------------
+ *      0000000wxyza                    000wxyz
+ *      0000001wxyza                    001wxyz
+ *      000001wxyzab                    010wxyz
+ *      00001wxyzabc                    011wxyz
+ *      0001wxyzabcd                    100wxyz
+ *      001wxyzabcde                    101wxyz
+ *      01wxyzabcdef                    110wxyz
+ *      1wxyzabcdefg                    111wxyz
+ *
+ * For further information see John C. Bellamy's Digital Telephony, 1982,
+ * John Wiley & Sons, pps 98-111 and 472-476.
+ */
+static unsigned char
+st_linear2alaw(PyInt16 pcm_val)	/* 2's complement (13-bit range) */
+{
+        PyInt16         mask;
+        short           seg;
+        unsigned char   aval;
+
+        /* The original sox code does this in the calling function, not here */
+        pcm_val = pcm_val >> 3;
+
+        /* A-law using even bit inversion */
+        if (pcm_val >= 0) {
+                mask = 0xD5;            /* sign (7th) bit = 1 */
+        } else {
+                mask = 0x55;            /* sign bit = 0 */
+                pcm_val = -pcm_val - 1;
+        }
+
+        /* Convert the scaled magnitude to segment number. */
+        seg = search(pcm_val, seg_aend, 8);
+
+        /* Combine the sign, segment, and quantization bits. */
+
+        if (seg >= 8)           /* out of range, return maximum value. */
+                return (unsigned char) (0x7F ^ mask);
+        else {
+                aval = (unsigned char) seg << SEG_SHIFT;
+                if (seg < 2)
+                        aval |= (pcm_val >> 1) & QUANT_MASK;
+                else
+                        aval |= (pcm_val >> seg) & QUANT_MASK;
+                return (aval ^ mask);
+        }
 }
 /* End of code taken from sox */
 
 /* Intel ADPCM step variation table */
 static int indexTable[16] = {
-	-1, -1, -1, -1, 2, 4, 6, 8,
-	-1, -1, -1, -1, 2, 4, 6, 8,
+        -1, -1, -1, -1, 2, 4, 6, 8,
+        -1, -1, -1, -1, 2, 4, 6, 8,
 };
 
 static int stepsizeTable[89] = {
-	7, 8, 9, 10, 11, 12, 13, 14, 16, 17,
-	19, 21, 23, 25, 28, 31, 34, 37, 41, 45,
-	50, 55, 60, 66, 73, 80, 88, 97, 107, 118,
-	130, 143, 157, 173, 190, 209, 230, 253, 279, 307,
-	337, 371, 408, 449, 494, 544, 598, 658, 724, 796,
-	876, 963, 1060, 1166, 1282, 1411, 1552, 1707, 1878, 2066,
-	2272, 2499, 2749, 3024, 3327, 3660, 4026, 4428, 4871, 5358,
-	5894, 6484, 7132, 7845, 8630, 9493, 10442, 11487, 12635, 13899,
-	15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794, 32767
+        7, 8, 9, 10, 11, 12, 13, 14, 16, 17,
+        19, 21, 23, 25, 28, 31, 34, 37, 41, 45,
+        50, 55, 60, 66, 73, 80, 88, 97, 107, 118,
+        130, 143, 157, 173, 190, 209, 230, 253, 279, 307,
+        337, 371, 408, 449, 494, 544, 598, 658, 724, 796,
+        876, 963, 1060, 1166, 1282, 1411, 1552, 1707, 1878, 2066,
+        2272, 2499, 2749, 3024, 3327, 3660, 4026, 4428, 4871, 5358,
+        5894, 6484, 7132, 7845, 8630, 9493, 10442, 11487, 12635, 13899,
+        15289, 16818, 18500, 20350, 22385, 24623, 27086, 29794, 32767
 };
     
 #define CHARP(cp, i) ((signed char *)(cp+i))
@@ -151,137 +298,137 @@
 static PyObject *
 audioop_getsample(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	int len, size, val = 0;
-	int i;
+        signed char *cp;
+        int len, size, val = 0;
+        int i;
 
-	if ( !PyArg_Parse(args, "(s#ii)", &cp, &len, &size, &i) )
-		return 0;
-	if ( size != 1 && size != 2 && size != 4 ) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
-	if ( i < 0 || i >= len/size ) {
-		PyErr_SetString(AudioopError, "Index out of range");
-		return 0;
-	}
-	if ( size == 1 )      val = (int)*CHARP(cp, i);
-	else if ( size == 2 ) val = (int)*SHORTP(cp, i*2);
-	else if ( size == 4 ) val = (int)*LONGP(cp, i*4);
-	return PyInt_FromLong(val);
+        if ( !PyArg_Parse(args, "(s#ii)", &cp, &len, &size, &i) )
+                return 0;
+        if ( size != 1 && size != 2 && size != 4 ) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
+        if ( i < 0 || i >= len/size ) {
+                PyErr_SetString(AudioopError, "Index out of range");
+                return 0;
+        }
+        if ( size == 1 )      val = (int)*CHARP(cp, i);
+        else if ( size == 2 ) val = (int)*SHORTP(cp, i*2);
+        else if ( size == 4 ) val = (int)*LONGP(cp, i*4);
+        return PyInt_FromLong(val);
 }
 
 static PyObject *
 audioop_max(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	int len, size, val = 0;
-	int i;
-	int max = 0;
+        signed char *cp;
+        int len, size, val = 0;
+        int i;
+        int max = 0;
 
-	if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
-		return 0;
-	if ( size != 1 && size != 2 && size != 4 ) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
-	for ( i=0; i<len; i+= size) {
-		if ( size == 1 )      val = (int)*CHARP(cp, i);
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = (int)*LONGP(cp, i);
-		if ( val < 0 ) val = (-val);
-		if ( val > max ) max = val;
-	}
-	return PyInt_FromLong(max);
+        if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
+                return 0;
+        if ( size != 1 && size != 2 && size != 4 ) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
+        for ( i=0; i<len; i+= size) {
+                if ( size == 1 )      val = (int)*CHARP(cp, i);
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = (int)*LONGP(cp, i);
+                if ( val < 0 ) val = (-val);
+                if ( val > max ) max = val;
+        }
+        return PyInt_FromLong(max);
 }
 
 static PyObject *
 audioop_minmax(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	int len, size, val = 0;
-	int i;
-	int min = 0x7fffffff, max = -0x7fffffff;
+        signed char *cp;
+        int len, size, val = 0;
+        int i;
+        int min = 0x7fffffff, max = -0x7fffffff;
 
-	if (!PyArg_Parse(args, "(s#i)", &cp, &len, &size))
-		return NULL;
-	if (size != 1 && size != 2 && size != 4) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return NULL;
-	}
-	for (i = 0; i < len; i += size) {
-		if (size == 1) val = (int) *CHARP(cp, i);
-		else if (size == 2) val = (int) *SHORTP(cp, i);
-		else if (size == 4) val = (int) *LONGP(cp, i);
-		if (val > max) max = val;
-		if (val < min) min = val;
-	}
-	return Py_BuildValue("(ii)", min, max);
+        if (!PyArg_Parse(args, "(s#i)", &cp, &len, &size))
+                return NULL;
+        if (size != 1 && size != 2 && size != 4) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return NULL;
+        }
+        for (i = 0; i < len; i += size) {
+                if (size == 1) val = (int) *CHARP(cp, i);
+                else if (size == 2) val = (int) *SHORTP(cp, i);
+                else if (size == 4) val = (int) *LONGP(cp, i);
+                if (val > max) max = val;
+                if (val < min) min = val;
+        }
+        return Py_BuildValue("(ii)", min, max);
 }
 
 static PyObject *
 audioop_avg(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	int len, size, val = 0;
-	int i;
-	double avg = 0.0;
+        signed char *cp;
+        int len, size, val = 0;
+        int i;
+        double avg = 0.0;
 
-	if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
-		return 0;
-	if ( size != 1 && size != 2 && size != 4 ) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
-	for ( i=0; i<len; i+= size) {
-		if ( size == 1 )      val = (int)*CHARP(cp, i);
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = (int)*LONGP(cp, i);
-		avg += val;
-	}
-	if ( len == 0 )
-		val = 0;
-	else
-		val = (int)(avg / (double)(len/size));
-	return PyInt_FromLong(val);
+        if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
+                return 0;
+        if ( size != 1 && size != 2 && size != 4 ) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
+        for ( i=0; i<len; i+= size) {
+                if ( size == 1 )      val = (int)*CHARP(cp, i);
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = (int)*LONGP(cp, i);
+                avg += val;
+        }
+        if ( len == 0 )
+                val = 0;
+        else
+                val = (int)(avg / (double)(len/size));
+        return PyInt_FromLong(val);
 }
 
 static PyObject *
 audioop_rms(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	int len, size, val = 0;
-	int i;
-	double sum_squares = 0.0;
+        signed char *cp;
+        int len, size, val = 0;
+        int i;
+        double sum_squares = 0.0;
 
-	if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
-		return 0;
-	if ( size != 1 && size != 2 && size != 4 ) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
-	for ( i=0; i<len; i+= size) {
-		if ( size == 1 )      val = (int)*CHARP(cp, i);
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = (int)*LONGP(cp, i);
-		sum_squares += (double)val*(double)val;
-	}
-	if ( len == 0 )
-		val = 0;
-	else
-		val = (int)sqrt(sum_squares / (double)(len/size));
-	return PyInt_FromLong(val);
+        if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
+                return 0;
+        if ( size != 1 && size != 2 && size != 4 ) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
+        for ( i=0; i<len; i+= size) {
+                if ( size == 1 )      val = (int)*CHARP(cp, i);
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = (int)*LONGP(cp, i);
+                sum_squares += (double)val*(double)val;
+        }
+        if ( len == 0 )
+                val = 0;
+        else
+                val = (int)sqrt(sum_squares / (double)(len/size));
+        return PyInt_FromLong(val);
 }
 
 static double _sum2(short *a, short *b, int len)
 {
-	int i;
-	double sum = 0.0;
+        int i;
+        double sum = 0.0;
 
-	for( i=0; i<len; i++) {
-		sum = sum + (double)a[i]*(double)b[i];
-	}
-	return sum;
+        for( i=0; i<len; i++) {
+                sum = sum + (double)a[i]*(double)b[i];
+        }
+        return sum;
 }
 
 /*
@@ -303,15 +450,15 @@
 ** vj = ( sum(A[j+i]^2)*sum(R[i]^2) - sum(A[j+i]R[i])^2 ) / sum( A[j+i]^2 )
 **
 ** In the code variables correspond as follows:
-** cp1		A
-** cp2		R
-** len1		N
-** len2		n
-** aj_m1	A[j-1]
-** aj_lm1	A[j+n-1]
-** sum_ri_2	sum(R[i]^2)
-** sum_aij_2	sum(A[i+j]^2)
-** sum_aij_ri	sum(A[i+j]R[i])
+** cp1          A
+** cp2          R
+** len1         N
+** len2         n
+** aj_m1        A[j-1]
+** aj_lm1       A[j+n-1]
+** sum_ri_2     sum(R[i]^2)
+** sum_aij_2    sum(A[i+j]^2)
+** sum_aij_ri   sum(A[i+j]R[i])
 **
 ** sum_ri is calculated once, sum_aij_2 is updated each step and sum_aij_ri
 ** is completely recalculated each step.
@@ -319,55 +466,55 @@
 static PyObject *
 audioop_findfit(PyObject *self, PyObject *args)
 {
-	short *cp1, *cp2;
-	int len1, len2;
-	int j, best_j;
-	double aj_m1, aj_lm1;
-	double sum_ri_2, sum_aij_2, sum_aij_ri, result, best_result, factor;
+        short *cp1, *cp2;
+        int len1, len2;
+        int j, best_j;
+        double aj_m1, aj_lm1;
+        double sum_ri_2, sum_aij_2, sum_aij_ri, result, best_result, factor;
 
-	if ( !PyArg_Parse(args, "(s#s#)", &cp1, &len1, &cp2, &len2) )
-		return 0;
-	if ( len1 & 1 || len2 & 1 ) {
-		PyErr_SetString(AudioopError, "Strings should be even-sized");
-		return 0;
-	}
-	len1 >>= 1;
-	len2 >>= 1;
+        if ( !PyArg_Parse(args, "(s#s#)", &cp1, &len1, &cp2, &len2) )
+                return 0;
+        if ( len1 & 1 || len2 & 1 ) {
+                PyErr_SetString(AudioopError, "Strings should be even-sized");
+                return 0;
+        }
+        len1 >>= 1;
+        len2 >>= 1;
     
-	if ( len1 < len2 ) {
-		PyErr_SetString(AudioopError, "First sample should be longer");
-		return 0;
-	}
-	sum_ri_2 = _sum2(cp2, cp2, len2);
-	sum_aij_2 = _sum2(cp1, cp1, len2);
-	sum_aij_ri = _sum2(cp1, cp2, len2);
+        if ( len1 < len2 ) {
+                PyErr_SetString(AudioopError, "First sample should be longer");
+                return 0;
+        }
+        sum_ri_2 = _sum2(cp2, cp2, len2);
+        sum_aij_2 = _sum2(cp1, cp1, len2);
+        sum_aij_ri = _sum2(cp1, cp2, len2);
 
-	result = (sum_ri_2*sum_aij_2 - sum_aij_ri*sum_aij_ri) / sum_aij_2;
+        result = (sum_ri_2*sum_aij_2 - sum_aij_ri*sum_aij_ri) / sum_aij_2;
 
-	best_result = result;
-	best_j = 0;
-	j = 0;
+        best_result = result;
+        best_j = 0;
+        j = 0;
 
-	for ( j=1; j<=len1-len2; j++) {
-		aj_m1 = (double)cp1[j-1];
-		aj_lm1 = (double)cp1[j+len2-1];
+        for ( j=1; j<=len1-len2; j++) {
+                aj_m1 = (double)cp1[j-1];
+                aj_lm1 = (double)cp1[j+len2-1];
 
-		sum_aij_2 = sum_aij_2 + aj_lm1*aj_lm1 - aj_m1*aj_m1;
-		sum_aij_ri = _sum2(cp1+j, cp2, len2);
+                sum_aij_2 = sum_aij_2 + aj_lm1*aj_lm1 - aj_m1*aj_m1;
+                sum_aij_ri = _sum2(cp1+j, cp2, len2);
 
-		result = (sum_ri_2*sum_aij_2 - sum_aij_ri*sum_aij_ri)
-			/ sum_aij_2;
+                result = (sum_ri_2*sum_aij_2 - sum_aij_ri*sum_aij_ri)
+                        / sum_aij_2;
 
-		if ( result < best_result ) {
-			best_result = result;
-			best_j = j;
-		}
-	
-	}
+                if ( result < best_result ) {
+                        best_result = result;
+                        best_j = j;
+                }
+        
+        }
 
-	factor = _sum2(cp1+best_j, cp2, len2) / sum_ri_2;
+        factor = _sum2(cp1+best_j, cp2, len2) / sum_ri_2;
     
-	return Py_BuildValue("(if)", best_j, factor);
+        return Py_BuildValue("(if)", best_j, factor);
 }
 
 /*
@@ -377,27 +524,27 @@
 static PyObject *
 audioop_findfactor(PyObject *self, PyObject *args)
 {
-	short *cp1, *cp2;
-	int len1, len2;
-	double sum_ri_2, sum_aij_ri, result;
+        short *cp1, *cp2;
+        int len1, len2;
+        double sum_ri_2, sum_aij_ri, result;
 
-	if ( !PyArg_Parse(args, "(s#s#)", &cp1, &len1, &cp2, &len2) )
-		return 0;
-	if ( len1 & 1 || len2 & 1 ) {
-		PyErr_SetString(AudioopError, "Strings should be even-sized");
-		return 0;
-	}
-	if ( len1 != len2 ) {
-		PyErr_SetString(AudioopError, "Samples should be same size");
-		return 0;
-	}
-	len2 >>= 1;
-	sum_ri_2 = _sum2(cp2, cp2, len2);
-	sum_aij_ri = _sum2(cp1, cp2, len2);
+        if ( !PyArg_Parse(args, "(s#s#)", &cp1, &len1, &cp2, &len2) )
+                return 0;
+        if ( len1 & 1 || len2 & 1 ) {
+                PyErr_SetString(AudioopError, "Strings should be even-sized");
+                return 0;
+        }
+        if ( len1 != len2 ) {
+                PyErr_SetString(AudioopError, "Samples should be same size");
+                return 0;
+        }
+        len2 >>= 1;
+        sum_ri_2 = _sum2(cp2, cp2, len2);
+        sum_aij_ri = _sum2(cp1, cp2, len2);
 
-	result = sum_aij_ri / sum_ri_2;
+        result = sum_aij_ri / sum_ri_2;
 
-	return PyFloat_FromDouble(result);
+        return PyFloat_FromDouble(result);
 }
 
 /*
@@ -407,981 +554,1053 @@
 static PyObject *
 audioop_findmax(PyObject *self, PyObject *args)
 {
-	short *cp1;
-	int len1, len2;
-	int j, best_j;
-	double aj_m1, aj_lm1;
-	double result, best_result;
+        short *cp1;
+        int len1, len2;
+        int j, best_j;
+        double aj_m1, aj_lm1;
+        double result, best_result;
 
-	if ( !PyArg_Parse(args, "(s#i)", &cp1, &len1, &len2) )
-		return 0;
-	if ( len1 & 1 ) {
-		PyErr_SetString(AudioopError, "Strings should be even-sized");
-		return 0;
-	}
-	len1 >>= 1;
+        if ( !PyArg_Parse(args, "(s#i)", &cp1, &len1, &len2) )
+                return 0;
+        if ( len1 & 1 ) {
+                PyErr_SetString(AudioopError, "Strings should be even-sized");
+                return 0;
+        }
+        len1 >>= 1;
     
-	if ( len1 < len2 ) {
-		PyErr_SetString(AudioopError, "Input sample should be longer");
-		return 0;
-	}
+        if ( len1 < len2 ) {
+                PyErr_SetString(AudioopError, "Input sample should be longer");
+                return 0;
+        }
 
-	result = _sum2(cp1, cp1, len2);
+        result = _sum2(cp1, cp1, len2);
 
-	best_result = result;
-	best_j = 0;
-	j = 0;
+        best_result = result;
+        best_j = 0;
+        j = 0;
 
-	for ( j=1; j<=len1-len2; j++) {
-		aj_m1 = (double)cp1[j-1];
-		aj_lm1 = (double)cp1[j+len2-1];
+        for ( j=1; j<=len1-len2; j++) {
+                aj_m1 = (double)cp1[j-1];
+                aj_lm1 = (double)cp1[j+len2-1];
 
-		result = result + aj_lm1*aj_lm1 - aj_m1*aj_m1;
+                result = result + aj_lm1*aj_lm1 - aj_m1*aj_m1;
 
-		if ( result > best_result ) {
-			best_result = result;
-			best_j = j;
-		}
-	
-	}
+                if ( result > best_result ) {
+                        best_result = result;
+                        best_j = j;
+                }
+        
+        }
 
-	return PyInt_FromLong(best_j);
+        return PyInt_FromLong(best_j);
 }
 
 static PyObject *
 audioop_avgpp(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	int len, size, val = 0, prevval = 0, prevextremevalid = 0,
-		prevextreme = 0;
-	int i;
-	double avg = 0.0;
-	int diff, prevdiff, extremediff, nextreme = 0;
+        signed char *cp;
+        int len, size, val = 0, prevval = 0, prevextremevalid = 0,
+                prevextreme = 0;
+        int i;
+        double avg = 0.0;
+        int diff, prevdiff, extremediff, nextreme = 0;
 
-	if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
-		return 0;
-	if ( size != 1 && size != 2 && size != 4 ) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
-	/* Compute first delta value ahead. Also automatically makes us
-	** skip the first extreme value
-	*/
-	if ( size == 1 )      prevval = (int)*CHARP(cp, 0);
-	else if ( size == 2 ) prevval = (int)*SHORTP(cp, 0);
-	else if ( size == 4 ) prevval = (int)*LONGP(cp, 0);
-	if ( size == 1 )      val = (int)*CHARP(cp, size);
-	else if ( size == 2 ) val = (int)*SHORTP(cp, size);
-	else if ( size == 4 ) val = (int)*LONGP(cp, size);
-	prevdiff = val - prevval;
+        if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
+                return 0;
+        if ( size != 1 && size != 2 && size != 4 ) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
+        /* Compute first delta value ahead. Also automatically makes us
+        ** skip the first extreme value
+        */
+        if ( size == 1 )      prevval = (int)*CHARP(cp, 0);
+        else if ( size == 2 ) prevval = (int)*SHORTP(cp, 0);
+        else if ( size == 4 ) prevval = (int)*LONGP(cp, 0);
+        if ( size == 1 )      val = (int)*CHARP(cp, size);
+        else if ( size == 2 ) val = (int)*SHORTP(cp, size);
+        else if ( size == 4 ) val = (int)*LONGP(cp, size);
+        prevdiff = val - prevval;
     
-	for ( i=size; i<len; i+= size) {
-		if ( size == 1 )      val = (int)*CHARP(cp, i);
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = (int)*LONGP(cp, i);
-		diff = val - prevval;
-		if ( diff*prevdiff < 0 ) {
-			/* Derivative changed sign. Compute difference to last
-			** extreme value and remember.
-			*/
-			if ( prevextremevalid ) {
-				extremediff = prevval - prevextreme;
-				if ( extremediff < 0 )
-					extremediff = -extremediff;
-				avg += extremediff;
-				nextreme++;
-			}
-			prevextremevalid = 1;
-			prevextreme = prevval;
-		}
-		prevval = val;
-		if ( diff != 0 )
-			prevdiff = diff;	
-	}
-	if ( nextreme == 0 )
-		val = 0;
-	else
-		val = (int)(avg / (double)nextreme);
-	return PyInt_FromLong(val);
+        for ( i=size; i<len; i+= size) {
+                if ( size == 1 )      val = (int)*CHARP(cp, i);
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = (int)*LONGP(cp, i);
+                diff = val - prevval;
+                if ( diff*prevdiff < 0 ) {
+                        /* Derivative changed sign. Compute difference to last
+                        ** extreme value and remember.
+                        */
+                        if ( prevextremevalid ) {
+                                extremediff = prevval - prevextreme;
+                                if ( extremediff < 0 )
+                                        extremediff = -extremediff;
+                                avg += extremediff;
+                                nextreme++;
+                        }
+                        prevextremevalid = 1;
+                        prevextreme = prevval;
+                }
+                prevval = val;
+                if ( diff != 0 )
+                        prevdiff = diff;        
+        }
+        if ( nextreme == 0 )
+                val = 0;
+        else
+                val = (int)(avg / (double)nextreme);
+        return PyInt_FromLong(val);
 }
 
 static PyObject *
 audioop_maxpp(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	int len, size, val = 0, prevval = 0, prevextremevalid = 0,
-		prevextreme = 0;
-	int i;
-	int max = 0;
-	int diff, prevdiff, extremediff;
+        signed char *cp;
+        int len, size, val = 0, prevval = 0, prevextremevalid = 0,
+                prevextreme = 0;
+        int i;
+        int max = 0;
+        int diff, prevdiff, extremediff;
 
-	if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
-		return 0;
-	if ( size != 1 && size != 2 && size != 4 ) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
-	/* Compute first delta value ahead. Also automatically makes us
-	** skip the first extreme value
-	*/
-	if ( size == 1 )      prevval = (int)*CHARP(cp, 0);
-	else if ( size == 2 ) prevval = (int)*SHORTP(cp, 0);
-	else if ( size == 4 ) prevval = (int)*LONGP(cp, 0);
-	if ( size == 1 )      val = (int)*CHARP(cp, size);
-	else if ( size == 2 ) val = (int)*SHORTP(cp, size);
-	else if ( size == 4 ) val = (int)*LONGP(cp, size);
-	prevdiff = val - prevval;
+        if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
+                return 0;
+        if ( size != 1 && size != 2 && size != 4 ) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
+        /* Compute first delta value ahead. Also automatically makes us
+        ** skip the first extreme value
+        */
+        if ( size == 1 )      prevval = (int)*CHARP(cp, 0);
+        else if ( size == 2 ) prevval = (int)*SHORTP(cp, 0);
+        else if ( size == 4 ) prevval = (int)*LONGP(cp, 0);
+        if ( size == 1 )      val = (int)*CHARP(cp, size);
+        else if ( size == 2 ) val = (int)*SHORTP(cp, size);
+        else if ( size == 4 ) val = (int)*LONGP(cp, size);
+        prevdiff = val - prevval;
 
-	for ( i=size; i<len; i+= size) {
-		if ( size == 1 )      val = (int)*CHARP(cp, i);
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = (int)*LONGP(cp, i);
-		diff = val - prevval;
-		if ( diff*prevdiff < 0 ) {
-			/* Derivative changed sign. Compute difference to
-			** last extreme value and remember.
-			*/
-			if ( prevextremevalid ) {
-				extremediff = prevval - prevextreme;
-				if ( extremediff < 0 )
-					extremediff = -extremediff;
-				if ( extremediff > max )
-					max = extremediff;
-			}
-			prevextremevalid = 1;
-			prevextreme = prevval;
-		}
-		prevval = val;
-		if ( diff != 0 )
-			prevdiff = diff;
-	}
-	return PyInt_FromLong(max);
+        for ( i=size; i<len; i+= size) {
+                if ( size == 1 )      val = (int)*CHARP(cp, i);
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = (int)*LONGP(cp, i);
+                diff = val - prevval;
+                if ( diff*prevdiff < 0 ) {
+                        /* Derivative changed sign. Compute difference to
+                        ** last extreme value and remember.
+                        */
+                        if ( prevextremevalid ) {
+                                extremediff = prevval - prevextreme;
+                                if ( extremediff < 0 )
+                                        extremediff = -extremediff;
+                                if ( extremediff > max )
+                                        max = extremediff;
+                        }
+                        prevextremevalid = 1;
+                        prevextreme = prevval;
+                }
+                prevval = val;
+                if ( diff != 0 )
+                        prevdiff = diff;
+        }
+        return PyInt_FromLong(max);
 }
 
 static PyObject *
 audioop_cross(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	int len, size, val = 0;
-	int i;
-	int prevval, ncross;
+        signed char *cp;
+        int len, size, val = 0;
+        int i;
+        int prevval, ncross;
 
-	if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
-		return 0;
-	if ( size != 1 && size != 2 && size != 4 ) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
-	ncross = -1;
-	prevval = 17; /* Anything <> 0,1 */
-	for ( i=0; i<len; i+= size) {
-		if ( size == 1 )      val = ((int)*CHARP(cp, i)) >> 7;
-		else if ( size == 2 ) val = ((int)*SHORTP(cp, i)) >> 15;
-		else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 31;
-		val = val & 1;
-		if ( val != prevval ) ncross++;
-		prevval = val;
-	}
-	return PyInt_FromLong(ncross);
+        if ( !PyArg_Parse(args, "(s#i)", &cp, &len, &size) )
+                return 0;
+        if ( size != 1 && size != 2 && size != 4 ) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
+        ncross = -1;
+        prevval = 17; /* Anything <> 0,1 */
+        for ( i=0; i<len; i+= size) {
+                if ( size == 1 )      val = ((int)*CHARP(cp, i)) >> 7;
+                else if ( size == 2 ) val = ((int)*SHORTP(cp, i)) >> 15;
+                else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 31;
+                val = val & 1;
+                if ( val != prevval ) ncross++;
+                prevval = val;
+        }
+        return PyInt_FromLong(ncross);
 }
 
 static PyObject *
 audioop_mul(PyObject *self, PyObject *args)
 {
-	signed char *cp, *ncp;
-	int len, size, val = 0;
-	double factor, fval, maxval;
-	PyObject *rv;
-	int i;
+        signed char *cp, *ncp;
+        int len, size, val = 0;
+        double factor, fval, maxval;
+        PyObject *rv;
+        int i;
 
-	if ( !PyArg_Parse(args, "(s#id)", &cp, &len, &size, &factor ) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#id)", &cp, &len, &size, &factor ) )
+                return 0;
     
-	if ( size == 1 ) maxval = (double) 0x7f;
-	else if ( size == 2 ) maxval = (double) 0x7fff;
-	else if ( size == 4 ) maxval = (double) 0x7fffffff;
-	else {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( size == 1 ) maxval = (double) 0x7f;
+        else if ( size == 2 ) maxval = (double) 0x7fff;
+        else if ( size == 4 ) maxval = (double) 0x7fffffff;
+        else {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
     
-	rv = PyString_FromStringAndSize(NULL, len);
-	if ( rv == 0 )
-		return 0;
-	ncp = (signed char *)PyString_AsString(rv);
+        rv = PyString_FromStringAndSize(NULL, len);
+        if ( rv == 0 )
+                return 0;
+        ncp = (signed char *)PyString_AsString(rv);
     
     
-	for ( i=0; i < len; i += size ) {
-		if ( size == 1 )      val = (int)*CHARP(cp, i);
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = (int)*LONGP(cp, i);
-		fval = (double)val*factor;
-		if ( fval > maxval ) fval = maxval;
-		else if ( fval < -maxval ) fval = -maxval;
-		val = (int)fval;
-		if ( size == 1 )      *CHARP(ncp, i) = (signed char)val;
-		else if ( size == 2 ) *SHORTP(ncp, i) = (short)val;
-		else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)val;
-	}
-	return rv;
+        for ( i=0; i < len; i += size ) {
+                if ( size == 1 )      val = (int)*CHARP(cp, i);
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = (int)*LONGP(cp, i);
+                fval = (double)val*factor;
+                if ( fval > maxval ) fval = maxval;
+                else if ( fval < -maxval ) fval = -maxval;
+                val = (int)fval;
+                if ( size == 1 )      *CHARP(ncp, i) = (signed char)val;
+                else if ( size == 2 ) *SHORTP(ncp, i) = (short)val;
+                else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)val;
+        }
+        return rv;
 }
 
 static PyObject *
 audioop_tomono(PyObject *self, PyObject *args)
 {
-	signed char *cp, *ncp;
-	int len, size, val1 = 0, val2 = 0;
-	double fac1, fac2, fval, maxval;
-	PyObject *rv;
-	int i;
+        signed char *cp, *ncp;
+        int len, size, val1 = 0, val2 = 0;
+        double fac1, fac2, fval, maxval;
+        PyObject *rv;
+        int i;
 
-	if ( !PyArg_Parse(args, "(s#idd)", &cp, &len, &size, &fac1, &fac2 ) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#idd)", &cp, &len, &size, &fac1, &fac2 ) )
+                return 0;
     
-	if ( size == 1 ) maxval = (double) 0x7f;
-	else if ( size == 2 ) maxval = (double) 0x7fff;
-	else if ( size == 4 ) maxval = (double) 0x7fffffff;
-	else {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( size == 1 ) maxval = (double) 0x7f;
+        else if ( size == 2 ) maxval = (double) 0x7fff;
+        else if ( size == 4 ) maxval = (double) 0x7fffffff;
+        else {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
     
-	rv = PyString_FromStringAndSize(NULL, len/2);
-	if ( rv == 0 )
-		return 0;
-	ncp = (signed char *)PyString_AsString(rv);
+        rv = PyString_FromStringAndSize(NULL, len/2);
+        if ( rv == 0 )
+                return 0;
+        ncp = (signed char *)PyString_AsString(rv);
     
     
-	for ( i=0; i < len; i += size*2 ) {
-		if ( size == 1 )      val1 = (int)*CHARP(cp, i);
-		else if ( size == 2 ) val1 = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val1 = (int)*LONGP(cp, i);
-		if ( size == 1 )      val2 = (int)*CHARP(cp, i+1);
-		else if ( size == 2 ) val2 = (int)*SHORTP(cp, i+2);
-		else if ( size == 4 ) val2 = (int)*LONGP(cp, i+4);
-		fval = (double)val1*fac1 + (double)val2*fac2;
-		if ( fval > maxval ) fval = maxval;
-		else if ( fval < -maxval ) fval = -maxval;
-		val1 = (int)fval;
-		if ( size == 1 )      *CHARP(ncp, i/2) = (signed char)val1;
-		else if ( size == 2 ) *SHORTP(ncp, i/2) = (short)val1;
-		else if ( size == 4 ) *LONGP(ncp, i/2)= (Py_Int32)val1;
-	}
-	return rv;
+        for ( i=0; i < len; i += size*2 ) {
+                if ( size == 1 )      val1 = (int)*CHARP(cp, i);
+                else if ( size == 2 ) val1 = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val1 = (int)*LONGP(cp, i);
+                if ( size == 1 )      val2 = (int)*CHARP(cp, i+1);
+                else if ( size == 2 ) val2 = (int)*SHORTP(cp, i+2);
+                else if ( size == 4 ) val2 = (int)*LONGP(cp, i+4);
+                fval = (double)val1*fac1 + (double)val2*fac2;
+                if ( fval > maxval ) fval = maxval;
+                else if ( fval < -maxval ) fval = -maxval;
+                val1 = (int)fval;
+                if ( size == 1 )      *CHARP(ncp, i/2) = (signed char)val1;
+                else if ( size == 2 ) *SHORTP(ncp, i/2) = (short)val1;
+                else if ( size == 4 ) *LONGP(ncp, i/2)= (Py_Int32)val1;
+        }
+        return rv;
 }
 
 static PyObject *
 audioop_tostereo(PyObject *self, PyObject *args)
 {
-	signed char *cp, *ncp;
-	int len, size, val1, val2, val = 0;
-	double fac1, fac2, fval, maxval;
-	PyObject *rv;
-	int i;
+        signed char *cp, *ncp;
+        int len, size, val1, val2, val = 0;
+        double fac1, fac2, fval, maxval;
+        PyObject *rv;
+        int i;
 
-	if ( !PyArg_Parse(args, "(s#idd)", &cp, &len, &size, &fac1, &fac2 ) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#idd)", &cp, &len, &size, &fac1, &fac2 ) )
+                return 0;
     
-	if ( size == 1 ) maxval = (double) 0x7f;
-	else if ( size == 2 ) maxval = (double) 0x7fff;
-	else if ( size == 4 ) maxval = (double) 0x7fffffff;
-	else {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( size == 1 ) maxval = (double) 0x7f;
+        else if ( size == 2 ) maxval = (double) 0x7fff;
+        else if ( size == 4 ) maxval = (double) 0x7fffffff;
+        else {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
     
-	rv = PyString_FromStringAndSize(NULL, len*2);
-	if ( rv == 0 )
-		return 0;
-	ncp = (signed char *)PyString_AsString(rv);
+        rv = PyString_FromStringAndSize(NULL, len*2);
+        if ( rv == 0 )
+                return 0;
+        ncp = (signed char *)PyString_AsString(rv);
     
     
-	for ( i=0; i < len; i += size ) {
-		if ( size == 1 )      val = (int)*CHARP(cp, i);
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = (int)*LONGP(cp, i);
+        for ( i=0; i < len; i += size ) {
+                if ( size == 1 )      val = (int)*CHARP(cp, i);
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = (int)*LONGP(cp, i);
 
-		fval = (double)val*fac1;
-		if ( fval > maxval ) fval = maxval;
-		else if ( fval < -maxval ) fval = -maxval;
-		val1 = (int)fval;
+                fval = (double)val*fac1;
+                if ( fval > maxval ) fval = maxval;
+                else if ( fval < -maxval ) fval = -maxval;
+                val1 = (int)fval;
 
-		fval = (double)val*fac2;
-		if ( fval > maxval ) fval = maxval;
-		else if ( fval < -maxval ) fval = -maxval;
-		val2 = (int)fval;
+                fval = (double)val*fac2;
+                if ( fval > maxval ) fval = maxval;
+                else if ( fval < -maxval ) fval = -maxval;
+                val2 = (int)fval;
 
-		if ( size == 1 )      *CHARP(ncp, i*2) = (signed char)val1;
-		else if ( size == 2 ) *SHORTP(ncp, i*2) = (short)val1;
-		else if ( size == 4 ) *LONGP(ncp, i*2) = (Py_Int32)val1;
+                if ( size == 1 )      *CHARP(ncp, i*2) = (signed char)val1;
+                else if ( size == 2 ) *SHORTP(ncp, i*2) = (short)val1;
+                else if ( size == 4 ) *LONGP(ncp, i*2) = (Py_Int32)val1;
 
-		if ( size == 1 )      *CHARP(ncp, i*2+1) = (signed char)val2;
-		else if ( size == 2 ) *SHORTP(ncp, i*2+2) = (short)val2;
-		else if ( size == 4 ) *LONGP(ncp, i*2+4) = (Py_Int32)val2;
-	}
-	return rv;
+                if ( size == 1 )      *CHARP(ncp, i*2+1) = (signed char)val2;
+                else if ( size == 2 ) *SHORTP(ncp, i*2+2) = (short)val2;
+                else if ( size == 4 ) *LONGP(ncp, i*2+4) = (Py_Int32)val2;
+        }
+        return rv;
 }
 
 static PyObject *
 audioop_add(PyObject *self, PyObject *args)
 {
-	signed char *cp1, *cp2, *ncp;
-	int len1, len2, size, val1 = 0, val2 = 0, maxval, newval;
-	PyObject *rv;
-	int i;
+        signed char *cp1, *cp2, *ncp;
+        int len1, len2, size, val1 = 0, val2 = 0, maxval, newval;
+        PyObject *rv;
+        int i;
 
-	if ( !PyArg_Parse(args, "(s#s#i)",
-			  &cp1, &len1, &cp2, &len2, &size ) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#s#i)",
+                          &cp1, &len1, &cp2, &len2, &size ) )
+                return 0;
 
-	if ( len1 != len2 ) {
-		PyErr_SetString(AudioopError, "Lengths should be the same");
-		return 0;
-	}
+        if ( len1 != len2 ) {
+                PyErr_SetString(AudioopError, "Lengths should be the same");
+                return 0;
+        }
     
-	if ( size == 1 ) maxval = 0x7f;
-	else if ( size == 2 ) maxval = 0x7fff;
-	else if ( size == 4 ) maxval = 0x7fffffff;
-	else {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( size == 1 ) maxval = 0x7f;
+        else if ( size == 2 ) maxval = 0x7fff;
+        else if ( size == 4 ) maxval = 0x7fffffff;
+        else {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
 
-	rv = PyString_FromStringAndSize(NULL, len1);
-	if ( rv == 0 )
-		return 0;
-	ncp = (signed char *)PyString_AsString(rv);
+        rv = PyString_FromStringAndSize(NULL, len1);
+        if ( rv == 0 )
+                return 0;
+        ncp = (signed char *)PyString_AsString(rv);
 
-	for ( i=0; i < len1; i += size ) {
-		if ( size == 1 )      val1 = (int)*CHARP(cp1, i);
-		else if ( size == 2 ) val1 = (int)*SHORTP(cp1, i);
-		else if ( size == 4 ) val1 = (int)*LONGP(cp1, i);
-	
-		if ( size == 1 )      val2 = (int)*CHARP(cp2, i);
-		else if ( size == 2 ) val2 = (int)*SHORTP(cp2, i);
-		else if ( size == 4 ) val2 = (int)*LONGP(cp2, i);
+        for ( i=0; i < len1; i += size ) {
+                if ( size == 1 )      val1 = (int)*CHARP(cp1, i);
+                else if ( size == 2 ) val1 = (int)*SHORTP(cp1, i);
+                else if ( size == 4 ) val1 = (int)*LONGP(cp1, i);
+        
+                if ( size == 1 )      val2 = (int)*CHARP(cp2, i);
+                else if ( size == 2 ) val2 = (int)*SHORTP(cp2, i);
+                else if ( size == 4 ) val2 = (int)*LONGP(cp2, i);
 
-		newval = val1 + val2;
-		/* truncate in case of overflow */
-		if (newval > maxval) newval = maxval;
-		else if (newval < -maxval) newval = -maxval;
-		else if (size == 4 && (newval^val1) < 0 && (newval^val2) < 0)
-			newval = val1 > 0 ? maxval : - maxval;
+                newval = val1 + val2;
+                /* truncate in case of overflow */
+                if (newval > maxval) newval = maxval;
+                else if (newval < -maxval) newval = -maxval;
+                else if (size == 4 && (newval^val1) < 0 && (newval^val2) < 0)
+                        newval = val1 > 0 ? maxval : - maxval;
 
-		if ( size == 1 )      *CHARP(ncp, i) = (signed char)newval;
-		else if ( size == 2 ) *SHORTP(ncp, i) = (short)newval;
-		else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)newval;
-	}
-	return rv;
+                if ( size == 1 )      *CHARP(ncp, i) = (signed char)newval;
+                else if ( size == 2 ) *SHORTP(ncp, i) = (short)newval;
+                else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)newval;
+        }
+        return rv;
 }
 
 static PyObject *
 audioop_bias(PyObject *self, PyObject *args)
 {
-	signed char *cp, *ncp;
-	int len, size, val = 0;
-	PyObject *rv;
-	int i;
-	int bias;
+        signed char *cp, *ncp;
+        int len, size, val = 0;
+        PyObject *rv;
+        int i;
+        int bias;
 
-	if ( !PyArg_Parse(args, "(s#ii)",
-			  &cp, &len, &size , &bias) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#ii)",
+                          &cp, &len, &size , &bias) )
+                return 0;
 
-	if ( size != 1 && size != 2 && size != 4) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( size != 1 && size != 2 && size != 4) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
     
-	rv = PyString_FromStringAndSize(NULL, len);
-	if ( rv == 0 )
-		return 0;
-	ncp = (signed char *)PyString_AsString(rv);
+        rv = PyString_FromStringAndSize(NULL, len);
+        if ( rv == 0 )
+                return 0;
+        ncp = (signed char *)PyString_AsString(rv);
     
     
-	for ( i=0; i < len; i += size ) {
-		if ( size == 1 )      val = (int)*CHARP(cp, i);
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = (int)*LONGP(cp, i);
-	
-		if ( size == 1 )      *CHARP(ncp, i) = (signed char)(val+bias);
-		else if ( size == 2 ) *SHORTP(ncp, i) = (short)(val+bias);
-		else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)(val+bias);
-	}
-	return rv;
+        for ( i=0; i < len; i += size ) {
+                if ( size == 1 )      val = (int)*CHARP(cp, i);
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = (int)*LONGP(cp, i);
+        
+                if ( size == 1 )      *CHARP(ncp, i) = (signed char)(val+bias);
+                else if ( size == 2 ) *SHORTP(ncp, i) = (short)(val+bias);
+                else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)(val+bias);
+        }
+        return rv;
 }
 
 static PyObject *
 audioop_reverse(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	unsigned char *ncp;
-	int len, size, val = 0;
-	PyObject *rv;
-	int i, j;
+        signed char *cp;
+        unsigned char *ncp;
+        int len, size, val = 0;
+        PyObject *rv;
+        int i, j;
 
-	if ( !PyArg_Parse(args, "(s#i)",
-			  &cp, &len, &size) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#i)",
+                          &cp, &len, &size) )
+                return 0;
 
-	if ( size != 1 && size != 2 && size != 4 ) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( size != 1 && size != 2 && size != 4 ) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
     
-	rv = PyString_FromStringAndSize(NULL, len);
-	if ( rv == 0 )
-		return 0;
-	ncp = (unsigned char *)PyString_AsString(rv);
+        rv = PyString_FromStringAndSize(NULL, len);
+        if ( rv == 0 )
+                return 0;
+        ncp = (unsigned char *)PyString_AsString(rv);
     
-	for ( i=0; i < len; i += size ) {
-		if ( size == 1 )      val = ((int)*CHARP(cp, i)) << 8;
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 16;
+        for ( i=0; i < len; i += size ) {
+                if ( size == 1 )      val = ((int)*CHARP(cp, i)) << 8;
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 16;
 
-		j = len - i - size;
-	
-		if ( size == 1 )      *CHARP(ncp, j) = (signed char)(val >> 8);
-		else if ( size == 2 ) *SHORTP(ncp, j) = (short)(val);
-		else if ( size == 4 ) *LONGP(ncp, j) = (Py_Int32)(val<<16);
-	}
-	return rv;
+                j = len - i - size;
+        
+                if ( size == 1 )      *CHARP(ncp, j) = (signed char)(val >> 8);
+                else if ( size == 2 ) *SHORTP(ncp, j) = (short)(val);
+                else if ( size == 4 ) *LONGP(ncp, j) = (Py_Int32)(val<<16);
+        }
+        return rv;
 }
 
 static PyObject *
 audioop_lin2lin(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	unsigned char *ncp;
-	int len, size, size2, val = 0;
-	PyObject *rv;
-	int i, j;
+        signed char *cp;
+        unsigned char *ncp;
+        int len, size, size2, val = 0;
+        PyObject *rv;
+        int i, j;
 
-	if ( !PyArg_Parse(args, "(s#ii)",
-			  &cp, &len, &size, &size2) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#ii)",
+                          &cp, &len, &size, &size2) )
+                return 0;
 
-	if ( (size != 1 && size != 2 && size != 4) ||
-	     (size2 != 1 && size2 != 2 && size2 != 4)) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( (size != 1 && size != 2 && size != 4) ||
+             (size2 != 1 && size2 != 2 && size2 != 4)) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
     
-	rv = PyString_FromStringAndSize(NULL, (len/size)*size2);
-	if ( rv == 0 )
-		return 0;
-	ncp = (unsigned char *)PyString_AsString(rv);
+        rv = PyString_FromStringAndSize(NULL, (len/size)*size2);
+        if ( rv == 0 )
+                return 0;
+        ncp = (unsigned char *)PyString_AsString(rv);
     
-	for ( i=0, j=0; i < len; i += size, j += size2 ) {
-		if ( size == 1 )      val = ((int)*CHARP(cp, i)) << 8;
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 16;
+        for ( i=0, j=0; i < len; i += size, j += size2 ) {
+                if ( size == 1 )      val = ((int)*CHARP(cp, i)) << 8;
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 16;
 
-		if ( size2 == 1 )  *CHARP(ncp, j) = (signed char)(val >> 8);
-		else if ( size2 == 2 ) *SHORTP(ncp, j) = (short)(val);
-		else if ( size2 == 4 ) *LONGP(ncp, j) = (Py_Int32)(val<<16);
-	}
-	return rv;
+                if ( size2 == 1 )  *CHARP(ncp, j) = (signed char)(val >> 8);
+                else if ( size2 == 2 ) *SHORTP(ncp, j) = (short)(val);
+                else if ( size2 == 4 ) *LONGP(ncp, j) = (Py_Int32)(val<<16);
+        }
+        return rv;
 }
 
 static int
 gcd(int a, int b)
 {
-	while (b > 0) {
-		int tmp = a % b;
-		a = b;
-		b = tmp;
-	}
-	return a;
+        while (b > 0) {
+                int tmp = a % b;
+                a = b;
+                b = tmp;
+        }
+        return a;
 }
 
 static PyObject *
 audioop_ratecv(PyObject *self, PyObject *args)
 {
-	char *cp, *ncp;
-	int len, size, nchannels, inrate, outrate, weightA, weightB;
-	int chan, d, *prev_i, *cur_i, cur_o;
-	PyObject *state, *samps, *str, *rv = NULL;
-	int bytes_per_frame;
+        char *cp, *ncp;
+        int len, size, nchannels, inrate, outrate, weightA, weightB;
+        int chan, d, *prev_i, *cur_i, cur_o;
+        PyObject *state, *samps, *str, *rv = NULL;
+        int bytes_per_frame;
 
-	weightA = 1;
-	weightB = 0;
-	if (!PyArg_ParseTuple(args, "s#iiiiO|ii:ratecv", &cp, &len, &size, &nchannels,
-			      &inrate, &outrate, &state, &weightA, &weightB))
-		return NULL;
-	if (size != 1 && size != 2 && size != 4) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return NULL;
-	}
-	if (nchannels < 1) {
-		PyErr_SetString(AudioopError, "# of channels should be >= 1");
-		return NULL;
-	}
-	bytes_per_frame = size * nchannels;
-	if (bytes_per_frame / nchannels != size) {
-		/* This overflow test is rigorously correct because
-		   both multiplicands are >= 1.  Use the argument names
-		   from the docs for the error msg. */
-		PyErr_SetString(PyExc_OverflowError,
-		                "width * nchannels too big for a C int");
-		return NULL;
-	}
-	if (weightA < 1 || weightB < 0) {
-		PyErr_SetString(AudioopError,
-			"weightA should be >= 1, weightB should be >= 0");
-		return NULL;
-	}
-	if (len % bytes_per_frame != 0) {
-		PyErr_SetString(AudioopError, "not a whole number of frames");
-		return NULL;
-	}
-	if (inrate <= 0 || outrate <= 0) {
-		PyErr_SetString(AudioopError, "sampling rate not > 0");
-		return NULL;
-	}
-	/* divide inrate and outrate by their greatest common divisor */
-	d = gcd(inrate, outrate);
-	inrate /= d;
-	outrate /= d;
+        weightA = 1;
+        weightB = 0;
+        if (!PyArg_ParseTuple(args, "s#iiiiO|ii:ratecv", &cp, &len, &size, &nchannels,
+                              &inrate, &outrate, &state, &weightA, &weightB))
+                return NULL;
+        if (size != 1 && size != 2 && size != 4) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return NULL;
+        }
+        if (nchannels < 1) {
+                PyErr_SetString(AudioopError, "# of channels should be >= 1");
+                return NULL;
+        }
+        bytes_per_frame = size * nchannels;
+        if (bytes_per_frame / nchannels != size) {
+                /* This overflow test is rigorously correct because
+                   both multiplicands are >= 1.  Use the argument names
+                   from the docs for the error msg. */
+                PyErr_SetString(PyExc_OverflowError,
+                                "width * nchannels too big for a C int");
+                return NULL;
+        }
+        if (weightA < 1 || weightB < 0) {
+                PyErr_SetString(AudioopError,
+                        "weightA should be >= 1, weightB should be >= 0");
+                return NULL;
+        }
+        if (len % bytes_per_frame != 0) {
+                PyErr_SetString(AudioopError, "not a whole number of frames");
+                return NULL;
+        }
+        if (inrate <= 0 || outrate <= 0) {
+                PyErr_SetString(AudioopError, "sampling rate not > 0");
+                return NULL;
+        }
+        /* divide inrate and outrate by their greatest common divisor */
+        d = gcd(inrate, outrate);
+        inrate /= d;
+        outrate /= d;
 
-	prev_i = (int *) malloc(nchannels * sizeof(int));
-	cur_i = (int *) malloc(nchannels * sizeof(int));
-	if (prev_i == NULL || cur_i == NULL) {
-		(void) PyErr_NoMemory();
-		goto exit;
-	}
+        prev_i = (int *) malloc(nchannels * sizeof(int));
+        cur_i = (int *) malloc(nchannels * sizeof(int));
+        if (prev_i == NULL || cur_i == NULL) {
+                (void) PyErr_NoMemory();
+                goto exit;
+        }
 
-	len /= bytes_per_frame;	/* # of frames */
+        len /= bytes_per_frame; /* # of frames */
 
-	if (state == Py_None) {
-		d = -outrate;
-		for (chan = 0; chan < nchannels; chan++)
-			prev_i[chan] = cur_i[chan] = 0;
-	}
-	else {
-		if (!PyArg_ParseTuple(state,
-				"iO!;audioop.ratecv: illegal state argument",
-				&d, &PyTuple_Type, &samps))
-			goto exit;
-		if (PyTuple_Size(samps) != nchannels) {
-			PyErr_SetString(AudioopError,
-					"illegal state argument");
-			goto exit;
-		}
-		for (chan = 0; chan < nchannels; chan++) {
-			if (!PyArg_ParseTuple(PyTuple_GetItem(samps, chan),
-					      "ii:ratecv",&prev_i[chan],&cur_i[chan]))
-				goto exit;
-		}
-	}
+        if (state == Py_None) {
+                d = -outrate;
+                for (chan = 0; chan < nchannels; chan++)
+                        prev_i[chan] = cur_i[chan] = 0;
+        }
+        else {
+                if (!PyArg_ParseTuple(state,
+                                "iO!;audioop.ratecv: illegal state argument",
+                                &d, &PyTuple_Type, &samps))
+                        goto exit;
+                if (PyTuple_Size(samps) != nchannels) {
+                        PyErr_SetString(AudioopError,
+                                        "illegal state argument");
+                        goto exit;
+                }
+                for (chan = 0; chan < nchannels; chan++) {
+                        if (!PyArg_ParseTuple(PyTuple_GetItem(samps, chan),
+                                              "ii:ratecv",&prev_i[chan],&cur_i[chan]))
+                                goto exit;
+                }
+        }
 
-	/* str <- Space for the output buffer. */
-	{
-		/* There are len input frames, so we need (mathematically)
-		   ceiling(len*outrate/inrate) output frames, and each frame
-		   requires bytes_per_frame bytes.  Computing this
-		   without spurious overflow is the challenge; we can
-		   settle for a reasonable upper bound, though. */
-		int ceiling;   /* the number of output frames */
-		int nbytes;    /* the number of output bytes needed */
-		int q = len / inrate;
-		/* Now len = q * inrate + r exactly (with r = len % inrate),
-		   and this is less than q * inrate + inrate = (q+1)*inrate.
-		   So a reasonable upper bound on len*outrate/inrate is
-		   ((q+1)*inrate)*outrate/inrate =
-		   (q+1)*outrate.
-		*/
-		ceiling = (q+1) * outrate;
-		nbytes = ceiling * bytes_per_frame;
-		/* See whether anything overflowed; if not, get the space. */
-		if (q+1 < 0 ||
-		    ceiling / outrate != q+1 ||
-		    nbytes / bytes_per_frame != ceiling)
-			str = NULL;
-		else
-			str = PyString_FromStringAndSize(NULL, nbytes);
+        /* str <- Space for the output buffer. */
+        {
+                /* There are len input frames, so we need (mathematically)
+                   ceiling(len*outrate/inrate) output frames, and each frame
+                   requires bytes_per_frame bytes.  Computing this
+                   without spurious overflow is the challenge; we can
+                   settle for a reasonable upper bound, though. */
+                int ceiling;   /* the number of output frames */
+                int nbytes;    /* the number of output bytes needed */
+                int q = len / inrate;
+                /* Now len = q * inrate + r exactly (with r = len % inrate),
+                   and this is less than q * inrate + inrate = (q+1)*inrate.
+                   So a reasonable upper bound on len*outrate/inrate is
+                   ((q+1)*inrate)*outrate/inrate =
+                   (q+1)*outrate.
+                */
+                ceiling = (q+1) * outrate;
+                nbytes = ceiling * bytes_per_frame;
+                /* See whether anything overflowed; if not, get the space. */
+                if (q+1 < 0 ||
+                    ceiling / outrate != q+1 ||
+                    nbytes / bytes_per_frame != ceiling)
+                        str = NULL;
+                else
+                        str = PyString_FromStringAndSize(NULL, nbytes);
 
-		if (str == NULL) {
-			PyErr_SetString(PyExc_MemoryError,
-				"not enough memory for output buffer");
-			goto exit;
-		}
-	}
-	ncp = PyString_AsString(str);
+                if (str == NULL) {
+                        PyErr_SetString(PyExc_MemoryError,
+                                "not enough memory for output buffer");
+                        goto exit;
+                }
+        }
+        ncp = PyString_AsString(str);
 
-	for (;;) {
-		while (d < 0) {
-			if (len == 0) {
-				samps = PyTuple_New(nchannels);
-				if (samps == NULL)
-					goto exit;
-				for (chan = 0; chan < nchannels; chan++)
-					PyTuple_SetItem(samps, chan,
-						Py_BuildValue("(ii)",
-							      prev_i[chan],
-							      cur_i[chan]));
-				if (PyErr_Occurred())
-					goto exit;
-				/* We have checked before that the length
-				 * of the string fits into int. */
-				len = (int)(ncp - PyString_AsString(str));
-				if (len == 0) {
-					/*don't want to resize to zero length*/
-					rv = PyString_FromStringAndSize("", 0);
-					Py_DECREF(str);
-					str = rv;
-				} else if (_PyString_Resize(&str, len) < 0)
-					goto exit;
-				rv = Py_BuildValue("(O(iO))", str, d, samps);
-				Py_DECREF(samps);
-				Py_DECREF(str);
-				goto exit; /* return rv */
-			}
-			for (chan = 0; chan < nchannels; chan++) {
-				prev_i[chan] = cur_i[chan];
-				if (size == 1)
-				    cur_i[chan] = ((int)*CHARP(cp, 0)) << 8;
-				else if (size == 2)
-				    cur_i[chan] = (int)*SHORTP(cp, 0);
-				else if (size == 4)
-				    cur_i[chan] = ((int)*LONGP(cp, 0)) >> 16;
-				cp += size;
-				/* implements a simple digital filter */
-				cur_i[chan] =
-					(weightA * cur_i[chan] +
-					 weightB * prev_i[chan]) /
-					(weightA + weightB);
-			}
-			len--;
-			d += outrate;
-		}
-		while (d >= 0) {
-			for (chan = 0; chan < nchannels; chan++) {
-				cur_o = (prev_i[chan] * d +
-					 cur_i[chan] * (outrate - d)) /
-					outrate;
-				if (size == 1)
-				    *CHARP(ncp, 0) = (signed char)(cur_o >> 8);
-				else if (size == 2)
-				    *SHORTP(ncp, 0) = (short)(cur_o);
-				else if (size == 4)
-				    *LONGP(ncp, 0) = (Py_Int32)(cur_o<<16);
-				ncp += size;
-			}
-			d -= inrate;
-		}
-	}
+        for (;;) {
+                while (d < 0) {
+                        if (len == 0) {
+                                samps = PyTuple_New(nchannels);
+                                if (samps == NULL)
+                                        goto exit;
+                                for (chan = 0; chan < nchannels; chan++)
+                                        PyTuple_SetItem(samps, chan,
+                                                Py_BuildValue("(ii)",
+                                                              prev_i[chan],
+                                                              cur_i[chan]));
+                                if (PyErr_Occurred())
+                                        goto exit;
+                                /* We have checked before that the length
+                                 * of the string fits into int. */
+                                len = (int)(ncp - PyString_AsString(str));
+                                if (len == 0) {
+                                        /*don't want to resize to zero length*/
+                                        rv = PyString_FromStringAndSize("", 0);
+                                        Py_DECREF(str);
+                                        str = rv;
+                                } else if (_PyString_Resize(&str, len) < 0)
+                                        goto exit;
+                                rv = Py_BuildValue("(O(iO))", str, d, samps);
+                                Py_DECREF(samps);
+                                Py_DECREF(str);
+                                goto exit; /* return rv */
+                        }
+                        for (chan = 0; chan < nchannels; chan++) {
+                                prev_i[chan] = cur_i[chan];
+                                if (size == 1)
+                                    cur_i[chan] = ((int)*CHARP(cp, 0)) << 8;
+                                else if (size == 2)
+                                    cur_i[chan] = (int)*SHORTP(cp, 0);
+                                else if (size == 4)
+                                    cur_i[chan] = ((int)*LONGP(cp, 0)) >> 16;
+                                cp += size;
+                                /* implements a simple digital filter */
+                                cur_i[chan] =
+                                        (weightA * cur_i[chan] +
+                                         weightB * prev_i[chan]) /
+                                        (weightA + weightB);
+                        }
+                        len--;
+                        d += outrate;
+                }
+                while (d >= 0) {
+                        for (chan = 0; chan < nchannels; chan++) {
+                                cur_o = (prev_i[chan] * d +
+                                         cur_i[chan] * (outrate - d)) /
+                                        outrate;
+                                if (size == 1)
+                                    *CHARP(ncp, 0) = (signed char)(cur_o >> 8);
+                                else if (size == 2)
+                                    *SHORTP(ncp, 0) = (short)(cur_o);
+                                else if (size == 4)
+                                    *LONGP(ncp, 0) = (Py_Int32)(cur_o<<16);
+                                ncp += size;
+                        }
+                        d -= inrate;
+                }
+        }
   exit:
-	if (prev_i != NULL)
-		free(prev_i);
-	if (cur_i != NULL)
-		free(cur_i);
-	return rv;
+        if (prev_i != NULL)
+                free(prev_i);
+        if (cur_i != NULL)
+                free(cur_i);
+        return rv;
 }
 
 static PyObject *
 audioop_lin2ulaw(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	unsigned char *ncp;
-	int len, size, val = 0;
-	PyObject *rv;
-	int i;
+        signed char *cp;
+        unsigned char *ncp;
+        int len, size, val = 0;
+        PyObject *rv;
+        int i;
 
-	if ( !PyArg_Parse(args, "(s#i)",
-			  &cp, &len, &size) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#i)",
+                          &cp, &len, &size) )
+                return 0;
 
-	if ( size != 1 && size != 2 && size != 4) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( size != 1 && size != 2 && size != 4) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
     
-	rv = PyString_FromStringAndSize(NULL, len/size);
-	if ( rv == 0 )
-		return 0;
-	ncp = (unsigned char *)PyString_AsString(rv);
+        rv = PyString_FromStringAndSize(NULL, len/size);
+        if ( rv == 0 )
+                return 0;
+        ncp = (unsigned char *)PyString_AsString(rv);
     
-	for ( i=0; i < len; i += size ) {
-		if ( size == 1 )      val = ((int)*CHARP(cp, i)) << 8;
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 16;
+        for ( i=0; i < len; i += size ) {
+                if ( size == 1 )      val = ((int)*CHARP(cp, i)) << 8;
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 16;
 
-		*ncp++ = st_linear_to_ulaw(val);
-	}
-	return rv;
+                *ncp++ = st_14linear2ulaw(val);
+        }
+        return rv;
 }
 
 static PyObject *
 audioop_ulaw2lin(PyObject *self, PyObject *args)
 {
-	unsigned char *cp;
-	unsigned char cval;
-	signed char *ncp;
-	int len, size, val;
-	PyObject *rv;
-	int i;
+        unsigned char *cp;
+        unsigned char cval;
+        signed char *ncp;
+        int len, size, val;
+        PyObject *rv;
+        int i;
 
-	if ( !PyArg_Parse(args, "(s#i)",
-			  &cp, &len, &size) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#i)",
+                          &cp, &len, &size) )
+                return 0;
 
-	if ( size != 1 && size != 2 && size != 4) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( size != 1 && size != 2 && size != 4) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
     
-	rv = PyString_FromStringAndSize(NULL, len*size);
-	if ( rv == 0 )
-		return 0;
-	ncp = (signed char *)PyString_AsString(rv);
+        rv = PyString_FromStringAndSize(NULL, len*size);
+        if ( rv == 0 )
+                return 0;
+        ncp = (signed char *)PyString_AsString(rv);
     
-	for ( i=0; i < len*size; i += size ) {
-		cval = *cp++;
-		val = st_ulaw_to_linear(cval);
-	
-		if ( size == 1 )      *CHARP(ncp, i) = (signed char)(val >> 8);
-		else if ( size == 2 ) *SHORTP(ncp, i) = (short)(val);
-		else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)(val<<16);
-	}
-	return rv;
+        for ( i=0; i < len*size; i += size ) {
+                cval = *cp++;
+                val = st_ulaw2linear16(cval);
+        
+                if ( size == 1 )      *CHARP(ncp, i) = (signed char)(val >> 8);
+                else if ( size == 2 ) *SHORTP(ncp, i) = (short)(val);
+                else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)(val<<16);
+        }
+        return rv;
+}
+
+static PyObject *
+audioop_lin2alaw(PyObject *self, PyObject *args)
+{
+        signed char *cp;
+        unsigned char *ncp;
+        int len, size, val = 0;
+        PyObject *rv;
+        int i;
+
+        if ( !PyArg_Parse(args, "(s#i)",
+                          &cp, &len, &size) )
+                return 0;
+
+        if ( size != 1 && size != 2 && size != 4) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
+    
+        rv = PyString_FromStringAndSize(NULL, len/size);
+        if ( rv == 0 )
+                return 0;
+        ncp = (unsigned char *)PyString_AsString(rv);
+    
+        for ( i=0; i < len; i += size ) {
+                if ( size == 1 )      val = ((int)*CHARP(cp, i)) << 8;
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 16;
+
+                *ncp++ = st_linear2alaw(val);
+        }
+        return rv;
+}
+
+static PyObject *
+audioop_alaw2lin(PyObject *self, PyObject *args)
+{
+        unsigned char *cp;
+        unsigned char cval;
+        signed char *ncp;
+        int len, size, val;
+        PyObject *rv;
+        int i;
+
+        if ( !PyArg_Parse(args, "(s#i)",
+                          &cp, &len, &size) )
+                return 0;
+
+        if ( size != 1 && size != 2 && size != 4) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
+    
+        rv = PyString_FromStringAndSize(NULL, len*size);
+        if ( rv == 0 )
+                return 0;
+        ncp = (signed char *)PyString_AsString(rv);
+    
+        for ( i=0; i < len*size; i += size ) {
+                cval = *cp++;
+                val = st_alaw2linear16(cval);
+        
+                if ( size == 1 )      *CHARP(ncp, i) = (signed char)(val >> 8);
+                else if ( size == 2 ) *SHORTP(ncp, i) = (short)(val);
+                else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)(val<<16);
+        }
+        return rv;
 }
 
 static PyObject *
 audioop_lin2adpcm(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	signed char *ncp;
-	int len, size, val = 0, step, valpred, delta,
-		index, sign, vpdiff, diff;
-	PyObject *rv, *state, *str;
-	int i, outputbuffer = 0, bufferstep;
+        signed char *cp;
+        signed char *ncp;
+        int len, size, val = 0, step, valpred, delta,
+                index, sign, vpdiff, diff;
+        PyObject *rv, *state, *str;
+        int i, outputbuffer = 0, bufferstep;
 
-	if ( !PyArg_Parse(args, "(s#iO)",
-			  &cp, &len, &size, &state) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#iO)",
+                          &cp, &len, &size, &state) )
+                return 0;
     
 
-	if ( size != 1 && size != 2 && size != 4) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( size != 1 && size != 2 && size != 4) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
     
-	str = PyString_FromStringAndSize(NULL, len/(size*2));
-	if ( str == 0 )
-		return 0;
-	ncp = (signed char *)PyString_AsString(str);
+        str = PyString_FromStringAndSize(NULL, len/(size*2));
+        if ( str == 0 )
+                return 0;
+        ncp = (signed char *)PyString_AsString(str);
 
-	/* Decode state, should have (value, step) */
-	if ( state == Py_None ) {
-		/* First time, it seems. Set defaults */
-		valpred = 0;
-		step = 7;
-		index = 0;
-	} else if ( !PyArg_Parse(state, "(ii)", &valpred, &index) )
-		return 0;
+        /* Decode state, should have (value, step) */
+        if ( state == Py_None ) {
+                /* First time, it seems. Set defaults */
+                valpred = 0;
+                step = 7;
+                index = 0;
+        } else if ( !PyArg_Parse(state, "(ii)", &valpred, &index) )
+                return 0;
 
-	step = stepsizeTable[index];
-	bufferstep = 1;
+        step = stepsizeTable[index];
+        bufferstep = 1;
 
-	for ( i=0; i < len; i += size ) {
-		if ( size == 1 )      val = ((int)*CHARP(cp, i)) << 8;
-		else if ( size == 2 ) val = (int)*SHORTP(cp, i);
-		else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 16;
+        for ( i=0; i < len; i += size ) {
+                if ( size == 1 )      val = ((int)*CHARP(cp, i)) << 8;
+                else if ( size == 2 ) val = (int)*SHORTP(cp, i);
+                else if ( size == 4 ) val = ((int)*LONGP(cp, i)) >> 16;
 
-		/* Step 1 - compute difference with previous value */
-		diff = val - valpred;
-		sign = (diff < 0) ? 8 : 0;
-		if ( sign ) diff = (-diff);
+                /* Step 1 - compute difference with previous value */
+                diff = val - valpred;
+                sign = (diff < 0) ? 8 : 0;
+                if ( sign ) diff = (-diff);
 
-		/* Step 2 - Divide and clamp */
-		/* Note:
-		** This code *approximately* computes:
-		**    delta = diff*4/step;
-		**    vpdiff = (delta+0.5)*step/4;
-		** but in shift step bits are dropped. The net result of this
-		** is that even if you have fast mul/div hardware you cannot
-		** put it to good use since the fixup would be too expensive.
-		*/
-		delta = 0;
-		vpdiff = (step >> 3);
-	
-		if ( diff >= step ) {
-			delta = 4;
-			diff -= step;
-			vpdiff += step;
-		}
-		step >>= 1;
-		if ( diff >= step  ) {
-			delta |= 2;
-			diff -= step;
-			vpdiff += step;
-		}
-		step >>= 1;
-		if ( diff >= step ) {
-			delta |= 1;
-			vpdiff += step;
-		}
+                /* Step 2 - Divide and clamp */
+                /* Note:
+                ** This code *approximately* computes:
+                **    delta = diff*4/step;
+                **    vpdiff = (delta+0.5)*step/4;
+                ** but in shift step bits are dropped. The net result of this
+                ** is that even if you have fast mul/div hardware you cannot
+                ** put it to good use since the fixup would be too expensive.
+                */
+                delta = 0;
+                vpdiff = (step >> 3);
+        
+                if ( diff >= step ) {
+                        delta = 4;
+                        diff -= step;
+                        vpdiff += step;
+                }
+                step >>= 1;
+                if ( diff >= step  ) {
+                        delta |= 2;
+                        diff -= step;
+                        vpdiff += step;
+                }
+                step >>= 1;
+                if ( diff >= step ) {
+                        delta |= 1;
+                        vpdiff += step;
+                }
 
-		/* Step 3 - Update previous value */
-		if ( sign )
-			valpred -= vpdiff;
-		else
-			valpred += vpdiff;
+                /* Step 3 - Update previous value */
+                if ( sign )
+                        valpred -= vpdiff;
+                else
+                        valpred += vpdiff;
 
-		/* Step 4 - Clamp previous value to 16 bits */
-		if ( valpred > 32767 )
-			valpred = 32767;
-		else if ( valpred < -32768 )
-			valpred = -32768;
+                /* Step 4 - Clamp previous value to 16 bits */
+                if ( valpred > 32767 )
+                        valpred = 32767;
+                else if ( valpred < -32768 )
+                        valpred = -32768;
 
-		/* Step 5 - Assemble value, update index and step values */
-		delta |= sign;
-	
-		index += indexTable[delta];
-		if ( index < 0 ) index = 0;
-		if ( index > 88 ) index = 88;
-		step = stepsizeTable[index];
+                /* Step 5 - Assemble value, update index and step values */
+                delta |= sign;
+        
+                index += indexTable[delta];
+                if ( index < 0 ) index = 0;
+                if ( index > 88 ) index = 88;
+                step = stepsizeTable[index];
 
-		/* Step 6 - Output value */
-		if ( bufferstep ) {
-			outputbuffer = (delta << 4) & 0xf0;
-		} else {
-			*ncp++ = (delta & 0x0f) | outputbuffer;
-		}
-		bufferstep = !bufferstep;
-	}
-	rv = Py_BuildValue("(O(ii))", str, valpred, index);
-	Py_DECREF(str);
-	return rv;
+                /* Step 6 - Output value */
+                if ( bufferstep ) {
+                        outputbuffer = (delta << 4) & 0xf0;
+                } else {
+                        *ncp++ = (delta & 0x0f) | outputbuffer;
+                }
+                bufferstep = !bufferstep;
+        }
+        rv = Py_BuildValue("(O(ii))", str, valpred, index);
+        Py_DECREF(str);
+        return rv;
 }
 
 static PyObject *
 audioop_adpcm2lin(PyObject *self, PyObject *args)
 {
-	signed char *cp;
-	signed char *ncp;
-	int len, size, valpred, step, delta, index, sign, vpdiff;
-	PyObject *rv, *str, *state;
-	int i, inputbuffer = 0, bufferstep;
+        signed char *cp;
+        signed char *ncp;
+        int len, size, valpred, step, delta, index, sign, vpdiff;
+        PyObject *rv, *str, *state;
+        int i, inputbuffer = 0, bufferstep;
 
-	if ( !PyArg_Parse(args, "(s#iO)",
-			  &cp, &len, &size, &state) )
-		return 0;
+        if ( !PyArg_Parse(args, "(s#iO)",
+                          &cp, &len, &size, &state) )
+                return 0;
 
-	if ( size != 1 && size != 2 && size != 4) {
-		PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
-		return 0;
-	}
+        if ( size != 1 && size != 2 && size != 4) {
+                PyErr_SetString(AudioopError, "Size should be 1, 2 or 4");
+                return 0;
+        }
     
-	/* Decode state, should have (value, step) */
-	if ( state == Py_None ) {
-		/* First time, it seems. Set defaults */
-		valpred = 0;
-		step = 7;
-		index = 0;
-	} else if ( !PyArg_Parse(state, "(ii)", &valpred, &index) )
-		return 0;
+        /* Decode state, should have (value, step) */
+        if ( state == Py_None ) {
+                /* First time, it seems. Set defaults */
+                valpred = 0;
+                step = 7;
+                index = 0;
+        } else if ( !PyArg_Parse(state, "(ii)", &valpred, &index) )
+                return 0;
     
-	str = PyString_FromStringAndSize(NULL, len*size*2);
-	if ( str == 0 )
-		return 0;
-	ncp = (signed char *)PyString_AsString(str);
+        str = PyString_FromStringAndSize(NULL, len*size*2);
+        if ( str == 0 )
+                return 0;
+        ncp = (signed char *)PyString_AsString(str);
 
-	step = stepsizeTable[index];
-	bufferstep = 0;
+        step = stepsizeTable[index];
+        bufferstep = 0;
     
-	for ( i=0; i < len*size*2; i += size ) {
-		/* Step 1 - get the delta value and compute next index */
-		if ( bufferstep ) {
-			delta = inputbuffer & 0xf;
-		} else {
-			inputbuffer = *cp++;
-			delta = (inputbuffer >> 4) & 0xf;
-		}
+        for ( i=0; i < len*size*2; i += size ) {
+                /* Step 1 - get the delta value and compute next index */
+                if ( bufferstep ) {
+                        delta = inputbuffer & 0xf;
+                } else {
+                        inputbuffer = *cp++;
+                        delta = (inputbuffer >> 4) & 0xf;
+                }
 
-		bufferstep = !bufferstep;
+                bufferstep = !bufferstep;
 
-		/* Step 2 - Find new index value (for later) */
-		index += indexTable[delta];
-		if ( index < 0 ) index = 0;
-		if ( index > 88 ) index = 88;
+                /* Step 2 - Find new index value (for later) */
+                index += indexTable[delta];
+                if ( index < 0 ) index = 0;
+                if ( index > 88 ) index = 88;
 
-		/* Step 3 - Separate sign and magnitude */
-		sign = delta & 8;
-		delta = delta & 7;
+                /* Step 3 - Separate sign and magnitude */
+                sign = delta & 8;
+                delta = delta & 7;
 
-		/* Step 4 - Compute difference and new predicted value */
-		/*
-		** Computes 'vpdiff = (delta+0.5)*step/4', but see comment
-		** in adpcm_coder.
-		*/
-		vpdiff = step >> 3;
-		if ( delta & 4 ) vpdiff += step;
-		if ( delta & 2 ) vpdiff += step>>1;
-		if ( delta & 1 ) vpdiff += step>>2;
+                /* Step 4 - Compute difference and new predicted value */
+                /*
+                ** Computes 'vpdiff = (delta+0.5)*step/4', but see comment
+                ** in adpcm_coder.
+                */
+                vpdiff = step >> 3;
+                if ( delta & 4 ) vpdiff += step;
+                if ( delta & 2 ) vpdiff += step>>1;
+                if ( delta & 1 ) vpdiff += step>>2;
 
-		if ( sign )
-			valpred -= vpdiff;
-		else
-			valpred += vpdiff;
+                if ( sign )
+                        valpred -= vpdiff;
+                else
+                        valpred += vpdiff;
 
-		/* Step 5 - clamp output value */
-		if ( valpred > 32767 )
-			valpred = 32767;
-		else if ( valpred < -32768 )
-			valpred = -32768;
+                /* Step 5 - clamp output value */
+                if ( valpred > 32767 )
+                        valpred = 32767;
+                else if ( valpred < -32768 )
+                        valpred = -32768;
 
-		/* Step 6 - Update step value */
-		step = stepsizeTable[index];
+                /* Step 6 - Update step value */
+                step = stepsizeTable[index];
 
-		/* Step 6 - Output value */
-		if ( size == 1 ) *CHARP(ncp, i) = (signed char)(valpred >> 8);
-		else if ( size == 2 ) *SHORTP(ncp, i) = (short)(valpred);
-		else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)(valpred<<16);
-	}
+                /* Step 6 - Output value */
+                if ( size == 1 ) *CHARP(ncp, i) = (signed char)(valpred >> 8);
+                else if ( size == 2 ) *SHORTP(ncp, i) = (short)(valpred);
+                else if ( size == 4 ) *LONGP(ncp, i) = (Py_Int32)(valpred<<16);
+        }
 
-	rv = Py_BuildValue("(O(ii))", str, valpred, index);
-	Py_DECREF(str);
-	return rv;
+        rv = Py_BuildValue("(O(ii))", str, valpred, index);
+        Py_DECREF(str);
+        return rv;
 }
 
 static PyMethodDef audioop_methods[] = {
-	{ "max", audioop_max, METH_OLDARGS },
-	{ "minmax", audioop_minmax, METH_OLDARGS },
-	{ "avg", audioop_avg, METH_OLDARGS },
-	{ "maxpp", audioop_maxpp, METH_OLDARGS },
-	{ "avgpp", audioop_avgpp, METH_OLDARGS },
-	{ "rms", audioop_rms, METH_OLDARGS },
-	{ "findfit", audioop_findfit, METH_OLDARGS },
-	{ "findmax", audioop_findmax, METH_OLDARGS },
-	{ "findfactor", audioop_findfactor, METH_OLDARGS },
-	{ "cross", audioop_cross, METH_OLDARGS },
-	{ "mul", audioop_mul, METH_OLDARGS },
-	{ "add", audioop_add, METH_OLDARGS },
-	{ "bias", audioop_bias, METH_OLDARGS },
-	{ "ulaw2lin", audioop_ulaw2lin, METH_OLDARGS },
-	{ "lin2ulaw", audioop_lin2ulaw, METH_OLDARGS },
-	{ "lin2lin", audioop_lin2lin, METH_OLDARGS },
-	{ "adpcm2lin", audioop_adpcm2lin, METH_OLDARGS },
-	{ "lin2adpcm", audioop_lin2adpcm, METH_OLDARGS },
-	{ "tomono", audioop_tomono, METH_OLDARGS },
-	{ "tostereo", audioop_tostereo, METH_OLDARGS },
-	{ "getsample", audioop_getsample, METH_OLDARGS },
-	{ "reverse", audioop_reverse, METH_OLDARGS },
-	{ "ratecv", audioop_ratecv, METH_VARARGS },
-	{ 0,          0 }
+        { "max", audioop_max, METH_OLDARGS },
+        { "minmax", audioop_minmax, METH_OLDARGS },
+        { "avg", audioop_avg, METH_OLDARGS },
+        { "maxpp", audioop_maxpp, METH_OLDARGS },
+        { "avgpp", audioop_avgpp, METH_OLDARGS },
+        { "rms", audioop_rms, METH_OLDARGS },
+        { "findfit", audioop_findfit, METH_OLDARGS },
+        { "findmax", audioop_findmax, METH_OLDARGS },
+        { "findfactor", audioop_findfactor, METH_OLDARGS },
+        { "cross", audioop_cross, METH_OLDARGS },
+        { "mul", audioop_mul, METH_OLDARGS },
+        { "add", audioop_add, METH_OLDARGS },
+        { "bias", audioop_bias, METH_OLDARGS },
+        { "ulaw2lin", audioop_ulaw2lin, METH_OLDARGS },
+        { "lin2ulaw", audioop_lin2ulaw, METH_OLDARGS },
+        { "alaw2lin", audioop_alaw2lin, METH_OLDARGS },
+        { "lin2alaw", audioop_lin2alaw, METH_OLDARGS },
+        { "lin2lin", audioop_lin2lin, METH_OLDARGS },
+        { "adpcm2lin", audioop_adpcm2lin, METH_OLDARGS },
+        { "lin2adpcm", audioop_lin2adpcm, METH_OLDARGS },
+        { "tomono", audioop_tomono, METH_OLDARGS },
+        { "tostereo", audioop_tostereo, METH_OLDARGS },
+        { "getsample", audioop_getsample, METH_OLDARGS },
+        { "reverse", audioop_reverse, METH_OLDARGS },
+        { "ratecv", audioop_ratecv, METH_VARARGS },
+        { 0,          0 }
 };
 
 PyMODINIT_FUNC
 initaudioop(void)
 {
-	PyObject *m, *d;
-	m = Py_InitModule("audioop", audioop_methods);
-	if (m == NULL)
-		return;
-	d = PyModule_GetDict(m);
-	AudioopError = PyErr_NewException("audioop.error", NULL, NULL);
-	if (AudioopError != NULL)
-	     PyDict_SetItemString(d,"error",AudioopError);
+        PyObject *m, *d;
+        m = Py_InitModule("audioop", audioop_methods);
+        if (m == NULL)
+                return;
+        d = PyModule_GetDict(m);
+        if (d == NULL)
+                return;
+        AudioopError = PyErr_NewException("audioop.error", NULL, NULL);
+        if (AudioopError != NULL)
+             PyDict_SetItemString(d,"error",AudioopError);
 }
diff --git a/Modules/cPickle.c b/Modules/cPickle.c
index 727dcc9..18df599 100644
--- a/Modules/cPickle.c
+++ b/Modules/cPickle.c
@@ -123,7 +123,7 @@
   *__getstate___str, *__setstate___str, *__name___str, *__reduce___str,
   *__reduce_ex___str,
   *write_str, *append_str,
-  *read_str, *readline_str, *__main___str, *__basicnew___str,
+  *read_str, *readline_str, *__main___str, 
   *copy_reg_str, *dispatch_table_str;
 
 /*************************************************************************
@@ -2909,38 +2909,28 @@
 static int
 Pickler_traverse(Picklerobject *self, visitproc visit, void *arg)
 {
-	int err;
-#define VISIT(SLOT) \
-	if (SLOT) { \
-		err = visit((PyObject *)(SLOT), arg); \
-		if (err) \
-			return err; \
-	}
-	VISIT(self->write);
-	VISIT(self->memo);
-	VISIT(self->fast_memo);
-	VISIT(self->arg);
-	VISIT(self->file);
-	VISIT(self->pers_func);
-	VISIT(self->inst_pers_func);
-	VISIT(self->dispatch_table);
-#undef VISIT
+	Py_VISIT(self->write);
+	Py_VISIT(self->memo);
+	Py_VISIT(self->fast_memo);
+	Py_VISIT(self->arg);
+	Py_VISIT(self->file);
+	Py_VISIT(self->pers_func);
+	Py_VISIT(self->inst_pers_func);
+	Py_VISIT(self->dispatch_table);
 	return 0;
 }
 
 static int
 Pickler_clear(Picklerobject *self)
 {
-#define CLEAR(SLOT) Py_XDECREF(SLOT); SLOT = NULL;
-	CLEAR(self->write);
-	CLEAR(self->memo);
-	CLEAR(self->fast_memo);
-	CLEAR(self->arg);
-	CLEAR(self->file);
-	CLEAR(self->pers_func);
-	CLEAR(self->inst_pers_func);
-	CLEAR(self->dispatch_table);
-#undef CLEAR
+	Py_CLEAR(self->write);
+	Py_CLEAR(self->memo);
+	Py_CLEAR(self->fast_memo);
+	Py_CLEAR(self->arg);
+	Py_CLEAR(self->file);
+	Py_CLEAR(self->pers_func);
+	Py_CLEAR(self->inst_pers_func);
+	Py_CLEAR(self->dispatch_table);
 	return 0;
 }
 
@@ -5260,41 +5250,30 @@
 static int
 Unpickler_traverse(Unpicklerobject *self, visitproc visit, void *arg)
 {
-	int err;
-
-#define VISIT(SLOT) \
-	if (SLOT) { \
-		err = visit((PyObject *)(SLOT), arg); \
-		if (err) \
-			return err; \
-	}
-	VISIT(self->readline);
-	VISIT(self->read);
-	VISIT(self->file);
-	VISIT(self->memo);
-	VISIT(self->stack);
-	VISIT(self->pers_func);
-	VISIT(self->arg);
-	VISIT(self->last_string);
-	VISIT(self->find_class);
-#undef VISIT
+	Py_VISIT(self->readline);
+	Py_VISIT(self->read);
+	Py_VISIT(self->file);
+	Py_VISIT(self->memo);
+	Py_VISIT(self->stack);
+	Py_VISIT(self->pers_func);
+	Py_VISIT(self->arg);
+	Py_VISIT(self->last_string);
+	Py_VISIT(self->find_class);
 	return 0;
 }
 
 static int
 Unpickler_clear(Unpicklerobject *self)
 {
-#define CLEAR(SLOT) Py_XDECREF(SLOT); SLOT = NULL
-	CLEAR(self->readline);
-	CLEAR(self->read);
-	CLEAR(self->file);
-	CLEAR(self->memo);
-	CLEAR(self->stack);
-	CLEAR(self->pers_func);
-	CLEAR(self->arg);
-	CLEAR(self->last_string);
-	CLEAR(self->find_class);
-#undef CLEAR
+	Py_CLEAR(self->readline);
+	Py_CLEAR(self->read);
+	Py_CLEAR(self->file);
+	Py_CLEAR(self->memo);
+	Py_CLEAR(self->stack);
+	Py_CLEAR(self->pers_func);
+	Py_CLEAR(self->arg);
+	Py_CLEAR(self->last_string);
+	Py_CLEAR(self->find_class);
 	return 0;
 }
 
@@ -5602,7 +5581,6 @@
 	INIT_STR(readline);
 	INIT_STR(copy_reg);
 	INIT_STR(dispatch_table);
-	INIT_STR(__basicnew__);
 
 	if (!( copy_reg = PyImport_ImportModule("copy_reg")))
 		return -1;
diff --git a/Modules/cStringIO.c b/Modules/cStringIO.c
index bdc9f00..4debb72 100644
--- a/Modules/cStringIO.c
+++ b/Modules/cStringIO.c
@@ -503,17 +503,17 @@
   0,	       			/*tp_itemsize*/
   /* methods */
   (destructor)O_dealloc,	/*tp_dealloc*/
-  (printfunc)0,			/*tp_print*/
+  0,				/*tp_print*/
   0,		 		/*tp_getattr */
   0,		 		/*tp_setattr */
-  (cmpfunc)0,			/*tp_compare*/
-  (reprfunc)0,			/*tp_repr*/
+  0,				/*tp_compare*/
+  0,				/*tp_repr*/
   0,				/*tp_as_number*/
   0,				/*tp_as_sequence*/
   0,				/*tp_as_mapping*/
-  (hashfunc)0,			/*tp_hash*/
-  (ternaryfunc)0,		/*tp_call*/
-  (reprfunc)0,			/*tp_str*/
+  0,				/*tp_hash*/
+  0	,			/*tp_call*/
+  0,				/*tp_str*/
   0,				/*tp_getattro */
   0,				/*tp_setattro */
   0,				/*tp_as_buffer */
@@ -624,17 +624,17 @@
   0,					/*tp_itemsize*/
   /* methods */
   (destructor)I_dealloc,		/*tp_dealloc*/
-  (printfunc)0,				/*tp_print*/
+  0,					/*tp_print*/
   0,		 			/* tp_getattr */
-  (setattrfunc)0,			/*tp_setattr*/
-  (cmpfunc)0,				/*tp_compare*/
-  (reprfunc)0,				/*tp_repr*/
+  0,					/*tp_setattr*/
+  0,					/*tp_compare*/
+  0,					/*tp_repr*/
   0,					/*tp_as_number*/
   0,					/*tp_as_sequence*/
   0,					/*tp_as_mapping*/
-  (hashfunc)0,				/*tp_hash*/
-  (ternaryfunc)0,			/*tp_call*/
-  (reprfunc)0,				/*tp_str*/
+  0,					/*tp_hash*/
+  0,					/*tp_call*/
+  0,					/*tp_str*/
   0,					/* tp_getattro */
   0,					/* tp_setattro */
   0,					/* tp_as_buffer */
diff --git a/Modules/ccpython.cc b/Modules/ccpython.cc
deleted file mode 100644
index a6e97ff..0000000
--- a/Modules/ccpython.cc
+++ /dev/null
@@ -1,11 +0,0 @@
-/* Minimal main program -- everything is loaded from the library */
-
-#include "Python.h"
-
-extern "C"
-DL_EXPORT(int) Py_Main( int argc, char *argv[] );
-
-int main( int argc, char *argv[] )
-{
-	return Py_Main(argc, argv);
-}
diff --git a/Modules/cjkcodecs/_codecs_cn.c b/Modules/cjkcodecs/_codecs_cn.c
index fd048d9..fb51297 100644
--- a/Modules/cjkcodecs/_codecs_cn.c
+++ b/Modules/cjkcodecs/_codecs_cn.c
@@ -217,11 +217,8 @@
 					break;
 				}
 
-			if (utrrange->first == 0) {
-				PyErr_SetString(PyExc_RuntimeError,
-						"unicode mapping invalid");
+			if (utrrange->first == 0)
 				return 1;
-			}
 			continue;
 		}
 
diff --git a/Modules/cjkcodecs/multibytecodec.c b/Modules/cjkcodecs/multibytecodec.c
index f51b6f2..340de18 100644
--- a/Modules/cjkcodecs/multibytecodec.c
+++ b/Modules/cjkcodecs/multibytecodec.c
@@ -6,6 +6,7 @@
 
 #define PY_SSIZE_T_CLEAN
 #include "Python.h"
+#include "structmember.h"
 #include "multibytecodec.h"
 
 typedef struct {
@@ -38,22 +39,14 @@
 are 'ignore' and 'replace' as well as any other name registerd with\n\
 codecs.register_error that is able to handle UnicodeDecodeErrors.");
 
-PyDoc_STRVAR(MultibyteCodec_StreamReader__doc__,
-"I.StreamReader(stream[, errors]) -> StreamReader instance");
-
-PyDoc_STRVAR(MultibyteCodec_StreamWriter__doc__,
-"I.StreamWriter(stream[, errors]) -> StreamWriter instance");
-
 static char *codeckwarglist[] = {"input", "errors", NULL};
+static char *incnewkwarglist[] = {"errors", NULL};
+static char *incrementalkwarglist[] = {"input", "final", NULL};
 static char *streamkwarglist[] = {"stream", "errors", NULL};
 
 static PyObject *multibytecodec_encode(MultibyteCodec *,
 		MultibyteCodec_State *, const Py_UNICODE **, Py_ssize_t,
 		PyObject *, int);
-static PyObject *mbstreamreader_create(MultibyteCodec *,
-		PyObject *, const char *);
-static PyObject *mbstreamwriter_create(MultibyteCodec *,
-		PyObject *, const char *);
 
 #define MBENC_RESET	MBENC_MAX<<1 /* reset after an encoding session */
 
@@ -83,7 +76,7 @@
 }
 
 static PyObject *
-get_errorcallback(const char *errors)
+internal_error_callback(const char *errors)
 {
 	if (errors == NULL || strcmp(errors, "strict") == 0)
 		return ERROR_STRICT;
@@ -91,17 +84,88 @@
 		return ERROR_IGNORE;
 	else if (strcmp(errors, "replace") == 0)
 		return ERROR_REPLACE;
-	else {
-		return PyCodec_LookupError(errors);
-	}
+	else
+		return PyString_FromString(errors);
 }
 
+static PyObject *
+call_error_callback(PyObject *errors, PyObject *exc)
+{
+	PyObject *args, *cb, *r;
+
+	assert(PyString_Check(errors));
+	cb = PyCodec_LookupError(PyString_AS_STRING(errors));
+	if (cb == NULL)
+		return NULL;
+
+	args = PyTuple_New(1);
+	if (args == NULL) {
+		Py_DECREF(cb);
+		return NULL;
+	}
+
+	PyTuple_SET_ITEM(args, 0, exc);
+	Py_INCREF(exc);
+
+	r = PyObject_CallObject(cb, args);
+	Py_DECREF(args);
+	Py_DECREF(cb);
+	return r;
+}
+
+static PyObject *
+codecctx_errors_get(MultibyteStatefulCodecContext *self)
+{
+	const char *errors;
+
+	if (self->errors == ERROR_STRICT)
+		errors = "strict";
+	else if (self->errors == ERROR_IGNORE)
+		errors = "ignore";
+	else if (self->errors == ERROR_REPLACE)
+		errors = "replace";
+	else {
+		Py_INCREF(self->errors);
+		return self->errors;
+	}
+
+	return PyString_FromString(errors);
+}
+
+static int
+codecctx_errors_set(MultibyteStatefulCodecContext *self, PyObject *value,
+		    void *closure)
+{
+	PyObject *cb;
+
+	if (!PyString_Check(value)) {
+		PyErr_SetString(PyExc_TypeError, "errors must be a string");
+		return -1;
+	}
+
+	cb = internal_error_callback(PyString_AS_STRING(value));
+	if (cb == NULL)
+		return -1;
+
+	ERROR_DECREF(self->errors);
+	self->errors = cb;
+	return 0;
+}
+
+/* This getset handlers list is used by all the stateful codec objects */
+static PyGetSetDef codecctx_getsets[] = {
+	{"errors",	(getter)codecctx_errors_get,
+			(setter)codecctx_errors_set,
+			PyDoc_STR("how to treat errors")},
+	{NULL,}
+};
+
 static int
 expand_encodebuffer(MultibyteEncodeBuffer *buf, Py_ssize_t esize)
 {
 	Py_ssize_t orgpos, orgsize;
 
-	orgpos = (Py_ssize_t)((char*)buf->outbuf -
+	orgpos = (Py_ssize_t)((char *)buf->outbuf -
 				PyString_AS_STRING(buf->outobj));
 	orgsize = PyString_GET_SIZE(buf->outobj);
 	if (_PyString_Resize(&buf->outobj, orgsize + (
@@ -125,8 +189,7 @@
 {
 	Py_ssize_t orgpos, orgsize;
 
-	orgpos = (Py_ssize_t)(buf->outbuf -
-				PyUnicode_AS_UNICODE(buf->outobj));
+	orgpos = (Py_ssize_t)(buf->outbuf - PyUnicode_AS_UNICODE(buf->outobj));
 	orgsize = PyUnicode_GET_SIZE(buf->outobj);
 	if (PyUnicode_Resize(&buf->outobj, orgsize + (
 	    esize < (orgsize >> 1) ? (orgsize >> 1) | 1 : esize)) == -1)
@@ -144,16 +207,21 @@
 			goto errorexit;					\
 }
 
+
+/**
+ * MultibyteCodec object
+ */
+
 static int
 multibytecodec_encerror(MultibyteCodec *codec,
 			MultibyteCodec_State *state,
 			MultibyteEncodeBuffer *buf,
 			PyObject *errors, Py_ssize_t e)
 {
-	PyObject *retobj = NULL, *retstr = NULL, *argsobj, *tobj;
+	PyObject *retobj = NULL, *retstr = NULL, *tobj;
 	Py_ssize_t retstrsize, newpos;
-	const char *reason;
 	Py_ssize_t esize, start, end;
+	const char *reason;
 
 	if (e > 0) {
 		reason = "illegal multibyte sequence";
@@ -166,7 +234,7 @@
 			return 0; /* retry it */
 		case MBERR_TOOFEW:
 			reason = "incomplete multibyte sequence";
-			esize = (size_t)(buf->inbuf_end - buf->inbuf);
+			esize = (Py_ssize_t)(buf->inbuf_end - buf->inbuf);
 			break;
 		case MBERR_INTERNAL:
 			PyErr_SetString(PyExc_RuntimeError,
@@ -230,21 +298,15 @@
 		goto errorexit;
 	}
 
-	argsobj = PyTuple_New(1);
-	if (argsobj == NULL)
-		goto errorexit;
-
-	PyTuple_SET_ITEM(argsobj, 0, buf->excobj);
-	Py_INCREF(buf->excobj);
-	retobj = PyObject_CallObject(errors, argsobj);
-	Py_DECREF(argsobj);
+	retobj = call_error_callback(errors, buf->excobj);
 	if (retobj == NULL)
 		goto errorexit;
 
 	if (!PyTuple_Check(retobj) || PyTuple_GET_SIZE(retobj) != 2 ||
 	    !PyUnicode_Check((tobj = PyTuple_GET_ITEM(retobj, 0))) ||
-	    !PyInt_Check(PyTuple_GET_ITEM(retobj, 1))) {
-		PyErr_SetString(PyExc_ValueError,
+	    !(PyInt_Check(PyTuple_GET_ITEM(retobj, 1)) ||
+	      PyLong_Check(PyTuple_GET_ITEM(retobj, 1)))) {
+		PyErr_SetString(PyExc_TypeError,
 				"encoding error handler must return "
 				"(unicode, int) tuple");
 		goto errorexit;
@@ -267,12 +329,13 @@
 	buf->outbuf += retstrsize;
 
 	newpos = PyInt_AsSsize_t(PyTuple_GET_ITEM(retobj, 1));
-	if (newpos < 0)
+	if (newpos < 0 && !PyErr_Occurred())
 		newpos += (Py_ssize_t)(buf->inbuf_end - buf->inbuf_top);
 	if (newpos < 0 || buf->inbuf_top + newpos > buf->inbuf_end) {
+		PyErr_Clear();
 		PyErr_Format(PyExc_IndexError,
-			     "position %d from error handler out of bounds",
-			     (int)newpos);
+			     "position %zd from error handler out of bounds",
+			     newpos);
 		goto errorexit;
 	}
 	buf->inbuf = buf->inbuf_top + newpos;
@@ -293,7 +356,7 @@
 			MultibyteDecodeBuffer *buf,
 			PyObject *errors, Py_ssize_t e)
 {
-	PyObject *argsobj, *retobj = NULL, *retuni = NULL;
+	PyObject *retobj = NULL, *retuni = NULL;
 	Py_ssize_t retunisize, newpos;
 	const char *reason;
 	Py_ssize_t esize, start, end;
@@ -309,7 +372,7 @@
 			return 0; /* retry it */
 		case MBERR_TOOFEW:
 			reason = "incomplete multibyte sequence";
-			esize = (size_t)(buf->inbuf_end - buf->inbuf);
+			esize = (Py_ssize_t)(buf->inbuf_end - buf->inbuf);
 			break;
 		case MBERR_INTERNAL:
 			PyErr_SetString(PyExc_RuntimeError,
@@ -354,21 +417,15 @@
 		goto errorexit;
 	}
 
-	argsobj = PyTuple_New(1);
-	if (argsobj == NULL)
-		goto errorexit;
-
-	PyTuple_SET_ITEM(argsobj, 0, buf->excobj);
-	Py_INCREF(buf->excobj);
-	retobj = PyObject_CallObject(errors, argsobj);
-	Py_DECREF(argsobj);
+	retobj = call_error_callback(errors, buf->excobj);
 	if (retobj == NULL)
 		goto errorexit;
 
 	if (!PyTuple_Check(retobj) || PyTuple_GET_SIZE(retobj) != 2 ||
 	    !PyUnicode_Check((retuni = PyTuple_GET_ITEM(retobj, 0))) ||
-	    !PyInt_Check(PyTuple_GET_ITEM(retobj, 1))) {
-		PyErr_SetString(PyExc_ValueError,
+	    !(PyInt_Check(PyTuple_GET_ITEM(retobj, 1)) ||
+	      PyLong_Check(PyTuple_GET_ITEM(retobj, 1)))) {
+		PyErr_SetString(PyExc_TypeError,
 				"decoding error handler must return "
 				"(unicode, int) tuple");
 		goto errorexit;
@@ -383,12 +440,13 @@
 	}
 
 	newpos = PyInt_AsSsize_t(PyTuple_GET_ITEM(retobj, 1));
-	if (newpos < 0)
+	if (newpos < 0 && !PyErr_Occurred())
 		newpos += (Py_ssize_t)(buf->inbuf_end - buf->inbuf_top);
 	if (newpos < 0 || buf->inbuf_top + newpos > buf->inbuf_end) {
+		PyErr_Clear();
 		PyErr_Format(PyExc_IndexError,
-				"position %d from error handler out of bounds",
-				(int)newpos);
+			     "position %zd from error handler out of bounds",
+			     newpos);
 		goto errorexit;
 	}
 	buf->inbuf = buf->inbuf_top + newpos;
@@ -453,7 +511,7 @@
 				goto errorexit;
 		}
 
-	finalsize = (Py_ssize_t)((char*)buf.outbuf -
+	finalsize = (Py_ssize_t)((char *)buf.outbuf -
 				 PyString_AS_STRING(buf.outobj));
 
 	if (finalsize != PyString_GET_SIZE(buf.outobj))
@@ -500,7 +558,7 @@
 	data = PyUnicode_AS_UNICODE(arg);
 	datalen = PyUnicode_GET_SIZE(arg);
 
-	errorcb = get_errorcallback(errors);
+	errorcb = internal_error_callback(errors);
 	if (errorcb == NULL) {
 		Py_XDECREF(ucvt);
 		return NULL;
@@ -515,16 +573,12 @@
 	if (r == NULL)
 		goto errorexit;
 
-	if (errorcb > ERROR_MAX) {
-		Py_DECREF(errorcb);
-	}
+	ERROR_DECREF(errorcb);
 	Py_XDECREF(ucvt);
 	return make_tuple(r, datalen);
 
 errorexit:
-	if (errorcb > ERROR_MAX) {
-		Py_DECREF(errorcb);
-	}
+	ERROR_DECREF(errorcb);
 	Py_XDECREF(ucvt);
 	return NULL;
 }
@@ -543,18 +597,16 @@
 				codeckwarglist, &data, &datalen, &errors))
 		return NULL;
 
-	errorcb = get_errorcallback(errors);
+	errorcb = internal_error_callback(errors);
 	if (errorcb == NULL)
 		return NULL;
 
 	if (datalen == 0) {
-		if (errorcb > ERROR_MAX) {
-			Py_DECREF(errorcb);
-		}
+		ERROR_DECREF(errorcb);
 		return make_tuple(PyUnicode_FromUnicode(NULL, 0), 0);
 	}
 
-	buf.outobj = buf.excobj = NULL;
+	buf.excobj = NULL;
 	buf.inbuf = buf.inbuf_top = (unsigned char *)data;
 	buf.inbuf_end = buf.inbuf_top + datalen;
 	buf.outobj = PyUnicode_FromUnicode(NULL, datalen);
@@ -590,49 +642,17 @@
 			goto errorexit;
 
 	Py_XDECREF(buf.excobj);
-	if (errorcb > ERROR_MAX) {
-		Py_DECREF(errorcb);
-	}
+	ERROR_DECREF(errorcb);
 	return make_tuple(buf.outobj, datalen);
 
 errorexit:
-	if (errorcb > ERROR_MAX) {
-		Py_DECREF(errorcb);
-	}
+	ERROR_DECREF(errorcb);
 	Py_XDECREF(buf.excobj);
 	Py_XDECREF(buf.outobj);
 
 	return NULL;
 }
 
-static PyObject *
-MultibyteCodec_StreamReader(MultibyteCodecObject *self,
-			    PyObject *args, PyObject *kwargs)
-{
-	PyObject *stream;
-	char *errors = NULL;
-
-	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|s:StreamReader",
-				streamkwarglist, &stream, &errors))
-		return NULL;
-
-	return mbstreamreader_create(self->codec, stream, errors);
-}
-
-static PyObject *
-MultibyteCodec_StreamWriter(MultibyteCodecObject *self,
-			    PyObject *args, PyObject *kwargs)
-{
-	PyObject *stream;
-	char *errors = NULL;
-
-	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|s:StreamWriter",
-				streamkwarglist, &stream, &errors))
-		return NULL;
-
-	return mbstreamwriter_create(self->codec, stream, errors);
-}
-
 static struct PyMethodDef multibytecodec_methods[] = {
 	{"encode",	(PyCFunction)MultibyteCodec_Encode,
 			METH_VARARGS | METH_KEYWORDS,
@@ -640,12 +660,6 @@
 	{"decode",	(PyCFunction)MultibyteCodec_Decode,
 			METH_VARARGS | METH_KEYWORDS,
 			MultibyteCodec_Decode__doc__},
-	{"StreamReader",(PyCFunction)MultibyteCodec_StreamReader,
-			METH_VARARGS | METH_KEYWORDS,
-			MultibyteCodec_StreamReader__doc__},
-	{"StreamWriter",(PyCFunction)MultibyteCodec_StreamWriter,
-			METH_VARARGS | METH_KEYWORDS,
-			MultibyteCodec_StreamWriter__doc__},
 	{NULL,		NULL},
 };
 
@@ -655,8 +669,6 @@
 	PyObject_Del(self);
 }
 
-
-
 static PyTypeObject MultibyteCodec_Type = {
 	PyObject_HEAD_INIT(NULL)
 	0,				/* ob_size */
@@ -690,13 +702,498 @@
 	multibytecodec_methods,		/* tp_methods */
 };
 
+
+/**
+ * Utility functions for stateful codec mechanism
+ */
+
+#define STATEFUL_DCTX(o)	((MultibyteStatefulDecoderContext *)(o))
+#define STATEFUL_ECTX(o)	((MultibyteStatefulEncoderContext *)(o))
+
+static PyObject *
+encoder_encode_stateful(MultibyteStatefulEncoderContext *ctx,
+			PyObject *unistr, int final)
+{
+	PyObject *ucvt, *r = NULL;
+	Py_UNICODE *inbuf, *inbuf_end, *inbuf_tmp = NULL;
+	Py_ssize_t datalen, origpending;
+
+	if (PyUnicode_Check(unistr))
+		ucvt = NULL;
+	else {
+		unistr = ucvt = PyObject_Unicode(unistr);
+		if (unistr == NULL)
+			return NULL;
+		else if (!PyUnicode_Check(unistr)) {
+			PyErr_SetString(PyExc_TypeError,
+				"couldn't convert the object to unicode.");
+			Py_DECREF(ucvt);
+			return NULL;
+		}
+	}
+
+	datalen = PyUnicode_GET_SIZE(unistr);
+	origpending = ctx->pendingsize;
+
+	if (origpending > 0) {
+		inbuf_tmp = PyMem_New(Py_UNICODE, datalen + ctx->pendingsize);
+		if (inbuf_tmp == NULL)
+			goto errorexit;
+		memcpy(inbuf_tmp, ctx->pending,
+			Py_UNICODE_SIZE * ctx->pendingsize);
+		memcpy(inbuf_tmp + ctx->pendingsize,
+			PyUnicode_AS_UNICODE(unistr),
+			Py_UNICODE_SIZE * datalen);
+		datalen += ctx->pendingsize;
+		ctx->pendingsize = 0;
+		inbuf = inbuf_tmp;
+	}
+	else
+		inbuf = (Py_UNICODE *)PyUnicode_AS_UNICODE(unistr);
+
+	inbuf_end = inbuf + datalen;
+
+	r = multibytecodec_encode(ctx->codec, &ctx->state,
+			(const Py_UNICODE **)&inbuf,
+			datalen, ctx->errors, final ? MBENC_FLUSH : 0);
+	if (r == NULL) {
+		/* recover the original pending buffer */
+		if (origpending > 0)
+			memcpy(ctx->pending, inbuf_tmp,
+				Py_UNICODE_SIZE * origpending);
+		ctx->pendingsize = origpending;
+		goto errorexit;
+	}
+
+	if (inbuf < inbuf_end) {
+		ctx->pendingsize = (Py_ssize_t)(inbuf_end - inbuf);
+		if (ctx->pendingsize > MAXENCPENDING) {
+			/* normal codecs can't reach here */
+			ctx->pendingsize = 0;
+			PyErr_SetString(PyExc_UnicodeError,
+					"pending buffer overflow");
+			goto errorexit;
+		}
+		memcpy(ctx->pending, inbuf,
+			ctx->pendingsize * Py_UNICODE_SIZE);
+	}
+
+	if (inbuf_tmp != NULL)
+		PyMem_Del(inbuf_tmp);
+	Py_XDECREF(ucvt);
+	return r;
+
+errorexit:
+	if (inbuf_tmp != NULL)
+		PyMem_Del(inbuf_tmp);
+	Py_XDECREF(r);
+	Py_XDECREF(ucvt);
+	return NULL;
+}
+
+static int
+decoder_append_pending(MultibyteStatefulDecoderContext *ctx,
+		       MultibyteDecodeBuffer *buf)
+{
+	Py_ssize_t npendings;
+
+	npendings = (Py_ssize_t)(buf->inbuf_end - buf->inbuf);
+	if (npendings + ctx->pendingsize > MAXDECPENDING) {
+		PyErr_SetString(PyExc_UnicodeError, "pending buffer overflow");
+		return -1;
+	}
+	memcpy(ctx->pending + ctx->pendingsize, buf->inbuf, npendings);
+	ctx->pendingsize += npendings;
+	return 0;
+}
+
+static int
+decoder_prepare_buffer(MultibyteDecodeBuffer *buf, const char *data,
+		       Py_ssize_t size)
+{
+	buf->inbuf = buf->inbuf_top = (const unsigned char *)data;
+	buf->inbuf_end = buf->inbuf_top + size;
+	if (buf->outobj == NULL) { /* only if outobj is not allocated yet */
+		buf->outobj = PyUnicode_FromUnicode(NULL, size);
+		if (buf->outobj == NULL)
+			return -1;
+		buf->outbuf = PyUnicode_AS_UNICODE(buf->outobj);
+		buf->outbuf_end = buf->outbuf +
+				  PyUnicode_GET_SIZE(buf->outobj);
+	}
+
+	return 0;
+}
+
+static int
+decoder_feed_buffer(MultibyteStatefulDecoderContext *ctx,
+		    MultibyteDecodeBuffer *buf)
+{
+	while (buf->inbuf < buf->inbuf_end) {
+		Py_ssize_t inleft, outleft;
+		int r;
+
+		inleft = (Py_ssize_t)(buf->inbuf_end - buf->inbuf);
+		outleft = (Py_ssize_t)(buf->outbuf_end - buf->outbuf);
+
+		r = ctx->codec->decode(&ctx->state, ctx->codec->config,
+			&buf->inbuf, inleft, &buf->outbuf, outleft);
+		if (r == 0 || r == MBERR_TOOFEW)
+			break;
+		else if (multibytecodec_decerror(ctx->codec, &ctx->state,
+						 buf, ctx->errors, r))
+			return -1;
+	}
+	return 0;
+}
+
+
+/**
+ * MultibyteIncrementalEncoder object
+ */
+
+static PyObject *
+mbiencoder_encode(MultibyteIncrementalEncoderObject *self,
+		  PyObject *args, PyObject *kwargs)
+{
+	PyObject *data;
+	int final = 0;
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "O|i:encode",
+			incrementalkwarglist, &data, &final))
+		return NULL;
+
+	return encoder_encode_stateful(STATEFUL_ECTX(self), data, final);
+}
+
+static PyObject *
+mbiencoder_reset(MultibyteIncrementalEncoderObject *self)
+{
+	if (self->codec->decreset != NULL &&
+	    self->codec->decreset(&self->state, self->codec->config) != 0)
+		return NULL;
+	self->pendingsize = 0;
+
+	Py_RETURN_NONE;
+}
+
+static struct PyMethodDef mbiencoder_methods[] = {
+	{"encode",	(PyCFunction)mbiencoder_encode,
+			METH_VARARGS | METH_KEYWORDS, NULL},
+	{"reset",	(PyCFunction)mbiencoder_reset,
+			METH_NOARGS, NULL},
+	{NULL,		NULL},
+};
+
+static PyObject *
+mbiencoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+	MultibyteIncrementalEncoderObject *self;
+	PyObject *codec = NULL;
+	char *errors = NULL;
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwds, "|s:IncrementalEncoder",
+					 incnewkwarglist, &errors))
+		return NULL;
+
+	self = (MultibyteIncrementalEncoderObject *)type->tp_alloc(type, 0);
+	if (self == NULL)
+		return NULL;
+
+	codec = PyObject_GetAttrString((PyObject *)type, "codec");
+	if (codec == NULL)
+		goto errorexit;
+	if (!MultibyteCodec_Check(codec)) {
+		PyErr_SetString(PyExc_TypeError, "codec is unexpected type");
+		goto errorexit;
+	}
+
+	self->codec = ((MultibyteCodecObject *)codec)->codec;
+	self->pendingsize = 0;
+	self->errors = internal_error_callback(errors);
+	if (self->errors == NULL)
+		goto errorexit;
+	if (self->codec->encinit != NULL &&
+	    self->codec->encinit(&self->state, self->codec->config) != 0)
+		goto errorexit;
+
+	Py_DECREF(codec);
+	return (PyObject *)self;
+
+errorexit:
+	Py_XDECREF(self);
+	Py_XDECREF(codec);
+	return NULL;
+}
+
+static int
+mbiencoder_traverse(MultibyteIncrementalEncoderObject *self,
+		    visitproc visit, void *arg)
+{
+	if (ERROR_ISCUSTOM(self->errors))
+		Py_VISIT(self->errors);
+	return 0;
+}
+
+static void
+mbiencoder_dealloc(MultibyteIncrementalEncoderObject *self)
+{
+	PyObject_GC_UnTrack(self);
+	ERROR_DECREF(self->errors);
+	self->ob_type->tp_free(self);
+}
+
+static PyTypeObject MultibyteIncrementalEncoder_Type = {
+	PyObject_HEAD_INIT(NULL)
+	0,				/* ob_size */
+	"MultibyteIncrementalEncoder",	/* tp_name */
+	sizeof(MultibyteIncrementalEncoderObject), /* tp_basicsize */
+	0,				/* tp_itemsize */
+	/*  methods  */
+	(destructor)mbiencoder_dealloc, /* tp_dealloc */
+	0,				/* tp_print */
+	0,				/* tp_getattr */
+	0,				/* tp_setattr */
+	0,				/* tp_compare */
+	0,				/* tp_repr */
+	0,				/* tp_as_number */
+	0,				/* tp_as_sequence */
+	0,				/* tp_as_mapping */
+	0,				/* tp_hash */
+	0,				/* tp_call */
+	0,				/* tp_str */
+	PyObject_GenericGetAttr,	/* tp_getattro */
+	0,				/* tp_setattro */
+	0,				/* tp_as_buffer */
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC
+		| Py_TPFLAGS_BASETYPE,	/* tp_flags */
+	0,				/* tp_doc */
+	(traverseproc)mbiencoder_traverse,	/* tp_traverse */
+	0,				/* tp_clear */
+	0,				/* tp_richcompare */
+	0,				/* tp_weaklistoffset */
+	0,				/* tp_iter */
+	0,				/* tp_iterext */
+	mbiencoder_methods,		/* tp_methods */
+	0,				/* tp_members */
+	codecctx_getsets,		/* tp_getset */
+	0,				/* tp_base */
+	0,				/* tp_dict */
+	0,				/* tp_descr_get */
+	0,				/* tp_descr_set */
+	0,				/* tp_dictoffset */
+	0,				/* tp_init */
+	0,				/* tp_alloc */
+	mbiencoder_new,			/* tp_new */
+};
+
+
+/**
+ * MultibyteIncrementalDecoder object
+ */
+
+static PyObject *
+mbidecoder_decode(MultibyteIncrementalDecoderObject *self,
+		  PyObject *args, PyObject *kwargs)
+{
+	MultibyteDecodeBuffer buf;
+	char *data, *wdata;
+	Py_ssize_t wsize, finalsize = 0, size, origpending;
+	int final = 0;
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "t#|i:decode",
+			incrementalkwarglist, &data, &size, &final))
+		return NULL;
+
+	buf.outobj = buf.excobj = NULL;
+	origpending = self->pendingsize;
+
+	if (self->pendingsize == 0) {
+		wsize = size;
+		wdata = data;
+	}
+	else {
+		wsize = size + self->pendingsize;
+		wdata = PyMem_Malloc(wsize);
+		if (wdata == NULL)
+			goto errorexit;
+		memcpy(wdata, self->pending, self->pendingsize);
+		memcpy(wdata + self->pendingsize, data, size);
+		self->pendingsize = 0;
+	}
+
+	if (decoder_prepare_buffer(&buf, wdata, wsize) != 0)
+		goto errorexit;
+
+	if (decoder_feed_buffer(STATEFUL_DCTX(self), &buf))
+		goto errorexit;
+
+	if (final && buf.inbuf < buf.inbuf_end) {
+		if (multibytecodec_decerror(self->codec, &self->state,
+				&buf, self->errors, MBERR_TOOFEW)) {
+			/* recover the original pending buffer */
+			memcpy(self->pending, wdata, origpending);
+			self->pendingsize = origpending;
+			goto errorexit;
+		}
+	}
+
+	if (buf.inbuf < buf.inbuf_end) { /* pending sequence still exists */
+		if (decoder_append_pending(STATEFUL_DCTX(self), &buf) != 0)
+			goto errorexit;
+	}
+
+	finalsize = (Py_ssize_t)(buf.outbuf - PyUnicode_AS_UNICODE(buf.outobj));
+	if (finalsize != PyUnicode_GET_SIZE(buf.outobj))
+		if (PyUnicode_Resize(&buf.outobj, finalsize) == -1)
+			goto errorexit;
+
+	if (wdata != data)
+		PyMem_Del(wdata);
+	Py_XDECREF(buf.excobj);
+	return buf.outobj;
+
+errorexit:
+	if (wdata != NULL && wdata != data)
+		PyMem_Del(wdata);
+	Py_XDECREF(buf.excobj);
+	Py_XDECREF(buf.outobj);
+	return NULL;
+}
+
+static PyObject *
+mbidecoder_reset(MultibyteIncrementalDecoderObject *self)
+{
+	if (self->codec->decreset != NULL &&
+	    self->codec->decreset(&self->state, self->codec->config) != 0)
+		return NULL;
+	self->pendingsize = 0;
+
+	Py_RETURN_NONE;
+}
+
+static struct PyMethodDef mbidecoder_methods[] = {
+	{"decode",	(PyCFunction)mbidecoder_decode,
+			METH_VARARGS | METH_KEYWORDS, NULL},
+	{"reset",	(PyCFunction)mbidecoder_reset,
+			METH_NOARGS, NULL},
+	{NULL,		NULL},
+};
+
+static PyObject *
+mbidecoder_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+	MultibyteIncrementalDecoderObject *self;
+	PyObject *codec = NULL;
+	char *errors = NULL;
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwds, "|s:IncrementalDecoder",
+					 incnewkwarglist, &errors))
+		return NULL;
+
+	self = (MultibyteIncrementalDecoderObject *)type->tp_alloc(type, 0);
+	if (self == NULL)
+		return NULL;
+
+	codec = PyObject_GetAttrString((PyObject *)type, "codec");
+	if (codec == NULL)
+		goto errorexit;
+	if (!MultibyteCodec_Check(codec)) {
+		PyErr_SetString(PyExc_TypeError, "codec is unexpected type");
+		goto errorexit;
+	}
+
+	self->codec = ((MultibyteCodecObject *)codec)->codec;
+	self->pendingsize = 0;
+	self->errors = internal_error_callback(errors);
+	if (self->errors == NULL)
+		goto errorexit;
+	if (self->codec->decinit != NULL &&
+	    self->codec->decinit(&self->state, self->codec->config) != 0)
+		goto errorexit;
+
+	Py_DECREF(codec);
+	return (PyObject *)self;
+
+errorexit:
+	Py_XDECREF(self);
+	Py_XDECREF(codec);
+	return NULL;
+}
+
+static int
+mbidecoder_traverse(MultibyteIncrementalDecoderObject *self,
+		    visitproc visit, void *arg)
+{
+	if (ERROR_ISCUSTOM(self->errors))
+		Py_VISIT(self->errors);
+	return 0;
+}
+
+static void
+mbidecoder_dealloc(MultibyteIncrementalDecoderObject *self)
+{
+	PyObject_GC_UnTrack(self);
+	ERROR_DECREF(self->errors);
+	self->ob_type->tp_free(self);
+}
+
+static PyTypeObject MultibyteIncrementalDecoder_Type = {
+	PyObject_HEAD_INIT(NULL)
+	0,				/* ob_size */
+	"MultibyteIncrementalDecoder",	/* tp_name */
+	sizeof(MultibyteIncrementalDecoderObject), /* tp_basicsize */
+	0,				/* tp_itemsize */
+	/*  methods  */
+	(destructor)mbidecoder_dealloc, /* tp_dealloc */
+	0,				/* tp_print */
+	0,				/* tp_getattr */
+	0,				/* tp_setattr */
+	0,				/* tp_compare */
+	0,				/* tp_repr */
+	0,				/* tp_as_number */
+	0,				/* tp_as_sequence */
+	0,				/* tp_as_mapping */
+	0,				/* tp_hash */
+	0,				/* tp_call */
+	0,				/* tp_str */
+	PyObject_GenericGetAttr,	/* tp_getattro */
+	0,				/* tp_setattro */
+	0,				/* tp_as_buffer */
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC
+		| Py_TPFLAGS_BASETYPE,	/* tp_flags */
+	0,				/* tp_doc */
+	(traverseproc)mbidecoder_traverse,	/* tp_traverse */
+	0,				/* tp_clear */
+	0,				/* tp_richcompare */
+	0,				/* tp_weaklistoffset */
+	0,				/* tp_iter */
+	0,				/* tp_iterext */
+	mbidecoder_methods,		/* tp_methods */
+	0,				/* tp_members */
+	codecctx_getsets,		/* tp_getset */
+	0,				/* tp_base */
+	0,				/* tp_dict */
+	0,				/* tp_descr_get */
+	0,				/* tp_descr_set */
+	0,				/* tp_dictoffset */
+	0,				/* tp_init */
+	0,				/* tp_alloc */
+	mbidecoder_new,			/* tp_new */
+};
+
+
+/**
+ * MultibyteStreamReader object
+ */
+
 static PyObject *
 mbstreamreader_iread(MultibyteStreamReaderObject *self,
 		     const char *method, Py_ssize_t sizehint)
 {
 	MultibyteDecodeBuffer buf;
 	PyObject *cres;
-	Py_ssize_t rsize, r, finalsize = 0;
+	Py_ssize_t rsize, finalsize = 0;
 
 	if (sizehint == 0)
 		return PyUnicode_FromUnicode(NULL, 0);
@@ -740,39 +1237,13 @@
 		}
 
 		rsize = PyString_GET_SIZE(cres);
-		buf.inbuf = buf.inbuf_top =
-			(unsigned char *)PyString_AS_STRING(cres);
-		buf.inbuf_end = buf.inbuf_top + rsize;
-		if (buf.outobj == NULL) {
-			buf.outobj = PyUnicode_FromUnicode(NULL, rsize);
-			if (buf.outobj == NULL)
-				goto errorexit;
-			buf.outbuf = PyUnicode_AS_UNICODE(buf.outobj);
-			buf.outbuf_end = buf.outbuf +
-					PyUnicode_GET_SIZE(buf.outobj);
-		}
+		if (decoder_prepare_buffer(&buf, PyString_AS_STRING(cres),
+					   rsize) != 0)
+			goto errorexit;
 
-		r = 0;
-		if (rsize > 0)
-			while (buf.inbuf < buf.inbuf_end) {
-				Py_ssize_t inleft, outleft;
-
-				inleft = (Py_ssize_t)(buf.inbuf_end -
-						      buf.inbuf);
-				outleft = (Py_ssize_t)(buf.outbuf_end -
-						       buf.outbuf);
-
-				r = self->codec->decode(&self->state,
-							self->codec->config,
-							&buf.inbuf, inleft,
-							&buf.outbuf, outleft);
-				if (r == 0 || r == MBERR_TOOFEW)
-					break;
-				else if (multibytecodec_decerror(self->codec,
-						&self->state, &buf,
-						self->errors, r))
-					goto errorexit;
-			}
+		if (rsize > 0 && decoder_feed_buffer(
+				(MultibyteStatefulDecoderContext *)self, &buf))
+			goto errorexit;
 
 		if (rsize == 0 || sizehint < 0) { /* end of file */
 			if (buf.inbuf < buf.inbuf_end &&
@@ -782,20 +1253,9 @@
 		}
 
 		if (buf.inbuf < buf.inbuf_end) { /* pending sequence exists */
-			Py_ssize_t npendings;
-
-			/* we can't assume that pendingsize is still 0 here.
-			 * because this function can be called recursively
-			 * from error callback */
-			npendings = (Py_ssize_t)(buf.inbuf_end - buf.inbuf);
-			if (npendings + self->pendingsize > MAXDECPENDING) {
-				PyErr_SetString(PyExc_RuntimeError,
-						"pending buffer overflow");
+			if (decoder_append_pending(STATEFUL_DCTX(self),
+						   &buf) != 0)
 				goto errorexit;
-			}
-			memcpy(self->pending + self->pendingsize, buf.inbuf,
-				npendings);
-			self->pendingsize += npendings;
 		}
 
 		finalsize = (Py_ssize_t)(buf.outbuf -
@@ -901,8 +1361,7 @@
 		return NULL;
 	self->pendingsize = 0;
 
-	Py_INCREF(Py_None);
-	return Py_None;
+	Py_RETURN_NONE;
 }
 
 static struct PyMethodDef mbstreamreader_methods[] = {
@@ -917,18 +1376,75 @@
 	{NULL,		NULL},
 };
 
+static PyMemberDef mbstreamreader_members[] = {
+	{"stream",	T_OBJECT,
+			offsetof(MultibyteStreamReaderObject, stream),
+			READONLY, NULL},
+	{NULL,}
+};
+
+static PyObject *
+mbstreamreader_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+	MultibyteStreamReaderObject *self;
+	PyObject *stream, *codec = NULL;
+	char *errors = NULL;
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|s:StreamReader",
+				streamkwarglist, &stream, &errors))
+		return NULL;
+
+	self = (MultibyteStreamReaderObject *)type->tp_alloc(type, 0);
+	if (self == NULL)
+		return NULL;
+
+	codec = PyObject_GetAttrString((PyObject *)type, "codec");
+	if (codec == NULL)
+		goto errorexit;
+	if (!MultibyteCodec_Check(codec)) {
+		PyErr_SetString(PyExc_TypeError, "codec is unexpected type");
+		goto errorexit;
+	}
+
+	self->codec = ((MultibyteCodecObject *)codec)->codec;
+	self->stream = stream;
+	Py_INCREF(stream);
+	self->pendingsize = 0;
+	self->errors = internal_error_callback(errors);
+	if (self->errors == NULL)
+		goto errorexit;
+	if (self->codec->decinit != NULL &&
+	    self->codec->decinit(&self->state, self->codec->config) != 0)
+		goto errorexit;
+
+	Py_DECREF(codec);
+	return (PyObject *)self;
+
+errorexit:
+	Py_XDECREF(self);
+	Py_XDECREF(codec);
+	return NULL;
+}
+
+static int
+mbstreamreader_traverse(MultibyteStreamReaderObject *self,
+			visitproc visit, void *arg)
+{
+	if (ERROR_ISCUSTOM(self->errors))
+		Py_VISIT(self->errors);
+	Py_VISIT(self->stream);
+	return 0;
+}
+
 static void
 mbstreamreader_dealloc(MultibyteStreamReaderObject *self)
 {
-	if (self->errors > ERROR_MAX) {
-		Py_DECREF(self->errors);
-	}
+	PyObject_GC_UnTrack(self);
+	ERROR_DECREF(self->errors);
 	Py_DECREF(self->stream);
-	PyObject_Del(self);
+	self->ob_type->tp_free(self);
 }
 
-
-
 static PyTypeObject MultibyteStreamReader_Type = {
 	PyObject_HEAD_INIT(NULL)
 	0,				/* ob_size */
@@ -951,97 +1467,50 @@
 	PyObject_GenericGetAttr,	/* tp_getattro */
 	0,				/* tp_setattro */
 	0,				/* tp_as_buffer */
-	Py_TPFLAGS_DEFAULT,		/* tp_flags */
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC
+		| Py_TPFLAGS_BASETYPE,	/* tp_flags */
 	0,				/* tp_doc */
-	0,				/* tp_traverse */
+	(traverseproc)mbstreamreader_traverse,	/* tp_traverse */
 	0,				/* tp_clear */
 	0,				/* tp_richcompare */
 	0,				/* tp_weaklistoffset */
 	0,				/* tp_iter */
 	0,				/* tp_iterext */
 	mbstreamreader_methods,		/* tp_methods */
+	mbstreamreader_members,		/* tp_members */
+	codecctx_getsets,		/* tp_getset */
+	0,				/* tp_base */
+	0,				/* tp_dict */
+	0,				/* tp_descr_get */
+	0,				/* tp_descr_set */
+	0,				/* tp_dictoffset */
+	0,				/* tp_init */
+	0,				/* tp_alloc */
+	mbstreamreader_new,		/* tp_new */
 };
 
+
+/**
+ * MultibyteStreamWriter object
+ */
+
 static int
 mbstreamwriter_iwrite(MultibyteStreamWriterObject *self,
 		      PyObject *unistr)
 {
-	PyObject *wr, *ucvt, *r = NULL;
-	Py_UNICODE *inbuf, *inbuf_end, *inbuf_tmp = NULL;
-	Py_ssize_t datalen;
+	PyObject *str, *wr;
 
-	if (PyUnicode_Check(unistr))
-		ucvt = NULL;
-	else {
-		unistr = ucvt = PyObject_Unicode(unistr);
-		if (unistr == NULL)
-			return -1;
-		else if (!PyUnicode_Check(unistr)) {
-			PyErr_SetString(PyExc_TypeError,
-				"couldn't convert the object to unicode.");
-			Py_DECREF(ucvt);
-			return -1;
-		}
-	}
+	str = encoder_encode_stateful(STATEFUL_ECTX(self), unistr, 0);
+	if (str == NULL)
+		return -1;
 
-	datalen = PyUnicode_GET_SIZE(unistr);
-	if (datalen == 0) {
-		Py_XDECREF(ucvt);
-		return 0;
-	}
-
-	if (self->pendingsize > 0) {
-		inbuf_tmp = PyMem_New(Py_UNICODE, datalen + self->pendingsize);
-		if (inbuf_tmp == NULL)
-			goto errorexit;
-		memcpy(inbuf_tmp, self->pending,
-			Py_UNICODE_SIZE * self->pendingsize);
-		memcpy(inbuf_tmp + self->pendingsize,
-			PyUnicode_AS_UNICODE(unistr),
-			Py_UNICODE_SIZE * datalen);
-		datalen += self->pendingsize;
-		self->pendingsize = 0;
-		inbuf = inbuf_tmp;
-	}
-	else
-		inbuf = (Py_UNICODE *)PyUnicode_AS_UNICODE(unistr);
-
-	inbuf_end = inbuf + datalen;
-
-	r = multibytecodec_encode(self->codec, &self->state,
-			(const Py_UNICODE **)&inbuf, datalen, self->errors, 0);
-	if (r == NULL)
-		goto errorexit;
-
-	if (inbuf < inbuf_end) {
-		self->pendingsize = (Py_ssize_t)(inbuf_end - inbuf);
-		if (self->pendingsize > MAXENCPENDING) {
-			self->pendingsize = 0;
-			PyErr_SetString(PyExc_RuntimeError,
-					"pending buffer overflow");
-			goto errorexit;
-		}
-		memcpy(self->pending, inbuf,
-			self->pendingsize * Py_UNICODE_SIZE);
-	}
-
-	wr = PyObject_CallMethod(self->stream, "write", "O", r);
+	wr = PyObject_CallMethod(self->stream, "write", "O", str);
+	Py_DECREF(str);
 	if (wr == NULL)
-		goto errorexit;
+		return -1;
 
-	if (inbuf_tmp != NULL)
-		PyMem_Del(inbuf_tmp);
-	Py_DECREF(r);
 	Py_DECREF(wr);
-	Py_XDECREF(ucvt);
 	return 0;
-
-errorexit:
-	if (inbuf_tmp != NULL)
-		PyMem_Del(inbuf_tmp);
-	Py_XDECREF(r);
-	Py_XDECREF(ucvt);
-	return -1;
 }
 
 static PyObject *
@@ -1054,10 +1523,8 @@
 
 	if (mbstreamwriter_iwrite(self, strobj))
 		return NULL;
-	else {
-		Py_INCREF(Py_None);
-		return Py_None;
-	}
+	else
+		Py_RETURN_NONE;
 }
 
 static PyObject *
@@ -1087,8 +1554,7 @@
 			return NULL;
 	}
 
-	Py_INCREF(Py_None);
-	return Py_None;
+	Py_RETURN_NONE;
 }
 
 static PyObject *
@@ -1119,18 +1585,69 @@
 	}
 	Py_DECREF(pwrt);
 
-	Py_INCREF(Py_None);
-	return Py_None;
+	Py_RETURN_NONE;
+}
+
+static PyObject *
+mbstreamwriter_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
+{
+	MultibyteStreamWriterObject *self;
+	PyObject *stream, *codec = NULL;
+	char *errors = NULL;
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwds, "O|s:StreamWriter",
+				streamkwarglist, &stream, &errors))
+		return NULL;
+
+	self = (MultibyteStreamWriterObject *)type->tp_alloc(type, 0);
+	if (self == NULL)
+		return NULL;
+
+	codec = PyObject_GetAttrString((PyObject *)type, "codec");
+	if (codec == NULL)
+		goto errorexit;
+	if (!MultibyteCodec_Check(codec)) {
+		PyErr_SetString(PyExc_TypeError, "codec is unexpected type");
+		goto errorexit;
+	}
+
+	self->codec = ((MultibyteCodecObject *)codec)->codec;
+	self->stream = stream;
+	Py_INCREF(stream);
+	self->pendingsize = 0;
+	self->errors = internal_error_callback(errors);
+	if (self->errors == NULL)
+		goto errorexit;
+	if (self->codec->encinit != NULL &&
+	    self->codec->encinit(&self->state, self->codec->config) != 0)
+		goto errorexit;
+
+	Py_DECREF(codec);
+	return (PyObject *)self;
+
+errorexit:
+	Py_XDECREF(self);
+	Py_XDECREF(codec);
+	return NULL;
+}
+
+static int
+mbstreamwriter_traverse(MultibyteStreamWriterObject *self,
+			visitproc visit, void *arg)
+{
+	if (ERROR_ISCUSTOM(self->errors))
+		Py_VISIT(self->errors);
+	Py_VISIT(self->stream);
+	return 0;
 }
 
 static void
 mbstreamwriter_dealloc(MultibyteStreamWriterObject *self)
 {
-	if (self->errors > ERROR_MAX) {
-		Py_DECREF(self->errors);
-	}
+	PyObject_GC_UnTrack(self);
+	ERROR_DECREF(self->errors);
 	Py_DECREF(self->stream);
-	PyObject_Del(self);
+	self->ob_type->tp_free(self);
 }
 
 static struct PyMethodDef mbstreamwriter_methods[] = {
@@ -1143,7 +1660,12 @@
 	{NULL,		NULL},
 };
 
-
+static PyMemberDef mbstreamwriter_members[] = {
+	{"stream",	T_OBJECT,
+			offsetof(MultibyteStreamWriterObject, stream),
+			READONLY, NULL},
+	{NULL,}
+};
 
 static PyTypeObject MultibyteStreamWriter_Type = {
 	PyObject_HEAD_INIT(NULL)
@@ -1167,17 +1689,33 @@
 	PyObject_GenericGetAttr,	/* tp_getattro */
 	0,				/* tp_setattro */
 	0,				/* tp_as_buffer */
-	Py_TPFLAGS_DEFAULT,		/* tp_flags */
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC
+		| Py_TPFLAGS_BASETYPE,	/* tp_flags */
 	0,				/* tp_doc */
-	0,				/* tp_traverse */
+	(traverseproc)mbstreamwriter_traverse,	/* tp_traverse */
 	0,				/* tp_clear */
 	0,				/* tp_richcompare */
 	0,				/* tp_weaklistoffset */
 	0,				/* tp_iter */
 	0,				/* tp_iterext */
 	mbstreamwriter_methods,		/* tp_methods */
+	mbstreamwriter_members,		/* tp_members */
+	codecctx_getsets,		/* tp_getset */
+	0,				/* tp_base */
+	0,				/* tp_dict */
+	0,				/* tp_descr_get */
+	0,				/* tp_descr_set */
+	0,				/* tp_dictoffset */
+	0,				/* tp_init */
+	0,				/* tp_alloc */
+	mbstreamwriter_new,		/* tp_new */
 };
 
+
+/**
+ * Exposed factory function
+ */
+
 static PyObject *
 __create_codec(PyObject *ignore, PyObject *arg)
 {
@@ -1201,80 +1739,38 @@
 	return (PyObject *)self;
 }
 
-static PyObject *
-mbstreamreader_create(MultibyteCodec *codec,
-		      PyObject *stream, const char *errors)
-{
-	MultibyteStreamReaderObject *self;
-
-	self = PyObject_New(MultibyteStreamReaderObject,
-			&MultibyteStreamReader_Type);
-	if (self == NULL)
-		return NULL;
-
-	self->codec = codec;
-	self->stream = stream;
-	Py_INCREF(stream);
-	self->pendingsize = 0;
-	self->errors = get_errorcallback(errors);
-	if (self->errors == NULL)
-		goto errorexit;
-	if (self->codec->decinit != NULL &&
-	    self->codec->decinit(&self->state, self->codec->config) != 0)
-		goto errorexit;
-
-	return (PyObject *)self;
-
-errorexit:
-	Py_XDECREF(self);
-	return NULL;
-}
-
-static PyObject *
-mbstreamwriter_create(MultibyteCodec *codec,
-		      PyObject *stream, const char *errors)
-{
-	MultibyteStreamWriterObject *self;
-
-	self = PyObject_New(MultibyteStreamWriterObject,
-			&MultibyteStreamWriter_Type);
-	if (self == NULL)
-		return NULL;
-
-	self->codec = codec;
-	self->stream = stream;
-	Py_INCREF(stream);
-	self->pendingsize = 0;
-	self->errors = get_errorcallback(errors);
-	if (self->errors == NULL)
-		goto errorexit;
-	if (self->codec->encinit != NULL &&
-	    self->codec->encinit(&self->state, self->codec->config) != 0)
-		goto errorexit;
-
-	return (PyObject *)self;
-
-errorexit:
-	Py_XDECREF(self);
-	return NULL;
-}
-
 static struct PyMethodDef __methods[] = {
 	{"__create_codec", (PyCFunction)__create_codec, METH_O},
 	{NULL, NULL},
 };
 
-void
+PyMODINIT_FUNC
 init_multibytecodec(void)
 {
+	int i;
+	PyObject *m;
+	PyTypeObject *typelist[] = {
+		&MultibyteIncrementalEncoder_Type,
+		&MultibyteIncrementalDecoder_Type,
+		&MultibyteStreamReader_Type,
+		&MultibyteStreamWriter_Type,
+		NULL
+	};
+
 	if (PyType_Ready(&MultibyteCodec_Type) < 0)
 		return;
-	if (PyType_Ready(&MultibyteStreamReader_Type) < 0)
-		return;
-	if (PyType_Ready(&MultibyteStreamWriter_Type) < 0)
+
+	m = Py_InitModule("_multibytecodec", __methods);
+	if (m == NULL)
 		return;
 
-	Py_InitModule("_multibytecodec", __methods);
+	for (i = 0; typelist[i] != NULL; i++) {
+		if (PyType_Ready(typelist[i]) < 0)
+			return;
+		Py_INCREF(typelist[i]);
+		PyModule_AddObject(m, typelist[i]->tp_name,
+				   (PyObject *)typelist[i]);
+	}
 
 	if (PyErr_Occurred())
 		Py_FatalError("can't initialize the _multibytecodec module");
diff --git a/Modules/cjkcodecs/multibytecodec.h b/Modules/cjkcodecs/multibytecodec.h
index ec49c78..22ea5d4 100644
--- a/Modules/cjkcodecs/multibytecodec.h
+++ b/Modules/cjkcodecs/multibytecodec.h
@@ -67,24 +67,51 @@
 	MultibyteCodec *codec;
 } MultibyteCodecObject;
 
-#define MAXDECPENDING	8
+#define MultibyteCodec_Check(op) ((op)->ob_type == &MultibyteCodec_Type)
+
+#define _MultibyteStatefulCodec_HEAD		\
+	PyObject_HEAD				\
+	MultibyteCodec *codec;			\
+	MultibyteCodec_State state;		\
+	PyObject *errors;
 typedef struct {
-	PyObject_HEAD
-	MultibyteCodec *codec;
-	MultibyteCodec_State state;
-	unsigned char pending[MAXDECPENDING];
-	Py_ssize_t pendingsize;
-	PyObject *stream, *errors;
-} MultibyteStreamReaderObject;
+	_MultibyteStatefulCodec_HEAD
+} MultibyteStatefulCodecContext;
 
 #define MAXENCPENDING	2
-typedef struct {
-	PyObject_HEAD
-	MultibyteCodec *codec;
-	MultibyteCodec_State state;
-	Py_UNICODE pending[MAXENCPENDING];
+#define _MultibyteStatefulEncoder_HEAD		\
+	_MultibyteStatefulCodec_HEAD		\
+	Py_UNICODE pending[MAXENCPENDING];	\
 	Py_ssize_t pendingsize;
-	PyObject *stream, *errors;
+typedef struct {
+	_MultibyteStatefulEncoder_HEAD
+} MultibyteStatefulEncoderContext;
+
+#define MAXDECPENDING	8
+#define _MultibyteStatefulDecoder_HEAD		\
+	_MultibyteStatefulCodec_HEAD		\
+	unsigned char pending[MAXDECPENDING];	\
+	Py_ssize_t pendingsize;
+typedef struct {
+	_MultibyteStatefulDecoder_HEAD
+} MultibyteStatefulDecoderContext;
+
+typedef struct {
+	_MultibyteStatefulEncoder_HEAD
+} MultibyteIncrementalEncoderObject;
+
+typedef struct {
+	_MultibyteStatefulDecoder_HEAD
+} MultibyteIncrementalDecoderObject;
+
+typedef struct {
+	_MultibyteStatefulDecoder_HEAD
+	PyObject *stream;
+} MultibyteStreamReaderObject;
+
+typedef struct {
+	_MultibyteStatefulEncoder_HEAD
+	PyObject *stream;
 } MultibyteStreamWriterObject;
 
 /* positive values for illegal sequences */
@@ -95,7 +122,12 @@
 #define ERROR_STRICT		(PyObject *)(1)
 #define ERROR_IGNORE		(PyObject *)(2)
 #define ERROR_REPLACE		(PyObject *)(3)
-#define ERROR_MAX		ERROR_REPLACE
+#define ERROR_ISCUSTOM(p)	((p) < ERROR_STRICT || ERROR_REPLACE < (p))
+#define ERROR_DECREF(p) do {			\
+	if (p != NULL && ERROR_ISCUSTOM(p)) {	\
+		Py_DECREF(p);			\
+	}					\
+} while (0);
 
 #define MBENC_FLUSH		0x0001 /* encode all characters encodable */
 #define MBENC_MAX		MBENC_FLUSH
diff --git a/Modules/collectionsmodule.c b/Modules/collectionsmodule.c
index b80ab07..c7e2c85 100644
--- a/Modules/collectionsmodule.c
+++ b/Modules/collectionsmodule.c
@@ -832,11 +832,11 @@
 	0,				/* tp_itemsize */
 	/* methods */
 	(destructor)deque_dealloc,	/* tp_dealloc */
-	(printfunc)deque_tp_print,	/* tp_print */
+	deque_tp_print,			/* tp_print */
 	0,				/* tp_getattr */
 	0,				/* tp_setattr */
 	0,				/* tp_compare */
-	(reprfunc)deque_repr,		/* tp_repr */
+	deque_repr,			/* tp_repr */
 	0,				/* tp_as_number */
 	&deque_as_sequence,		/* tp_as_sequence */
 	0,				/* tp_as_mapping */
@@ -1236,10 +1236,7 @@
 static int
 defdict_tp_clear(defdictobject *dd)
 {
-	if (dd->default_factory != NULL) {
-		Py_DECREF(dd->default_factory);
-		dd->default_factory = NULL;
-	}
+	Py_CLEAR(dd->default_factory);
 	return PyDict_Type.tp_clear((PyObject *)dd);
 }
 
@@ -1277,8 +1274,11 @@
 A defaultdict compares equal to a dict with the same items.\n\
 ");
 
+/* See comment in xxsubtype.c */
+#define DEFERRED_ADDRESS(ADDR) 0
+
 static PyTypeObject defdict_type = {
-	PyObject_HEAD_INIT(NULL)
+	PyObject_HEAD_INIT(DEFERRED_ADDRESS(&PyType_Type))
 	0,				/* ob_size */
 	"collections.defaultdict",	/* tp_name */
 	sizeof(defdictobject),		/* tp_basicsize */
@@ -1302,7 +1302,7 @@
 	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_BASETYPE | Py_TPFLAGS_HAVE_GC |
 		Py_TPFLAGS_HAVE_WEAKREFS,	/* tp_flags */
 	defdict_doc,			/* tp_doc */
-	(traverseproc)defdict_traverse,	/* tp_traverse */
+	defdict_traverse,		/* tp_traverse */
 	(inquiry)defdict_tp_clear,	/* tp_clear */
 	0,				/* tp_richcompare */
 	0,				/* tp_weaklistoffset*/
@@ -1311,12 +1311,12 @@
 	defdict_methods,		/* tp_methods */
 	defdict_members,		/* tp_members */
 	0,				/* tp_getset */
-	&PyDict_Type,			/* tp_base */
+	DEFERRED_ADDRESS(&PyDict_Type),	/* tp_base */
 	0,				/* tp_dict */
 	0,				/* tp_descr_get */
 	0,				/* tp_descr_set */
 	0,				/* tp_dictoffset */
-	(initproc)defdict_init,		/* tp_init */
+	defdict_init,			/* tp_init */
 	PyType_GenericAlloc,		/* tp_alloc */
 	0,				/* tp_new */
 	PyObject_GC_Del,		/* tp_free */
@@ -1344,6 +1344,7 @@
 	Py_INCREF(&deque_type);
 	PyModule_AddObject(m, "deque", (PyObject *)&deque_type);
 
+	defdict_type.tp_base = &PyDict_Type;
 	if (PyType_Ready(&defdict_type) < 0)
 		return;
 	Py_INCREF(&defdict_type);
diff --git a/Modules/config.c.in b/Modules/config.c.in
index 9ec281c..f811991 100644
--- a/Modules/config.c.in
+++ b/Modules/config.c.in
@@ -17,6 +17,10 @@
 
 #include "Python.h"
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 
 /* -- ADDMODULE MARKER 1 -- */
 
@@ -50,3 +54,9 @@
 	/* Sentinel */
 	{0, 0}
 };
+
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/Modules/datetimemodule.c b/Modules/datetimemodule.c
index c1a0cb3..9ae235a 100644
--- a/Modules/datetimemodule.c
+++ b/Modules/datetimemodule.c
@@ -1228,8 +1228,8 @@
 				}
 			}
 			assert(zreplacement != NULL);
-			ptoappend = PyString_AsString(zreplacement);
-			ntoappend = PyString_Size(zreplacement);
+			ptoappend = PyString_AS_STRING(zreplacement);
+			ntoappend = PyString_GET_SIZE(zreplacement);
 		}
 		else if (ch == 'Z') {
 			/* format tzname */
@@ -1257,14 +1257,18 @@
 						Py_DECREF(temp);
 						if (Zreplacement == NULL)
 							goto Done;
+						if (!PyString_Check(Zreplacement)) {
+							PyErr_SetString(PyExc_TypeError, "tzname.replace() did not return a string");
+							goto Done;
+						}
 					}
 					else
 						Py_DECREF(temp);
 				}
 			}
 			assert(Zreplacement != NULL);
-			ptoappend = PyString_AsString(Zreplacement);
-			ntoappend = PyString_Size(Zreplacement);
+			ptoappend = PyString_AS_STRING(Zreplacement);
+			ntoappend = PyString_GET_SIZE(Zreplacement);
 		}
 		else {
 			/* percent followed by neither z nor Z */
@@ -1275,6 +1279,7 @@
  		/* Append the ntoappend chars starting at ptoappend to
  		 * the new format.
  		 */
+ 		assert(ptoappend != NULL);
  		assert(ntoappend >= 0);
  		if (ntoappend == 0)
  			continue;
@@ -2404,11 +2409,11 @@
 date_repr(PyDateTime_Date *self)
 {
 	char buffer[1028];
-	const char *typename;
+	const char *type_name;
 
-	typename = self->ob_type->tp_name;
+	type_name = self->ob_type->tp_name;
 	PyOS_snprintf(buffer, sizeof(buffer), "%s(%d, %d, %d)",
-		      typename,
+		      type_name,
 		      GET_YEAR(self), GET_MONTH(self), GET_DAY(self));
 
 	return PyString_FromString(buffer);
@@ -3130,7 +3135,7 @@
 time_repr(PyDateTime_Time *self)
 {
 	char buffer[100];
-	const char *typename = self->ob_type->tp_name;
+	const char *type_name = self->ob_type->tp_name;
 	int h = TIME_GET_HOUR(self);
 	int m = TIME_GET_MINUTE(self);
 	int s = TIME_GET_SECOND(self);
@@ -3139,13 +3144,13 @@
 
 	if (us)
 		PyOS_snprintf(buffer, sizeof(buffer),
-			      "%s(%d, %d, %d, %d)", typename, h, m, s, us);
+			      "%s(%d, %d, %d, %d)", type_name, h, m, s, us);
 	else if (s)
 		PyOS_snprintf(buffer, sizeof(buffer),
-			      "%s(%d, %d, %d)", typename, h, m, s);
+			      "%s(%d, %d, %d)", type_name, h, m, s);
 	else
 		PyOS_snprintf(buffer, sizeof(buffer),
-			      "%s(%d, %d)", typename, h, m);
+			      "%s(%d, %d)", type_name, h, m);
 	result = PyString_FromString(buffer);
 	if (result != NULL && HASTZINFO(self))
 		result = append_keyword_tzinfo(result, self->tzinfo);
@@ -3816,6 +3821,10 @@
 		if (PySequence_Check(obj) && PySequence_Size(obj) >= 6)
 			for (i=0; i < 6; i++) {
 				PyObject *p = PySequence_GetItem(obj, i);
+				if (p == NULL) {
+					Py_DECREF(obj);
+					return NULL;
+				}
 				if (PyInt_Check(p))
 					ia[i] = PyInt_AsLong(p);
 				else
@@ -4023,13 +4032,13 @@
 datetime_repr(PyDateTime_DateTime *self)
 {
 	char buffer[1000];
-	const char *typename = self->ob_type->tp_name;
+	const char *type_name = self->ob_type->tp_name;
 	PyObject *baserepr;
 
 	if (DATE_GET_MICROSECOND(self)) {
 		PyOS_snprintf(buffer, sizeof(buffer),
 			      "%s(%d, %d, %d, %d, %d, %d, %d)",
-			      typename,
+			      type_name,
 			      GET_YEAR(self), GET_MONTH(self), GET_DAY(self),
 			      DATE_GET_HOUR(self), DATE_GET_MINUTE(self),
 			      DATE_GET_SECOND(self),
@@ -4038,7 +4047,7 @@
 	else if (DATE_GET_SECOND(self)) {
 		PyOS_snprintf(buffer, sizeof(buffer),
 			      "%s(%d, %d, %d, %d, %d, %d)",
-			      typename,
+			      type_name,
 			      GET_YEAR(self), GET_MONTH(self), GET_DAY(self),
 			      DATE_GET_HOUR(self), DATE_GET_MINUTE(self),
 			      DATE_GET_SECOND(self));
@@ -4046,7 +4055,7 @@
 	else {
 		PyOS_snprintf(buffer, sizeof(buffer),
 			      "%s(%d, %d, %d, %d, %d)",
-			      typename,
+			      type_name,
 			      GET_YEAR(self), GET_MONTH(self), GET_DAY(self),
 			      DATE_GET_HOUR(self), DATE_GET_MINUTE(self));
 	}
diff --git a/Modules/functionalmodule.c b/Modules/functionalmodule.c
index 4b2e9b4..38ef43a 100644
--- a/Modules/functionalmodule.c
+++ b/Modules/functionalmodule.c
@@ -48,7 +48,7 @@
 
 	pto->fn = func;
 	Py_INCREF(func);
-	pto->args = PyTuple_GetSlice(args, 1, INT_MAX);
+	pto->args = PyTuple_GetSlice(args, 1, PY_SSIZE_T_MAX);
 	if (pto->args == NULL) {
 		pto->kw = NULL;
 		Py_DECREF(pto);
diff --git a/Modules/gcmodule.c b/Modules/gcmodule.c
index 7e3f95a..0176d6f 100644
--- a/Modules/gcmodule.c
+++ b/Modules/gcmodule.c
@@ -413,8 +413,12 @@
 		assert(delstr != NULL);
 		return _PyInstance_Lookup(op, delstr) != NULL;
 	}
-	else 
+	else if (PyType_HasFeature(op->ob_type, Py_TPFLAGS_HEAPTYPE))
 		return op->ob_type->tp_del != NULL;
+	else if (PyGen_CheckExact(op))
+		return PyGen_NeedsFinalizing((PyGenObject *)op);
+	else
+		return 0;
 }
 
 /* Move the objects in unreachable with __del__ methods into `finalizers`.
@@ -730,6 +734,8 @@
 	PyGC_Head unreachable; /* non-problematic unreachable trash */
 	PyGC_Head finalizers;  /* objects with, & reachable from, __del__ */
 	PyGC_Head *gc;
+	static PyObject *tmod = NULL;
+	double t1 = 0.0;
 
 	if (delstr == NULL) {
 		delstr = PyString_InternFromString("__del__");
@@ -737,19 +743,29 @@
 			Py_FatalError("gc couldn't allocate \"__del__\"");
 	}
 
+	if (tmod == NULL) {
+		tmod = PyImport_ImportModule("time");
+		if (tmod == NULL)
+			PyErr_Clear();
+	}
+
 	if (debug & DEBUG_STATS) {
+		if (tmod != NULL) {
+			PyObject *f = PyObject_CallMethod(tmod, "time", NULL);
+			if (f == NULL) {
+				PyErr_Clear();
+			}
+			else {
+				t1 = PyFloat_AsDouble(f);
+				Py_DECREF(f);
+			}
+		}
 		PySys_WriteStderr("gc: collecting generation %d...\n",
 				  generation);
 		PySys_WriteStderr("gc: objects in each generation:");
-		for (i = 0; i < NUM_GENERATIONS; i++) {
-#ifdef MS_WIN64
-			PySys_WriteStderr(" %Id", gc_list_size(GEN_HEAD(i)));
-#else
-			PySys_WriteStderr(" %ld",
-				Py_SAFE_DOWNCAST(gc_list_size(GEN_HEAD(i)),
-						 Py_ssize_t, long));
-#endif
-		}
+		for (i = 0; i < NUM_GENERATIONS; i++)
+			PySys_WriteStderr(" %" PY_FORMAT_SIZE_T "d",
+					  gc_list_size(GEN_HEAD(i)));
 		PySys_WriteStderr("\n");
 	}
 
@@ -816,6 +832,17 @@
 		if (debug & DEBUG_COLLECTABLE) {
 			debug_cycle("collectable", FROM_GC(gc));
 		}
+		if (tmod != NULL && (debug & DEBUG_STATS)) {
+			PyObject *f = PyObject_CallMethod(tmod, "time", NULL);
+			if (f == NULL) {
+				PyErr_Clear();
+			}
+			else {
+				t1 = PyFloat_AsDouble(f)-t1;
+				Py_DECREF(f);
+				PySys_WriteStderr("gc: %.4fs elapsed.\n", t1);
+			}
+		}
 	}
 
 	/* Clear weakrefs and invoke callbacks as necessary. */
@@ -837,21 +864,14 @@
 			debug_cycle("uncollectable", FROM_GC(gc));
 	}
 	if (debug & DEBUG_STATS) {
-		if (m == 0 && n == 0) {
+		if (m == 0 && n == 0)
 			PySys_WriteStderr("gc: done.\n");
-		}
-		else {
-#ifdef MS_WIN64
+		else
 			PySys_WriteStderr(
-			    "gc: done, %Id unreachable, %Id uncollectable.\n",
+			    "gc: done, "
+			    "%" PY_FORMAT_SIZE_T "d unreachable, "
+			    "%" PY_FORMAT_SIZE_T "d uncollectable.\n",
 			    n+m, n);
-#else
-			PySys_WriteStderr(
-			    "gc: done, %ld unreachable, %ld uncollectable.\n",
-			    Py_SAFE_DOWNCAST(n+m, Py_ssize_t, long),
-			    Py_SAFE_DOWNCAST(n, Py_ssize_t, long));
-#endif
-		}
 	}
 
 	/* Append instances in the uncollectable set to a Python
@@ -1050,7 +1070,7 @@
 static int
 referrersvisit(PyObject* obj, PyObject *objs)
 {
-	int i;
+	Py_ssize_t i;
 	for (i = 0; i < PyTuple_GET_SIZE(objs); i++)
 		if (PyTuple_GET_ITEM(objs, i) == obj)
 			return 1;
@@ -1085,6 +1105,8 @@
 {
 	int i;
 	PyObject *result = PyList_New(0);
+	if (!result) return NULL;
+
 	for (i = 0; i < NUM_GENERATIONS; i++) {
 		if (!(gc_referrers_for(args, GEN_HEAD(i), result))) {
 			Py_DECREF(result);
@@ -1108,7 +1130,7 @@
 static PyObject *
 gc_get_referents(PyObject *self, PyObject *args)
 {
-	int i;
+	Py_ssize_t i;
 	PyObject *result = PyList_New(0);
 
 	if (result == NULL)
@@ -1288,7 +1310,8 @@
 _PyObject_GC_Malloc(size_t basicsize)
 {
 	PyObject *op;
-	PyGC_Head *g = PyObject_MALLOC(sizeof(PyGC_Head) + basicsize);
+	PyGC_Head *g = (PyGC_Head *)PyObject_MALLOC(
+                sizeof(PyGC_Head) + basicsize);
 	if (g == NULL)
 		return PyErr_NoMemory();
 	g->gc.gc_refs = GC_UNTRACKED;
@@ -1330,7 +1353,7 @@
 {
 	const size_t basicsize = _PyObject_VAR_SIZE(op->ob_type, nitems);
 	PyGC_Head *g = AS_GC(op);
-	g = PyObject_REALLOC(g,  sizeof(PyGC_Head) + basicsize);
+	g = (PyGC_Head *)PyObject_REALLOC(g,  sizeof(PyGC_Head) + basicsize);
 	if (g == NULL)
 		return (PyVarObject *)PyErr_NoMemory();
 	op = (PyVarObject *) FROM_GC(g);
diff --git a/Modules/getpath.c b/Modules/getpath.c
index 4716d15..8eba730 100644
--- a/Modules/getpath.c
+++ b/Modules/getpath.c
@@ -91,6 +91,11 @@
  * process to find the installed Python tree.
  */
 
+#ifdef __cplusplus
+ extern "C" {
+#endif
+
+
 #ifndef VERSION
 #if defined(__VMS)
 #define VERSION "2_1"
@@ -566,7 +571,7 @@
     bufsz += strlen(exec_prefix) + 1;
 
     /* This is the only malloc call in this file */
-    buf = PyMem_Malloc(bufsz);
+    buf = (char *)PyMem_Malloc(bufsz);
 
     if (buf == NULL) {
         /* We can't exit, so print a warning and limp along */
@@ -681,3 +686,9 @@
         calculate_path();
     return progpath;
 }
+
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/Modules/grpmodule.c b/Modules/grpmodule.c
index de849c9..12d33dd 100644
--- a/Modules/grpmodule.c
+++ b/Modules/grpmodule.c
@@ -29,6 +29,7 @@
 };
 
 
+static int initialized;
 static PyTypeObject StructGrpType;
 
 static PyObject *
@@ -174,6 +175,8 @@
     if (m == NULL)
         return;
     d = PyModule_GetDict(m);
-    PyStructSequence_InitType(&StructGrpType, &struct_group_type_desc);
+    if (!initialized)
+	    PyStructSequence_InitType(&StructGrpType, &struct_group_type_desc);
     PyDict_SetItemString(d, "struct_group", (PyObject *) &StructGrpType);
+    initialized = 1;
 }
diff --git a/Modules/itertoolsmodule.c b/Modules/itertoolsmodule.c
index 49d241f..94617a9 100644
--- a/Modules/itertoolsmodule.c
+++ b/Modules/itertoolsmodule.c
@@ -340,7 +340,7 @@
 {
 	teedataobject *tdo;
 
-	tdo = PyObject_New(teedataobject, &teedataobject_type);
+	tdo = PyObject_GC_New(teedataobject, &teedataobject_type);
 	if (tdo == NULL)
 		return NULL;
 
@@ -348,6 +348,7 @@
 	tdo->nextlink = NULL;
 	Py_INCREF(it);
 	tdo->it = it;
+	PyObject_GC_Track(tdo);
 	return (PyObject *)tdo;
 }
 
@@ -381,16 +382,34 @@
 	return value;
 }
 
+static int
+teedataobject_traverse(teedataobject *tdo, visitproc visit, void * arg)
+{
+	int i;
+	Py_VISIT(tdo->it);
+	for (i = 0; i < tdo->numread; i++)
+		Py_VISIT(tdo->values[i]);
+	Py_VISIT(tdo->nextlink);
+	return 0;
+}
+
+static int
+teedataobject_clear(teedataobject *tdo)
+{
+	int i;
+	Py_CLEAR(tdo->it);
+	for (i=0 ; i<tdo->numread ; i++)
+		Py_CLEAR(tdo->values[i]);
+	Py_CLEAR(tdo->nextlink);
+	return 0;
+}
+
 static void
 teedataobject_dealloc(teedataobject *tdo)
 {
-	int i;
-
-	for (i=0 ; i<tdo->numread ; i++)
-		Py_DECREF(tdo->values[i]);
-	Py_XDECREF(tdo->it);
-	Py_XDECREF(tdo->nextlink);
-	PyObject_Del(tdo);
+	PyObject_GC_UnTrack(tdo);
+	teedataobject_clear(tdo);
+	PyObject_GC_Del(tdo);
 }
 
 PyDoc_STRVAR(teedataobject_doc, "Data container common to multiple tee objects.");
@@ -417,9 +436,26 @@
 	PyObject_GenericGetAttr,		/* tp_getattro */
 	0,					/* tp_setattro */
 	0,					/* tp_as_buffer */
-	Py_TPFLAGS_DEFAULT,			/* tp_flags */
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,	/* tp_flags */
 	teedataobject_doc,			/* tp_doc */
-	0,					/* tp_traverse */
+	(traverseproc)teedataobject_traverse,	/* tp_traverse */
+	(inquiry)teedataobject_clear,		/* tp_clear */
+	0,					/* tp_richcompare */
+	0,					/* tp_weaklistoffset */
+	0,					/* tp_iter */
+	0,					/* tp_iternext */
+	0,					/* tp_methods */
+	0,					/* tp_members */
+	0,					/* tp_getset */
+	0,					/* tp_base */
+	0,					/* tp_dict */
+	0,					/* tp_descr_get */
+	0,					/* tp_descr_set */
+	0,					/* tp_dictoffset */
+	0,					/* tp_init */
+	0,					/* tp_alloc */
+	0,					/* tp_new */
+	PyObject_GC_Del,			/* tp_free */
 };
 
 
@@ -443,18 +479,26 @@
 	return value;
 }
 
+static int
+tee_traverse(teeobject *to, visitproc visit, void *arg)
+{
+	Py_VISIT((PyObject *)to->dataobj);
+	return 0;
+}
+
 static PyObject *
 tee_copy(teeobject *to)
 {
 	teeobject *newto;
 
-	newto = PyObject_New(teeobject, &tee_type);
+	newto = PyObject_GC_New(teeobject, &tee_type);
 	if (newto == NULL)
 		return NULL;
 	Py_INCREF(to->dataobj);
 	newto->dataobj = to->dataobj;
 	newto->index = to->index;
 	newto->weakreflist = NULL;
+	PyObject_GC_Track(newto);
 	return (PyObject *)newto;
 }
 
@@ -474,12 +518,13 @@
 		goto done;
 	}
 
-	to = PyObject_New(teeobject, &tee_type);
+	to = PyObject_GC_New(teeobject, &tee_type);
 	if (to == NULL) 
 		goto done;
 	to->dataobj = (teedataobject *)teedataobject_new(it);
 	to->index = 0;
 	to->weakreflist = NULL;
+	PyObject_GC_Track(to);
 done:
 	Py_XDECREF(it);
 	return (PyObject *)to;
@@ -495,13 +540,21 @@
 	return tee_fromiterable(iterable);
 }
 
-static void
-tee_dealloc(teeobject *to)
+static int
+tee_clear(teeobject *to)
 {
 	if (to->weakreflist != NULL)
 		PyObject_ClearWeakRefs((PyObject *) to);
-	Py_XDECREF(to->dataobj);
-	PyObject_Del(to);
+	Py_CLEAR(to->dataobj);
+	return 0;
+}
+
+static void
+tee_dealloc(teeobject *to)
+{
+	PyObject_GC_UnTrack(to);
+	tee_clear(to);
+	PyObject_GC_Del(to);
 }
 
 PyDoc_STRVAR(teeobject_doc,
@@ -534,10 +587,10 @@
 	0,				/* tp_getattro */
 	0,				/* tp_setattro */
 	0,				/* tp_as_buffer */
-	Py_TPFLAGS_DEFAULT,		/* tp_flags */
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC,	/* tp_flags */
 	teeobject_doc,			/* tp_doc */
-	0,				/* tp_traverse */
-	0,				/* tp_clear */
+	(traverseproc)tee_traverse,	/* tp_traverse */
+	(inquiry)tee_clear,		/* tp_clear */
 	0,				/* tp_richcompare */
 	offsetof(teeobject, weakreflist),	/* tp_weaklistoffset */
 	PyObject_SelfIter,		/* tp_iter */
@@ -553,7 +606,7 @@
 	0,				/* tp_init */
 	0,				/* tp_alloc */
 	tee_new,			/* tp_new */
-	PyObject_Del,			/* tp_free */
+	PyObject_GC_Del,		/* tp_free */
 };
 
 static PyObject *
diff --git a/Modules/main.c b/Modules/main.c
index b3ce16e..7326a27 100644
--- a/Modules/main.c
+++ b/Modules/main.c
@@ -28,6 +28,10 @@
     "Type \"help\", \"copyright\", \"credits\" or \"license\" " \
     "for more information."
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 /* For Py_GetArgcArgv(); set by main() */
 static char **orig_argv;
 static int  orig_argc;
@@ -206,7 +210,7 @@
 			/* -c is the last option; following arguments
 			   that look like options are left for the
 			   command to interpret. */
-			command = malloc(strlen(_PyOS_optarg) + 2);
+			command = (char *)malloc(strlen(_PyOS_optarg) + 2);
 			if (command == NULL)
 				Py_FatalError(
 				   "not enough memory to copy -c argument");
@@ -219,7 +223,7 @@
 			/* -m is the last option; following arguments
 			   that look like options are left for the
 			   module to interpret. */
-			module = malloc(strlen(_PyOS_optarg) + 2);
+			module = (char *)malloc(strlen(_PyOS_optarg) + 2);
 			if (module == NULL)
 				Py_FatalError(
 				   "not enough memory to copy -m argument");
@@ -509,3 +513,8 @@
 	*argc = orig_argc;
 	*argv = orig_argv;
 }
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/Modules/md5.c b/Modules/md5.c
new file mode 100644
index 0000000..c35d96c
--- /dev/null
+++ b/Modules/md5.c
@@ -0,0 +1,381 @@
+/*
+  Copyright (C) 1999, 2000, 2002 Aladdin Enterprises.  All rights reserved.
+
+  This software is provided 'as-is', without any express or implied
+  warranty.  In no event will the authors be held liable for any damages
+  arising from the use of this software.
+
+  Permission is granted to anyone to use this software for any purpose,
+  including commercial applications, and to alter it and redistribute it
+  freely, subject to the following restrictions:
+
+  1. The origin of this software must not be misrepresented; you must not
+     claim that you wrote the original software. If you use this software
+     in a product, an acknowledgment in the product documentation would be
+     appreciated but is not required.
+  2. Altered source versions must be plainly marked as such, and must not be
+     misrepresented as being the original software.
+  3. This notice may not be removed or altered from any source distribution.
+
+  L. Peter Deutsch
+  ghost@aladdin.com
+
+ */
+/* $Id: md5.c,v 1.6 2002/04/13 19:20:28 lpd Exp $ */
+/*
+  Independent implementation of MD5 (RFC 1321).
+
+  This code implements the MD5 Algorithm defined in RFC 1321, whose
+  text is available at
+	http://www.ietf.org/rfc/rfc1321.txt
+  The code is derived from the text of the RFC, including the test suite
+  (section A.5) but excluding the rest of Appendix A.  It does not include
+  any code or documentation that is identified in the RFC as being
+  copyrighted.
+
+  The original and principal author of md5.c is L. Peter Deutsch
+  <ghost@aladdin.com>.  Other authors are noted in the change history
+  that follows (in reverse chronological order):
+
+  2002-04-13 lpd Clarified derivation from RFC 1321; now handles byte order
+	either statically or dynamically; added missing #include <string.h>
+	in library.
+  2002-03-11 lpd Corrected argument list for main(), and added int return
+	type, in test program and T value program.
+  2002-02-21 lpd Added missing #include <stdio.h> in test program.
+  2000-07-03 lpd Patched to eliminate warnings about "constant is
+	unsigned in ANSI C, signed in traditional"; made test program
+	self-checking.
+  1999-11-04 lpd Edited comments slightly for automatic TOC extraction.
+  1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5).
+  1999-05-03 lpd Original version.
+ */
+
+#include "md5.h"
+#include <string.h>
+
+#undef BYTE_ORDER	/* 1 = big-endian, -1 = little-endian, 0 = unknown */
+#ifdef ARCH_IS_BIG_ENDIAN
+#  define BYTE_ORDER (ARCH_IS_BIG_ENDIAN ? 1 : -1)
+#else
+#  define BYTE_ORDER 0
+#endif
+
+#define T_MASK ((md5_word_t)~0)
+#define T1 /* 0xd76aa478 */ (T_MASK ^ 0x28955b87)
+#define T2 /* 0xe8c7b756 */ (T_MASK ^ 0x173848a9)
+#define T3    0x242070db
+#define T4 /* 0xc1bdceee */ (T_MASK ^ 0x3e423111)
+#define T5 /* 0xf57c0faf */ (T_MASK ^ 0x0a83f050)
+#define T6    0x4787c62a
+#define T7 /* 0xa8304613 */ (T_MASK ^ 0x57cfb9ec)
+#define T8 /* 0xfd469501 */ (T_MASK ^ 0x02b96afe)
+#define T9    0x698098d8
+#define T10 /* 0x8b44f7af */ (T_MASK ^ 0x74bb0850)
+#define T11 /* 0xffff5bb1 */ (T_MASK ^ 0x0000a44e)
+#define T12 /* 0x895cd7be */ (T_MASK ^ 0x76a32841)
+#define T13    0x6b901122
+#define T14 /* 0xfd987193 */ (T_MASK ^ 0x02678e6c)
+#define T15 /* 0xa679438e */ (T_MASK ^ 0x5986bc71)
+#define T16    0x49b40821
+#define T17 /* 0xf61e2562 */ (T_MASK ^ 0x09e1da9d)
+#define T18 /* 0xc040b340 */ (T_MASK ^ 0x3fbf4cbf)
+#define T19    0x265e5a51
+#define T20 /* 0xe9b6c7aa */ (T_MASK ^ 0x16493855)
+#define T21 /* 0xd62f105d */ (T_MASK ^ 0x29d0efa2)
+#define T22    0x02441453
+#define T23 /* 0xd8a1e681 */ (T_MASK ^ 0x275e197e)
+#define T24 /* 0xe7d3fbc8 */ (T_MASK ^ 0x182c0437)
+#define T25    0x21e1cde6
+#define T26 /* 0xc33707d6 */ (T_MASK ^ 0x3cc8f829)
+#define T27 /* 0xf4d50d87 */ (T_MASK ^ 0x0b2af278)
+#define T28    0x455a14ed
+#define T29 /* 0xa9e3e905 */ (T_MASK ^ 0x561c16fa)
+#define T30 /* 0xfcefa3f8 */ (T_MASK ^ 0x03105c07)
+#define T31    0x676f02d9
+#define T32 /* 0x8d2a4c8a */ (T_MASK ^ 0x72d5b375)
+#define T33 /* 0xfffa3942 */ (T_MASK ^ 0x0005c6bd)
+#define T34 /* 0x8771f681 */ (T_MASK ^ 0x788e097e)
+#define T35    0x6d9d6122
+#define T36 /* 0xfde5380c */ (T_MASK ^ 0x021ac7f3)
+#define T37 /* 0xa4beea44 */ (T_MASK ^ 0x5b4115bb)
+#define T38    0x4bdecfa9
+#define T39 /* 0xf6bb4b60 */ (T_MASK ^ 0x0944b49f)
+#define T40 /* 0xbebfbc70 */ (T_MASK ^ 0x4140438f)
+#define T41    0x289b7ec6
+#define T42 /* 0xeaa127fa */ (T_MASK ^ 0x155ed805)
+#define T43 /* 0xd4ef3085 */ (T_MASK ^ 0x2b10cf7a)
+#define T44    0x04881d05
+#define T45 /* 0xd9d4d039 */ (T_MASK ^ 0x262b2fc6)
+#define T46 /* 0xe6db99e5 */ (T_MASK ^ 0x1924661a)
+#define T47    0x1fa27cf8
+#define T48 /* 0xc4ac5665 */ (T_MASK ^ 0x3b53a99a)
+#define T49 /* 0xf4292244 */ (T_MASK ^ 0x0bd6ddbb)
+#define T50    0x432aff97
+#define T51 /* 0xab9423a7 */ (T_MASK ^ 0x546bdc58)
+#define T52 /* 0xfc93a039 */ (T_MASK ^ 0x036c5fc6)
+#define T53    0x655b59c3
+#define T54 /* 0x8f0ccc92 */ (T_MASK ^ 0x70f3336d)
+#define T55 /* 0xffeff47d */ (T_MASK ^ 0x00100b82)
+#define T56 /* 0x85845dd1 */ (T_MASK ^ 0x7a7ba22e)
+#define T57    0x6fa87e4f
+#define T58 /* 0xfe2ce6e0 */ (T_MASK ^ 0x01d3191f)
+#define T59 /* 0xa3014314 */ (T_MASK ^ 0x5cfebceb)
+#define T60    0x4e0811a1
+#define T61 /* 0xf7537e82 */ (T_MASK ^ 0x08ac817d)
+#define T62 /* 0xbd3af235 */ (T_MASK ^ 0x42c50dca)
+#define T63    0x2ad7d2bb
+#define T64 /* 0xeb86d391 */ (T_MASK ^ 0x14792c6e)
+
+
+static void
+md5_process(md5_state_t *pms, const md5_byte_t *data /*[64]*/)
+{
+    md5_word_t
+	a = pms->abcd[0], b = pms->abcd[1],
+	c = pms->abcd[2], d = pms->abcd[3];
+    md5_word_t t;
+#if BYTE_ORDER > 0
+    /* Define storage only for big-endian CPUs. */
+    md5_word_t X[16];
+#else
+    /* Define storage for little-endian or both types of CPUs. */
+    md5_word_t xbuf[16];
+    const md5_word_t *X;
+#endif
+
+    {
+#if BYTE_ORDER == 0
+	/*
+	 * Determine dynamically whether this is a big-endian or
+	 * little-endian machine, since we can use a more efficient
+	 * algorithm on the latter.
+	 */
+	static const int w = 1;
+
+	if (*((const md5_byte_t *)&w)) /* dynamic little-endian */
+#endif
+#if BYTE_ORDER <= 0		/* little-endian */
+	{
+	    /*
+	     * On little-endian machines, we can process properly aligned
+	     * data without copying it.
+	     */
+	    if (!((data - (const md5_byte_t *)0) & 3)) {
+		/* data are properly aligned */
+		X = (const md5_word_t *)data;
+	    } else {
+		/* not aligned */
+		memcpy(xbuf, data, 64);
+		X = xbuf;
+	    }
+	}
+#endif
+#if BYTE_ORDER == 0
+	else			/* dynamic big-endian */
+#endif
+#if BYTE_ORDER >= 0		/* big-endian */
+	{
+	    /*
+	     * On big-endian machines, we must arrange the bytes in the
+	     * right order.
+	     */
+	    const md5_byte_t *xp = data;
+	    int i;
+
+#  if BYTE_ORDER == 0
+	    X = xbuf;		/* (dynamic only) */
+#  else
+#    define xbuf X		/* (static only) */
+#  endif
+	    for (i = 0; i < 16; ++i, xp += 4)
+		xbuf[i] = xp[0] + (xp[1] << 8) + (xp[2] << 16) + (xp[3] << 24);
+	}
+#endif
+    }
+
+#define ROTATE_LEFT(x, n) (((x) << (n)) | ((x) >> (32 - (n))))
+
+    /* Round 1. */
+    /* Let [abcd k s i] denote the operation
+       a = b + ((a + F(b,c,d) + X[k] + T[i]) <<< s). */
+#define F(x, y, z) (((x) & (y)) | (~(x) & (z)))
+#define SET(a, b, c, d, k, s, Ti)\
+  t = a + F(b,c,d) + X[k] + Ti;\
+  a = ROTATE_LEFT(t, s) + b
+    /* Do the following 16 operations. */
+    SET(a, b, c, d,  0,  7,  T1);
+    SET(d, a, b, c,  1, 12,  T2);
+    SET(c, d, a, b,  2, 17,  T3);
+    SET(b, c, d, a,  3, 22,  T4);
+    SET(a, b, c, d,  4,  7,  T5);
+    SET(d, a, b, c,  5, 12,  T6);
+    SET(c, d, a, b,  6, 17,  T7);
+    SET(b, c, d, a,  7, 22,  T8);
+    SET(a, b, c, d,  8,  7,  T9);
+    SET(d, a, b, c,  9, 12, T10);
+    SET(c, d, a, b, 10, 17, T11);
+    SET(b, c, d, a, 11, 22, T12);
+    SET(a, b, c, d, 12,  7, T13);
+    SET(d, a, b, c, 13, 12, T14);
+    SET(c, d, a, b, 14, 17, T15);
+    SET(b, c, d, a, 15, 22, T16);
+#undef SET
+
+     /* Round 2. */
+     /* Let [abcd k s i] denote the operation
+          a = b + ((a + G(b,c,d) + X[k] + T[i]) <<< s). */
+#define G(x, y, z) (((x) & (z)) | ((y) & ~(z)))
+#define SET(a, b, c, d, k, s, Ti)\
+  t = a + G(b,c,d) + X[k] + Ti;\
+  a = ROTATE_LEFT(t, s) + b
+     /* Do the following 16 operations. */
+    SET(a, b, c, d,  1,  5, T17);
+    SET(d, a, b, c,  6,  9, T18);
+    SET(c, d, a, b, 11, 14, T19);
+    SET(b, c, d, a,  0, 20, T20);
+    SET(a, b, c, d,  5,  5, T21);
+    SET(d, a, b, c, 10,  9, T22);
+    SET(c, d, a, b, 15, 14, T23);
+    SET(b, c, d, a,  4, 20, T24);
+    SET(a, b, c, d,  9,  5, T25);
+    SET(d, a, b, c, 14,  9, T26);
+    SET(c, d, a, b,  3, 14, T27);
+    SET(b, c, d, a,  8, 20, T28);
+    SET(a, b, c, d, 13,  5, T29);
+    SET(d, a, b, c,  2,  9, T30);
+    SET(c, d, a, b,  7, 14, T31);
+    SET(b, c, d, a, 12, 20, T32);
+#undef SET
+
+     /* Round 3. */
+     /* Let [abcd k s t] denote the operation
+          a = b + ((a + H(b,c,d) + X[k] + T[i]) <<< s). */
+#define H(x, y, z) ((x) ^ (y) ^ (z))
+#define SET(a, b, c, d, k, s, Ti)\
+  t = a + H(b,c,d) + X[k] + Ti;\
+  a = ROTATE_LEFT(t, s) + b
+     /* Do the following 16 operations. */
+    SET(a, b, c, d,  5,  4, T33);
+    SET(d, a, b, c,  8, 11, T34);
+    SET(c, d, a, b, 11, 16, T35);
+    SET(b, c, d, a, 14, 23, T36);
+    SET(a, b, c, d,  1,  4, T37);
+    SET(d, a, b, c,  4, 11, T38);
+    SET(c, d, a, b,  7, 16, T39);
+    SET(b, c, d, a, 10, 23, T40);
+    SET(a, b, c, d, 13,  4, T41);
+    SET(d, a, b, c,  0, 11, T42);
+    SET(c, d, a, b,  3, 16, T43);
+    SET(b, c, d, a,  6, 23, T44);
+    SET(a, b, c, d,  9,  4, T45);
+    SET(d, a, b, c, 12, 11, T46);
+    SET(c, d, a, b, 15, 16, T47);
+    SET(b, c, d, a,  2, 23, T48);
+#undef SET
+
+     /* Round 4. */
+     /* Let [abcd k s t] denote the operation
+          a = b + ((a + I(b,c,d) + X[k] + T[i]) <<< s). */
+#define I(x, y, z) ((y) ^ ((x) | ~(z)))
+#define SET(a, b, c, d, k, s, Ti)\
+  t = a + I(b,c,d) + X[k] + Ti;\
+  a = ROTATE_LEFT(t, s) + b
+     /* Do the following 16 operations. */
+    SET(a, b, c, d,  0,  6, T49);
+    SET(d, a, b, c,  7, 10, T50);
+    SET(c, d, a, b, 14, 15, T51);
+    SET(b, c, d, a,  5, 21, T52);
+    SET(a, b, c, d, 12,  6, T53);
+    SET(d, a, b, c,  3, 10, T54);
+    SET(c, d, a, b, 10, 15, T55);
+    SET(b, c, d, a,  1, 21, T56);
+    SET(a, b, c, d,  8,  6, T57);
+    SET(d, a, b, c, 15, 10, T58);
+    SET(c, d, a, b,  6, 15, T59);
+    SET(b, c, d, a, 13, 21, T60);
+    SET(a, b, c, d,  4,  6, T61);
+    SET(d, a, b, c, 11, 10, T62);
+    SET(c, d, a, b,  2, 15, T63);
+    SET(b, c, d, a,  9, 21, T64);
+#undef SET
+
+     /* Then perform the following additions. (That is increment each
+        of the four registers by the value it had before this block
+        was started.) */
+    pms->abcd[0] += a;
+    pms->abcd[1] += b;
+    pms->abcd[2] += c;
+    pms->abcd[3] += d;
+}
+
+void
+md5_init(md5_state_t *pms)
+{
+    pms->count[0] = pms->count[1] = 0;
+    pms->abcd[0] = 0x67452301;
+    pms->abcd[1] = /*0xefcdab89*/ T_MASK ^ 0x10325476;
+    pms->abcd[2] = /*0x98badcfe*/ T_MASK ^ 0x67452301;
+    pms->abcd[3] = 0x10325476;
+}
+
+void
+md5_append(md5_state_t *pms, const md5_byte_t *data, int nbytes)
+{
+    const md5_byte_t *p = data;
+    int left = nbytes;
+    int offset = (pms->count[0] >> 3) & 63;
+    md5_word_t nbits = (md5_word_t)(nbytes << 3);
+
+    if (nbytes <= 0)
+	return;
+
+    /* Update the message length. */
+    pms->count[1] += nbytes >> 29;
+    pms->count[0] += nbits;
+    if (pms->count[0] < nbits)
+	pms->count[1]++;
+
+    /* Process an initial partial block. */
+    if (offset) {
+	int copy = (offset + nbytes > 64 ? 64 - offset : nbytes);
+
+	memcpy(pms->buf + offset, p, copy);
+	if (offset + copy < 64)
+	    return;
+	p += copy;
+	left -= copy;
+	md5_process(pms, pms->buf);
+    }
+
+    /* Process full blocks. */
+    for (; left >= 64; p += 64, left -= 64)
+	md5_process(pms, p);
+
+    /* Process a final partial block. */
+    if (left)
+	memcpy(pms->buf, p, left);
+}
+
+void
+md5_finish(md5_state_t *pms, md5_byte_t digest[16])
+{
+    static const md5_byte_t pad[64] = {
+	0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
+	0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
+    };
+    md5_byte_t data[8];
+    int i;
+
+    /* Save the length before padding. */
+    for (i = 0; i < 8; ++i)
+	data[i] = (md5_byte_t)(pms->count[i >> 2] >> ((i & 3) << 3));
+    /* Pad to 56 bytes mod 64. */
+    md5_append(pms, pad, ((55 - (pms->count[0] >> 3)) & 63) + 1);
+    /* Append the length. */
+    md5_append(pms, data, 8);
+    for (i = 0; i < 16; ++i)
+	digest[i] = (md5_byte_t)(pms->abcd[i >> 2] >> ((i & 3) << 3));
+}
diff --git a/Modules/md5.h b/Modules/md5.h
index 13628df..5eb6d6c 100644
--- a/Modules/md5.h
+++ b/Modules/md5.h
@@ -1,62 +1,91 @@
-/* MD5.H - header file for MD5C.C
+/*
+  Copyright (C) 1999, 2002 Aladdin Enterprises.  All rights reserved.
+
+  This software is provided 'as-is', without any express or implied
+  warranty.  In no event will the authors be held liable for any damages
+  arising from the use of this software.
+
+  Permission is granted to anyone to use this software for any purpose,
+  including commercial applications, and to alter it and redistribute it
+  freely, subject to the following restrictions:
+
+  1. The origin of this software must not be misrepresented; you must not
+     claim that you wrote the original software. If you use this software
+     in a product, an acknowledgment in the product documentation would be
+     appreciated but is not required.
+  2. Altered source versions must be plainly marked as such, and must not be
+     misrepresented as being the original software.
+  3. This notice may not be removed or altered from any source distribution.
+
+  L. Peter Deutsch
+  ghost@aladdin.com
+
+ */
+/* $Id$ */
+/*
+  Independent implementation of MD5 (RFC 1321).
+
+  This code implements the MD5 Algorithm defined in RFC 1321, whose
+  text is available at
+	http://www.ietf.org/rfc/rfc1321.txt
+  The code is derived from the text of the RFC, including the test suite
+  (section A.5) but excluding the rest of Appendix A.  It does not include
+  any code or documentation that is identified in the RFC as being
+  copyrighted.
+
+  The original and principal author of md5.h is L. Peter Deutsch
+  <ghost@aladdin.com>.  Other authors are noted in the change history
+  that follows (in reverse chronological order):
+
+  2002-04-13 lpd Removed support for non-ANSI compilers; removed
+	references to Ghostscript; clarified derivation from RFC 1321;
+	now handles byte order either statically or dynamically.
+  1999-11-04 lpd Edited comments slightly for automatic TOC extraction.
+  1999-10-18 lpd Fixed typo in header comment (ansi2knr rather than md5);
+	added conditionalization for C++ compilation from Martin
+	Purschke <purschke@bnl.gov>.
+  1999-05-03 lpd Original version.
  */
 
-/* Copyright (C) 1991-2, RSA Data Security, Inc. Created 1991. All
-rights reserved.
+#ifndef md5_INCLUDED
+#  define md5_INCLUDED
 
-License to copy and use this software is granted provided that it
-is identified as the "RSA Data Security, Inc. MD5 Message-Digest
-Algorithm" in all material mentioning or referencing this software
-or this function.
-
-License is also granted to make and use derivative works provided
-that such works are identified as "derived from the RSA Data
-Security, Inc. MD5 Message-Digest Algorithm" in all material
-mentioning or referencing the derived work.
-
-RSA Data Security, Inc. makes no representations concerning either
-the merchantability of this software or the suitability of this
-software for any particular purpose. It is provided "as is"
-without express or implied warranty of any kind.
-
-These notices must be retained in any copies of any part of this
-documentation and/or software.
+/*
+ * This package supports both compile-time and run-time determination of CPU
+ * byte order.  If ARCH_IS_BIG_ENDIAN is defined as 0, the code will be
+ * compiled to run only on little-endian CPUs; if ARCH_IS_BIG_ENDIAN is
+ * defined as non-zero, the code will be compiled to run only on big-endian
+ * CPUs; if ARCH_IS_BIG_ENDIAN is not defined, the code will be compiled to
+ * run on either big- or little-endian CPUs, but will run slightly less
+ * efficiently on either one than if ARCH_IS_BIG_ENDIAN is defined.
  */
 
-/* ========== include global.h ========== */
-/* GLOBAL.H - RSAREF types and constants
- */
+typedef unsigned char md5_byte_t; /* 8-bit byte */
+typedef unsigned int md5_word_t; /* 32-bit word */
 
-/* POINTER defines a generic pointer type */
-typedef unsigned char *POINTER;
+/* Define the state of the MD5 Algorithm. */
+typedef struct md5_state_s {
+    md5_word_t count[2];	/* message length in bits, lsw first */
+    md5_word_t abcd[4];		/* digest buffer */
+    md5_byte_t buf[64];		/* accumulate block */
+} md5_state_t;
 
-/* UINT4 defines a four byte word */
-#if SIZEOF_LONG == 4
-typedef unsigned long int UINT4;
-#elif SIZEOF_SHORT == 4
-typedef unsigned short int UINT4;
-#elif INT_MAX == 2147483647
-typedef unsigned int UINT4;
-#else
-#error "Can't find a 4-byte integral type"
+#ifdef __cplusplus
+extern "C" 
+{
 #endif
 
-/* ========== End global.h; continue md5.h ========== */
+/* Initialize the algorithm. */
+void md5_init(md5_state_t *pms);
 
-/* MD5 context. */
-typedef struct {
-    UINT4 state[4];                                   /* state (ABCD) */
-    UINT4 count[2];        /* number of bits, modulo 2^64 (lsb first) */
-    unsigned char buffer[64];                         /* input buffer */
-} MD5_CTX;
+/* Append a string to the message. */
+void md5_append(md5_state_t *pms, const md5_byte_t *data, int nbytes);
 
-/* Rename all exported symbols to avoid conflicts with similarly named
-   symbols in some systems' standard C libraries... */
+/* Finish the message and return the digest. */
+void md5_finish(md5_state_t *pms, md5_byte_t digest[16]);
 
-#define MD5Init _Py_MD5Init
-#define MD5Update _Py_MD5Update
-#define MD5Final _Py_MD5Final
+#ifdef __cplusplus
+}  /* end extern "C" */
+#endif
 
-void MD5Init(MD5_CTX *);
-void MD5Update(MD5_CTX *, unsigned char *, unsigned int);
-void MD5Final(unsigned char [16], MD5_CTX *);
+#endif /* md5_INCLUDED */
diff --git a/Modules/md5c.c b/Modules/md5c.c
deleted file mode 100644
index 1b8dfdb..0000000
--- a/Modules/md5c.c
+++ /dev/null
@@ -1,289 +0,0 @@
-/* MD5C.C - RSA Data Security, Inc., MD5 message-digest algorithm
- */
-
-/* Copyright (C) 1991-2, RSA Data Security, Inc. Created 1991. All
-rights reserved.
-
-License to copy and use this software is granted provided that it
-is identified as the "RSA Data Security, Inc. MD5 Message-Digest
-Algorithm" in all material mentioning or referencing this software
-or this function.
-
-License is also granted to make and use derivative works provided
-that such works are identified as "derived from the RSA Data
-Security, Inc. MD5 Message-Digest Algorithm" in all material
-mentioning or referencing the derived work.
-
-RSA Data Security, Inc. makes no representations concerning either
-the merchantability of this software or the suitability of this
-software for any particular purpose. It is provided "as is"
-without express or implied warranty of any kind.
-
-These notices must be retained in any copies of any part of this
-documentation and/or software.
- */
-
-#include "Python.h"
-#include "md5.h"
-
-/* Constants for MD5Transform routine. */
-
-#define S11 7
-#define S12 12
-#define S13 17
-#define S14 22
-#define S21 5
-#define S22 9
-#define S23 14
-#define S24 20
-#define S31 4
-#define S32 11
-#define S33 16
-#define S34 23
-#define S41 6
-#define S42 10
-#define S43 15
-#define S44 21
-
-static void MD5Transform(UINT4[4], unsigned char[64]);
-
-
-/* Encodes input (UINT4) into output (unsigned char). Assumes len is
-   a multiple of 4.
- */
-static void
-Encode(unsigned char *output, UINT4 *input, unsigned int len)
-{
-    unsigned int i, j;
-
-    for (i = 0, j = 0; j < len; i++, j += 4) {
-        output[j] = (unsigned char)(input[i] & 0xff);
-        output[j+1] = (unsigned char)((input[i] >> 8) & 0xff);
-        output[j+2] = (unsigned char)((input[i] >> 16) & 0xff);
-        output[j+3] = (unsigned char)((input[i] >> 24) & 0xff);
-    }
-}
-
-
-/* Decodes input (unsigned char) into output (UINT4). Assumes len is
-   a multiple of 4.
- */
-static void
-Decode(UINT4 *output, unsigned char *input, unsigned int len)
-{
-    unsigned int i, j;
-
-    for (i = 0, j = 0; j < len; i++, j += 4) {
-        output[i] = ((UINT4)input[j]) | (((UINT4)input[j+1]) << 8) |
-            (((UINT4)input[j+2]) << 16) | (((UINT4)input[j+3]) << 24);
-    }
-}
-
-
-static unsigned char PADDING[64] = {
-    0x80, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
-    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0
-};
-
-/* F, G, H and I are basic MD5 functions. */
-#define F(x, y, z) (((x) & (y)) | ((~x) & (z)))
-#define G(x, y, z) (((x) & (z)) | ((y) & (~z)))
-#define H(x, y, z) ((x) ^ (y) ^ (z))
-#define I(x, y, z) ((y) ^ ((x) | (~z)))
-
-/* ROTATE_LEFT rotates x left n bits. */
-#define ROTATE_LEFT(x, n) (((x) << (n)) | ((x) >> (32-(n))))
-
-/* FF, GG, HH, and II transformations for rounds 1, 2, 3, and 4.
-   Rotation is separate from addition to prevent recomputation.
- */
-#define FF(a, b, c, d, x, s, ac) { \
-     (a) += F ((b), (c), (d)) + (x) + (UINT4)(ac); \
-     (a) = ROTATE_LEFT ((a), (s)); \
-     (a) += (b); \
-    }
-#define GG(a, b, c, d, x, s, ac) { \
-     (a) += G ((b), (c), (d)) + (x) + (UINT4)(ac); \
-     (a) = ROTATE_LEFT ((a), (s)); \
-     (a) += (b); \
-    }
-#define HH(a, b, c, d, x, s, ac) { \
-     (a) += H ((b), (c), (d)) + (x) + (UINT4)(ac); \
-     (a) = ROTATE_LEFT ((a), (s)); \
-     (a) += (b); \
-    }
-#define II(a, b, c, d, x, s, ac) { \
-     (a) += I ((b), (c), (d)) + (x) + (UINT4)(ac); \
-     (a) = ROTATE_LEFT ((a), (s)); \
-     (a) += (b); \
-    }
-
-
-/* MD5 initialization. Begins an MD5 operation, writing a new context. */
-void
-MD5Init(MD5_CTX *context)
-{
-    context->count[0] = context->count[1] = 0;
-    /* Load magic initialization constants. */
-    context->state[0] = 0x67452301;
-    context->state[1] = 0xefcdab89;
-    context->state[2] = 0x98badcfe;
-    context->state[3] = 0x10325476;
-}
-
-
-/* MD5 block update operation. Continues an MD5 message-digest
-   operation, processing another message block, and updating the
-   context.
- */
-void
-MD5Update(MD5_CTX *context, unsigned char *input, unsigned int inputLen)
-{
-    unsigned int i, index, partLen;
-
-    /* Compute number of bytes mod 64 */
-    index = (unsigned int)((context->count[0] >> 3) & 0x3F);
-
-    /* Update number of bits */
-    if ((context->count[0] += ((UINT4)inputLen << 3))
-        < ((UINT4)inputLen << 3))
-        context->count[1]++;
-    context->count[1] += ((UINT4)inputLen >> 29);
-
-    partLen = 64 - index;
-
-    /* Transform as many times as possible. */
-    if (inputLen >= partLen) {
-        memcpy((POINTER)&context->buffer[index], (POINTER)input, partLen);
-        MD5Transform(context->state, context->buffer);
-
-        for (i = partLen; i + 63 < inputLen; i += 64)
-            MD5Transform(context->state, &input[i]);
-
-        index = 0;
-    }
-    else
-        i = 0;
-
-    /* Buffer remaining input */
-    memcpy((POINTER)&context->buffer[index],
-               (POINTER)&input[i], inputLen-i);
-}
-
-/* MD5 finalization. Ends an MD5 message-digest operation, writing the
-  message digest and zeroing the context.
- */
-void
-MD5Final(unsigned char digest[16], MD5_CTX *context)
-{
-    unsigned char bits[8];
-    unsigned int index, padLen;
-
-    /* Save number of bits */
-    Encode (bits, context->count, 8);
-
-    /* Pad out to 56 mod 64. */
-    index = (unsigned int)((context->count[0] >> 3) & 0x3f);
-    padLen = (index < 56) ? (56 - index) : (120 - index);
-    MD5Update(context, PADDING, padLen);
-
-    /* Append length (before padding) */
-    MD5Update(context, bits, 8);
-
-    /* Store state in digest */
-    Encode(digest, context->state, 16);
-
-    /* Zeroize sensitive information. */
-    memset((POINTER)context, 0, sizeof (*context));
-}
-
-
-/* MD5 basic transformation. Transforms state based on block. */
-static void
-MD5Transform(UINT4 state[4], unsigned char block[64])
-{
-    UINT4 a = state[0], b = state[1], c = state[2], d = state[3], x[16];
-
-    Decode (x, block, 64);
-
-    /* Round 1 */
-    FF(a, b, c, d, x[ 0], S11, 0xd76aa478); /* 1 */
-    FF(d, a, b, c, x[ 1], S12, 0xe8c7b756); /* 2 */
-    FF(c, d, a, b, x[ 2], S13, 0x242070db); /* 3 */
-    FF(b, c, d, a, x[ 3], S14, 0xc1bdceee); /* 4 */
-    FF(a, b, c, d, x[ 4], S11, 0xf57c0faf); /* 5 */
-    FF(d, a, b, c, x[ 5], S12, 0x4787c62a); /* 6 */
-    FF(c, d, a, b, x[ 6], S13, 0xa8304613); /* 7 */
-    FF(b, c, d, a, x[ 7], S14, 0xfd469501); /* 8 */
-    FF(a, b, c, d, x[ 8], S11, 0x698098d8); /* 9 */
-    FF(d, a, b, c, x[ 9], S12, 0x8b44f7af); /* 10 */
-    FF(c, d, a, b, x[10], S13, 0xffff5bb1); /* 11 */
-    FF(b, c, d, a, x[11], S14, 0x895cd7be); /* 12 */
-    FF(a, b, c, d, x[12], S11, 0x6b901122); /* 13 */
-    FF(d, a, b, c, x[13], S12, 0xfd987193); /* 14 */
-    FF(c, d, a, b, x[14], S13, 0xa679438e); /* 15 */
-    FF(b, c, d, a, x[15], S14, 0x49b40821); /* 16 */
-
-    /* Round 2 */
-    GG(a, b, c, d, x[ 1], S21, 0xf61e2562); /* 17 */
-    GG(d, a, b, c, x[ 6], S22, 0xc040b340); /* 18 */
-    GG(c, d, a, b, x[11], S23, 0x265e5a51); /* 19 */
-    GG(b, c, d, a, x[ 0], S24, 0xe9b6c7aa); /* 20 */
-    GG(a, b, c, d, x[ 5], S21, 0xd62f105d); /* 21 */
-    GG(d, a, b, c, x[10], S22,  0x2441453); /* 22 */
-    GG(c, d, a, b, x[15], S23, 0xd8a1e681); /* 23 */
-    GG(b, c, d, a, x[ 4], S24, 0xe7d3fbc8); /* 24 */
-    GG(a, b, c, d, x[ 9], S21, 0x21e1cde6); /* 25 */
-    GG(d, a, b, c, x[14], S22, 0xc33707d6); /* 26 */
-    GG(c, d, a, b, x[ 3], S23, 0xf4d50d87); /* 27 */
-    GG(b, c, d, a, x[ 8], S24, 0x455a14ed); /* 28 */
-    GG(a, b, c, d, x[13], S21, 0xa9e3e905); /* 29 */
-    GG(d, a, b, c, x[ 2], S22, 0xfcefa3f8); /* 30 */
-    GG(c, d, a, b, x[ 7], S23, 0x676f02d9); /* 31 */
-    GG(b, c, d, a, x[12], S24, 0x8d2a4c8a); /* 32 */
-
-    /* Round 3 */
-    HH(a, b, c, d, x[ 5], S31, 0xfffa3942); /* 33 */
-    HH(d, a, b, c, x[ 8], S32, 0x8771f681); /* 34 */
-    HH(c, d, a, b, x[11], S33, 0x6d9d6122); /* 35 */
-    HH(b, c, d, a, x[14], S34, 0xfde5380c); /* 36 */
-    HH(a, b, c, d, x[ 1], S31, 0xa4beea44); /* 37 */
-    HH(d, a, b, c, x[ 4], S32, 0x4bdecfa9); /* 38 */
-    HH(c, d, a, b, x[ 7], S33, 0xf6bb4b60); /* 39 */
-    HH(b, c, d, a, x[10], S34, 0xbebfbc70); /* 40 */
-    HH(a, b, c, d, x[13], S31, 0x289b7ec6); /* 41 */
-    HH(d, a, b, c, x[ 0], S32, 0xeaa127fa); /* 42 */
-    HH(c, d, a, b, x[ 3], S33, 0xd4ef3085); /* 43 */
-    HH(b, c, d, a, x[ 6], S34,  0x4881d05); /* 44 */
-    HH(a, b, c, d, x[ 9], S31, 0xd9d4d039); /* 45 */
-    HH(d, a, b, c, x[12], S32, 0xe6db99e5); /* 46 */
-    HH(c, d, a, b, x[15], S33, 0x1fa27cf8); /* 47 */
-    HH(b, c, d, a, x[ 2], S34, 0xc4ac5665); /* 48 */
-
-    /* Round 4 */
-    II(a, b, c, d, x[ 0], S41, 0xf4292244); /* 49 */
-    II(d, a, b, c, x[ 7], S42, 0x432aff97); /* 50 */
-    II(c, d, a, b, x[14], S43, 0xab9423a7); /* 51 */
-    II(b, c, d, a, x[ 5], S44, 0xfc93a039); /* 52 */
-    II(a, b, c, d, x[12], S41, 0x655b59c3); /* 53 */
-    II(d, a, b, c, x[ 3], S42, 0x8f0ccc92); /* 54 */
-    II(c, d, a, b, x[10], S43, 0xffeff47d); /* 55 */
-    II(b, c, d, a, x[ 1], S44, 0x85845dd1); /* 56 */
-    II(a, b, c, d, x[ 8], S41, 0x6fa87e4f); /* 57 */
-    II(d, a, b, c, x[15], S42, 0xfe2ce6e0); /* 58 */
-    II(c, d, a, b, x[ 6], S43, 0xa3014314); /* 59 */
-    II(b, c, d, a, x[13], S44, 0x4e0811a1); /* 60 */
-    II(a, b, c, d, x[ 4], S41, 0xf7537e82); /* 61 */
-    II(d, a, b, c, x[11], S42, 0xbd3af235); /* 62 */
-    II(c, d, a, b, x[ 2], S43, 0x2ad7d2bb); /* 63 */
-    II(b, c, d, a, x[ 9], S44, 0xeb86d391); /* 64 */
-
-    state[0] += a;
-    state[1] += b;
-    state[2] += c;
-    state[3] += d;
-
-    /* Zeroize sensitive information. */
-    memset((POINTER)x, 0, sizeof (x));
-}
diff --git a/Modules/md5module.c b/Modules/md5module.c
index e12bef8..5e4f116 100644
--- a/Modules/md5module.c
+++ b/Modules/md5module.c
@@ -15,7 +15,7 @@
 
 typedef struct {
 	PyObject_HEAD
-        MD5_CTX	md5;		/* the context holder */
+        md5_state_t	md5;		/* the context holder */
 } md5object;
 
 static PyTypeObject MD5type;
@@ -31,7 +31,7 @@
 	if (md5p == NULL)
 		return NULL;
 
-	MD5Init(&md5p->md5);	/* actual initialisation */
+	md5_init(&md5p->md5);	/* actual initialisation */
 	return md5p;
 }
 
@@ -56,7 +56,7 @@
 	if (!PyArg_ParseTuple(args, "s#:update", &cp, &len))
 		return NULL;
 
-	MD5Update(&self->md5, cp, len);
+	md5_append(&self->md5, cp, len);
 
 	Py_INCREF(Py_None);
 	return Py_None;
@@ -73,12 +73,12 @@
 static PyObject *
 md5_digest(md5object *self)
 {
- 	MD5_CTX mdContext;
+ 	md5_state_t mdContext;
 	unsigned char aDigest[16];
 
 	/* make a temporary copy, and perform the final */
 	mdContext = self->md5;
-	MD5Final(aDigest, &mdContext);
+	md5_finish(&mdContext, aDigest);
 
 	return PyString_FromStringAndSize((char *)aDigest, 16);
 }
@@ -94,14 +94,14 @@
 static PyObject *
 md5_hexdigest(md5object *self)
 {
- 	MD5_CTX mdContext;
+ 	md5_state_t mdContext;
 	unsigned char digest[16];
 	unsigned char hexdigest[32];
 	int i, j;
 
 	/* make a temporary copy, and perform the final */
 	mdContext = self->md5;
-	MD5Final(digest, &mdContext);
+	md5_finish(&mdContext, digest);
 
 	/* Make hex version of the digest */
 	for(i=j=0; i<16; i++) {
@@ -272,7 +272,7 @@
 		return NULL;
 
 	if (cp)
-		MD5Update(&md5p->md5, cp, len);
+		md5_append(&md5p->md5, cp, len);
 
 	return (PyObject *)md5p;
 }
diff --git a/Modules/operator.c b/Modules/operator.c
index 24f4e0a..cbce16e 100644
--- a/Modules/operator.c
+++ b/Modules/operator.c
@@ -354,8 +354,7 @@
 static int
 itemgetter_traverse(itemgetterobject *ig, visitproc visit, void *arg)
 {
-	if (ig->item)
-		return visit(ig->item, arg);
+	Py_VISIT(ig->item);
 	return 0;
 }
 
@@ -493,8 +492,7 @@
 static int
 attrgetter_traverse(attrgetterobject *ag, visitproc visit, void *arg)
 {
-	if (ag->attr)
-		return visit(ag->attr, arg);
+	Py_VISIT(ag->attr);
 	return 0;
 }
 
diff --git a/Modules/ossaudiodev.c b/Modules/ossaudiodev.c
index ce8a0d0..563620c 100644
--- a/Modules/ossaudiodev.c
+++ b/Modules/ossaudiodev.c
@@ -935,24 +935,32 @@
 
     labels = PyList_New(num_controls);
     names = PyList_New(num_controls);
+    if (labels == NULL || names == NULL)
+        goto error2;
     for (i = 0; i < num_controls; i++) {
         s = PyString_FromString(control_labels[i]);
         if (s == NULL)
-            return -1;
+            goto error2;
         PyList_SET_ITEM(labels, i, s);
 
         s = PyString_FromString(control_names[i]);
         if (s == NULL)
-            return -1;
+            goto error2;
         PyList_SET_ITEM(names, i, s);
     }
 
     if (PyModule_AddObject(module, "control_labels", labels) == -1)
-        return -1;
+        goto error2;
     if (PyModule_AddObject(module, "control_names", names) == -1)
-        return -1;
+        goto error1;
 
     return 0;
+
+error2:
+    Py_XDECREF(labels);
+error1:
+    Py_XDECREF(names);
+    return -1;
 }
 
 
diff --git a/Modules/parsermodule.c b/Modules/parsermodule.c
index 83165ba..c9edae6 100644
--- a/Modules/parsermodule.c
+++ b/Modules/parsermodule.c
@@ -657,9 +657,10 @@
             }
         }
         if (!ok) {
-            PyErr_SetObject(parser_error,
-                            Py_BuildValue("os", elem,
-                                          "Illegal node construct."));
+            PyObject *err = Py_BuildValue("os", elem,
+                                          "Illegal node construct.");
+            PyErr_SetObject(parser_error, err);
+            Py_XDECREF(err);
             Py_XDECREF(elem);
             return (0);
         }
@@ -700,7 +701,7 @@
                 }
             }
             len = PyString_GET_SIZE(temp) + 1;
-            strn = (char *)PyMem_MALLOC(len);
+            strn = (char *)PyObject_MALLOC(len);
             if (strn != NULL)
                 (void) memcpy(strn, PyString_AS_STRING(temp), len);
             Py_DECREF(temp);
@@ -710,18 +711,19 @@
              *  It has to be one or the other; this is an error.
              *  Throw an exception.
              */
-            PyErr_SetObject(parser_error,
-                            Py_BuildValue("os", elem, "unknown node type."));
+            PyObject *err = Py_BuildValue("os", elem, "unknown node type.");
+            PyErr_SetObject(parser_error, err);
+            Py_XDECREF(err);
             Py_XDECREF(elem);
             return (0);
         }
         err = PyNode_AddChild(root, type, strn, *line_num, 0);
         if (err == E_NOMEM) {
-            PyMem_DEL(strn);
+            PyObject_FREE(strn);
             return (node *) PyErr_NoMemory();
         }
         if (err == E_OVERFLOW) {
-            PyMem_DEL(strn);
+            PyObject_FREE(strn);
             PyErr_SetString(PyExc_ValueError,
                             "unsupported number of child nodes");
             return NULL;
@@ -740,7 +742,7 @@
         }
         Py_XDECREF(elem);
     }
-    return (root);
+    return root;
 }
 
 
@@ -762,6 +764,7 @@
         tuple = Py_BuildValue("os", tuple,
                     "Illegal syntax-tree; cannot start with terminal symbol.");
         PyErr_SetObject(parser_error, tuple);
+        Py_XDECREF(tuple);
     }
     else if (ISNONTERMINAL(num)) {
         /*
@@ -784,7 +787,7 @@
             if (res && encoding) {
                 Py_ssize_t len;
                 len = PyString_GET_SIZE(encoding) + 1;
-                res->n_str = (char *)PyMem_MALLOC(len);
+                res->n_str = (char *)PyObject_MALLOC(len);
                 if (res->n_str != NULL)
                     (void) memcpy(res->n_str, PyString_AS_STRING(encoding), len);
                 Py_DECREF(encoding);
@@ -792,14 +795,16 @@
             }
         }
     }
-    else
+    else {
         /*  The tuple is illegal -- if the number is neither TERMINAL nor
          *  NONTERMINAL, we can't use it.  Not sure the implementation
          *  allows this condition, but the API doesn't preclude it.
          */
-        PyErr_SetObject(parser_error,
-                        Py_BuildValue("os", tuple,
-                                      "Illegal component tuple."));
+        PyObject *err = Py_BuildValue("os", tuple,
+                                      "Illegal component tuple.");
+        PyErr_SetObject(parser_error, err);
+        Py_XDECREF(err);
+    }
 
     return (res);
 }
@@ -1321,7 +1326,7 @@
     return res;
 }
 
-/*  list_if:  'if' test [list_iter]
+/*  list_if:  'if' old_test [list_iter]
  */
 static int
 validate_list_if(node *tree)
@@ -1336,12 +1341,12 @@
 
     if (res)
         res = (validate_name(CHILD(tree, 0), "if")
-               && validate_test(CHILD(tree, 1)));
+               && validate_old_test(CHILD(tree, 1)));
 
     return res;
 }
 
-/*  gen_if:  'if' test [gen_iter]
+/*  gen_if:  'if' old_test [gen_iter]
  */
 static int
 validate_gen_if(node *tree)
@@ -1356,7 +1361,7 @@
     
     if (res)
         res = (validate_name(CHILD(tree, 0), "if")
-               && validate_test(CHILD(tree, 1)));
+               && validate_old_test(CHILD(tree, 1)));
 
     return res;
 }
diff --git a/Modules/posixmodule.c b/Modules/posixmodule.c
index 1fbc353..b51ba5d 100644
--- a/Modules/posixmodule.c
+++ b/Modules/posixmodule.c
@@ -22,6 +22,10 @@
 #    include <unixio.h>
 #endif /* defined(__VMS) */
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 PyDoc_STRVAR(posix__doc__,
 "This module provides access to operating system functionality that is\n\
 standardized by the C Standard and the POSIX standard (a thinly\n\
@@ -264,6 +268,12 @@
 #define WTERMSIG(u_wait) ((u_wait).w_termsig)
 #endif
 
+#define WAIT_TYPE union wait
+#define WAIT_STATUS_INT(s) (s.w_status)
+
+#else /* !UNION_WAIT */
+#define WAIT_TYPE int
+#define WAIT_STATUS_INT(s) (s)
 #endif /* UNION_WAIT */
 
 /* Don't use the "_r" form if we don't need it (also, won't have a
@@ -971,6 +981,7 @@
 	10
 };
 
+static int initialized;
 static PyTypeObject StatResultType;
 static PyTypeObject StatVFSResultType;
 static newfunc structseq_new;
@@ -1839,6 +1850,7 @@
 	struct dirent *ep;
 	int arg_is_unicode = 1;
 
+	errno = 0;
 	if (!PyArg_ParseTuple(args, "U:listdir", &v)) {
 		arg_is_unicode = 0;
 		PyErr_Clear();
@@ -1895,6 +1907,12 @@
 		}
 		Py_DECREF(v);
 	}
+	if (errno != 0 && d != NULL) {
+		/* readdir() returned NULL and set errno */
+		closedir(dirp);
+		Py_DECREF(d);
+		return posix_error_with_allocated_filename(name); 
+	}
 	closedir(dirp);
 	PyMem_Free(name);
 
@@ -1995,13 +2013,13 @@
 }
 
 
-#ifdef HAVE_NICE
-#if defined(HAVE_BROKEN_NICE) && defined(HAVE_SYS_RESOURCE_H)
-#if defined(HAVE_GETPRIORITY) && !defined(PRIO_PROCESS)
+/* sys/resource.h is needed for at least: wait3(), wait4(), broken nice. */
+#if defined(HAVE_SYS_RESOURCE_H)
 #include <sys/resource.h>
 #endif
-#endif
 
+
+#ifdef HAVE_NICE
 PyDoc_STRVAR(posix_nice__doc__,
 "nice(inc) -> new_priority\n\n\
 Decrease the priority of process by inc and return the new priority.");
@@ -3088,7 +3106,7 @@
 #if defined(HAVE_DEV_PTMX) && !defined(HAVE_OPENPTY) && !defined(HAVE__GETPTY)
 	PyOS_sighandler_t sig_saved;
 #ifdef sun
-	extern char *ptsname();
+	extern char *ptsname(int fildes);
 #endif
 #endif
 
@@ -5091,6 +5109,114 @@
 }
 #endif /* HAVE_SETGROUPS */
 
+#if defined(HAVE_WAIT3) || defined(HAVE_WAIT4)
+static PyObject *
+wait_helper(int pid, int status, struct rusage *ru)
+{
+	PyObject *result;
+   	static PyObject *struct_rusage;
+
+	if (pid == -1)
+		return posix_error();
+
+	if (struct_rusage == NULL) {
+		PyObject *m = PyImport_ImportModule("resource");
+		if (m == NULL)
+			return NULL;
+		struct_rusage = PyObject_GetAttrString(m, "struct_rusage");
+		Py_DECREF(m);
+		if (struct_rusage == NULL)
+			return NULL;
+	}
+
+	/* XXX(nnorwitz): Copied (w/mods) from resource.c, there should be only one. */
+	result = PyStructSequence_New((PyTypeObject*) struct_rusage);
+	if (!result)
+		return NULL;
+
+#ifndef doubletime
+#define doubletime(TV) ((double)(TV).tv_sec + (TV).tv_usec * 0.000001)
+#endif
+
+	PyStructSequence_SET_ITEM(result, 0,
+			PyFloat_FromDouble(doubletime(ru->ru_utime)));
+	PyStructSequence_SET_ITEM(result, 1,
+			PyFloat_FromDouble(doubletime(ru->ru_stime)));
+#define SET_INT(result, index, value)\
+		PyStructSequence_SET_ITEM(result, index, PyInt_FromLong(value))
+	SET_INT(result, 2, ru->ru_maxrss);
+	SET_INT(result, 3, ru->ru_ixrss);
+	SET_INT(result, 4, ru->ru_idrss);
+	SET_INT(result, 5, ru->ru_isrss);
+	SET_INT(result, 6, ru->ru_minflt);
+	SET_INT(result, 7, ru->ru_majflt);
+	SET_INT(result, 8, ru->ru_nswap);
+	SET_INT(result, 9, ru->ru_inblock);
+	SET_INT(result, 10, ru->ru_oublock);
+	SET_INT(result, 11, ru->ru_msgsnd);
+	SET_INT(result, 12, ru->ru_msgrcv);
+	SET_INT(result, 13, ru->ru_nsignals);
+	SET_INT(result, 14, ru->ru_nvcsw);
+	SET_INT(result, 15, ru->ru_nivcsw);
+#undef SET_INT
+
+	if (PyErr_Occurred()) {
+		Py_DECREF(result);
+		return NULL;
+	}
+
+	return Py_BuildValue("iiN", pid, status, result);
+}
+#endif /* HAVE_WAIT3 || HAVE_WAIT4 */
+
+#ifdef HAVE_WAIT3
+PyDoc_STRVAR(posix_wait3__doc__,
+"wait3(options) -> (pid, status, rusage)\n\n\
+Wait for completion of a child process.");
+
+static PyObject *
+posix_wait3(PyObject *self, PyObject *args)
+{
+	int pid, options;
+	struct rusage ru;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
+
+	if (!PyArg_ParseTuple(args, "i:wait3", &options))
+		return NULL;
+
+	Py_BEGIN_ALLOW_THREADS
+	pid = wait3(&status, options, &ru);
+	Py_END_ALLOW_THREADS
+
+	return wait_helper(pid, WAIT_STATUS_INT(status), &ru);
+}
+#endif /* HAVE_WAIT3 */
+
+#ifdef HAVE_WAIT4
+PyDoc_STRVAR(posix_wait4__doc__,
+"wait4(pid, options) -> (pid, status, rusage)\n\n\
+Wait for completion of a given child process.");
+
+static PyObject *
+posix_wait4(PyObject *self, PyObject *args)
+{
+	int pid, options;
+	struct rusage ru;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
+
+	if (!PyArg_ParseTuple(args, "ii:wait4", &pid, &options))
+		return NULL;
+
+	Py_BEGIN_ALLOW_THREADS
+	pid = wait4(pid, &status, options, &ru);
+	Py_END_ALLOW_THREADS
+
+	return wait_helper(pid, WAIT_STATUS_INT(status), &ru);
+}
+#endif /* HAVE_WAIT4 */
+
 #ifdef HAVE_WAITPID
 PyDoc_STRVAR(posix_waitpid__doc__,
 "waitpid(pid, options) -> (pid, status)\n\n\
@@ -5100,14 +5226,8 @@
 posix_waitpid(PyObject *self, PyObject *args)
 {
 	int pid, options;
-#ifdef UNION_WAIT
-	union wait status;
-#define status_i (status.w_status)
-#else
-	int status;
-#define status_i status
-#endif
-	status_i = 0;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
 
 	if (!PyArg_ParseTuple(args, "ii:waitpid", &pid, &options))
 		return NULL;
@@ -5116,8 +5236,8 @@
 	Py_END_ALLOW_THREADS
 	if (pid == -1)
 		return posix_error();
-	else
-		return Py_BuildValue("ii", pid, status_i);
+
+	return Py_BuildValue("ii", pid, WAIT_STATUS_INT(status));
 }
 
 #elif defined(HAVE_CWAIT)
@@ -5140,10 +5260,9 @@
 	Py_END_ALLOW_THREADS
 	if (pid == -1)
 		return posix_error();
-	else
-		/* shift the status left a byte so this is more like the
-		   POSIX waitpid */
-		return Py_BuildValue("ii", pid, status << 8);
+
+	/* shift the status left a byte so this is more like the POSIX waitpid */
+	return Py_BuildValue("ii", pid, status << 8);
 }
 #endif /* HAVE_WAITPID || HAVE_CWAIT */
 
@@ -5156,23 +5275,16 @@
 posix_wait(PyObject *self, PyObject *noargs)
 {
 	int pid;
-#ifdef UNION_WAIT
-	union wait status;
-#define status_i (status.w_status)
-#else
-	int status;
-#define status_i status
-#endif
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
 
-	status_i = 0;
 	Py_BEGIN_ALLOW_THREADS
 	pid = wait(&status);
 	Py_END_ALLOW_THREADS
 	if (pid == -1)
 		return posix_error();
-	else
-		return Py_BuildValue("ii", pid, status_i);
-#undef status_i
+
+	return Py_BuildValue("ii", pid, WAIT_STATUS_INT(status));
 }
 #endif
 
@@ -5668,9 +5780,24 @@
 			     "invalid file mode '%s'", mode);
 		return NULL;
 	}
-
 	Py_BEGIN_ALLOW_THREADS
+#if !defined(MS_WINDOWS) && defined(HAVE_FCNTL_H)
+	if (mode[0] == 'a') {
+		/* try to make sure the O_APPEND flag is set */
+		int flags;
+		flags = fcntl(fd, F_GETFL);
+		if (flags != -1)
+			fcntl(fd, F_SETFL, flags | O_APPEND);
+		fp = fdopen(fd, mode);
+		if (fp == NULL && flags != -1)
+			/* restore old mode if fdopen failed */
+			fcntl(fd, F_SETFL, flags);
+	} else {
+		fp = fdopen(fd, mode);
+	}
+#else
 	fp = fdopen(fd, mode);
+#endif
 	Py_END_ALLOW_THREADS
 	if (fp == NULL)
 		return posix_error();
@@ -5887,7 +6014,7 @@
 posix_putenv(PyObject *self, PyObject *args)
 {
         char *s1, *s2;
-        char *new;
+        char *newenv;
 	PyObject *newstr;
 	size_t len;
 
@@ -5918,9 +6045,9 @@
 	newstr = PyString_FromStringAndSize(NULL, (int)len - 1);
 	if (newstr == NULL)
 		return PyErr_NoMemory();
-	new = PyString_AS_STRING(newstr);
-	PyOS_snprintf(new, len, "%s=%s", s1, s2);
-	if (putenv(new)) {
+	newenv = PyString_AS_STRING(newstr);
+	PyOS_snprintf(newenv, len, "%s=%s", s1, s2);
+	if (putenv(newenv)) {
                 Py_DECREF(newstr);
                 posix_error();
                 return NULL;
@@ -6010,22 +6137,13 @@
 static PyObject *
 posix_WCOREDUMP(PyObject *self, PyObject *args)
 {
-#ifdef UNION_WAIT
-	union wait status;
-#define status_i (status.w_status)
-#else
-	int status;
-#define status_i status
-#endif
-	status_i = 0;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
 
-	if (!PyArg_ParseTuple(args, "i:WCOREDUMP", &status_i))
-	{
+	if (!PyArg_ParseTuple(args, "i:WCOREDUMP", &WAIT_STATUS_INT(status)))
 		return NULL;
-	}
 
 	return PyBool_FromLong(WCOREDUMP(status));
-#undef status_i
 }
 #endif /* WCOREDUMP */
 
@@ -6038,22 +6156,13 @@
 static PyObject *
 posix_WIFCONTINUED(PyObject *self, PyObject *args)
 {
-#ifdef UNION_WAIT
-	union wait status;
-#define status_i (status.w_status)
-#else
-	int status;
-#define status_i status
-#endif
-	status_i = 0;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
 
-	if (!PyArg_ParseTuple(args, "i:WCONTINUED", &status_i))
-	{
+	if (!PyArg_ParseTuple(args, "i:WCONTINUED", &WAIT_STATUS_INT(status)))
 		return NULL;
-	}
 
 	return PyBool_FromLong(WIFCONTINUED(status));
-#undef status_i
 }
 #endif /* WIFCONTINUED */
 
@@ -6065,22 +6174,13 @@
 static PyObject *
 posix_WIFSTOPPED(PyObject *self, PyObject *args)
 {
-#ifdef UNION_WAIT
-	union wait status;
-#define status_i (status.w_status)
-#else
-	int status;
-#define status_i status
-#endif
-	status_i = 0;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
 
-	if (!PyArg_ParseTuple(args, "i:WIFSTOPPED", &status_i))
-	{
+	if (!PyArg_ParseTuple(args, "i:WIFSTOPPED", &WAIT_STATUS_INT(status)))
 		return NULL;
-	}
 
 	return PyBool_FromLong(WIFSTOPPED(status));
-#undef status_i
 }
 #endif /* WIFSTOPPED */
 
@@ -6092,22 +6192,13 @@
 static PyObject *
 posix_WIFSIGNALED(PyObject *self, PyObject *args)
 {
-#ifdef UNION_WAIT
-	union wait status;
-#define status_i (status.w_status)
-#else
-	int status;
-#define status_i status
-#endif
-	status_i = 0;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
 
-	if (!PyArg_ParseTuple(args, "i:WIFSIGNALED", &status_i))
-	{
+	if (!PyArg_ParseTuple(args, "i:WIFSIGNALED", &WAIT_STATUS_INT(status)))
 		return NULL;
-	}
 
 	return PyBool_FromLong(WIFSIGNALED(status));
-#undef status_i
 }
 #endif /* WIFSIGNALED */
 
@@ -6120,22 +6211,13 @@
 static PyObject *
 posix_WIFEXITED(PyObject *self, PyObject *args)
 {
-#ifdef UNION_WAIT
-	union wait status;
-#define status_i (status.w_status)
-#else
-	int status;
-#define status_i status
-#endif
-	status_i = 0;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
 
-	if (!PyArg_ParseTuple(args, "i:WIFEXITED", &status_i))
-	{
+	if (!PyArg_ParseTuple(args, "i:WIFEXITED", &WAIT_STATUS_INT(status)))
 		return NULL;
-	}
 
 	return PyBool_FromLong(WIFEXITED(status));
-#undef status_i
 }
 #endif /* WIFEXITED */
 
@@ -6147,22 +6229,13 @@
 static PyObject *
 posix_WEXITSTATUS(PyObject *self, PyObject *args)
 {
-#ifdef UNION_WAIT
-	union wait status;
-#define status_i (status.w_status)
-#else
-	int status;
-#define status_i status
-#endif
-	status_i = 0;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
 
-	if (!PyArg_ParseTuple(args, "i:WEXITSTATUS", &status_i))
-	{
+	if (!PyArg_ParseTuple(args, "i:WEXITSTATUS", &WAIT_STATUS_INT(status)))
 		return NULL;
-	}
 
 	return Py_BuildValue("i", WEXITSTATUS(status));
-#undef status_i
 }
 #endif /* WEXITSTATUS */
 
@@ -6175,22 +6248,13 @@
 static PyObject *
 posix_WTERMSIG(PyObject *self, PyObject *args)
 {
-#ifdef UNION_WAIT
-	union wait status;
-#define status_i (status.w_status)
-#else
-	int status;
-#define status_i status
-#endif
-	status_i = 0;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
 
-	if (!PyArg_ParseTuple(args, "i:WTERMSIG", &status_i))
-	{
+	if (!PyArg_ParseTuple(args, "i:WTERMSIG", &WAIT_STATUS_INT(status)))
 		return NULL;
-	}
 
 	return Py_BuildValue("i", WTERMSIG(status));
-#undef status_i
 }
 #endif /* WTERMSIG */
 
@@ -6203,22 +6267,13 @@
 static PyObject *
 posix_WSTOPSIG(PyObject *self, PyObject *args)
 {
-#ifdef UNION_WAIT
-	union wait status;
-#define status_i (status.w_status)
-#else
-	int status;
-#define status_i status
-#endif
-	status_i = 0;
+	WAIT_TYPE status;
+	WAIT_STATUS_INT(status) = 0;
 
-	if (!PyArg_ParseTuple(args, "i:WSTOPSIG", &status_i))
-	{
+	if (!PyArg_ParseTuple(args, "i:WSTOPSIG", &WAIT_STATUS_INT(status)))
 		return NULL;
-	}
 
 	return Py_BuildValue("i", WSTOPSIG(status));
-#undef status_i
 }
 #endif /* WSTOPSIG */
 
@@ -6396,15 +6451,16 @@
     name = tmpnam(buffer);
 #endif
     if (name == NULL) {
-        PyErr_SetObject(PyExc_OSError,
-                        Py_BuildValue("is", 0,
+	PyObject *err = Py_BuildValue("is", 0,
 #ifdef USE_TMPNAM_R
                                       "unexpected NULL from tmpnam_r"
 #else
                                       "unexpected NULL from tmpnam"
 #endif
-                                      ));
-        return NULL;
+                                      );
+	PyErr_SetObject(PyExc_OSError, err);
+	Py_XDECREF(err);
+	return NULL;
     }
     return PyString_FromString(buffer);
 }
@@ -6753,26 +6809,30 @@
 {
     PyObject *result = NULL;
     int name;
-    char buffer[64];
+    char buffer[256];
 
     if (PyArg_ParseTuple(args, "O&:confstr", conv_confstr_confname, &name)) {
-        int len = confstr(name, buffer, sizeof(buffer));
+	int len;
 
         errno = 0;
-        if (len == 0) {
-            if (errno != 0)
-                posix_error();
-            else
-                result = PyString_FromString("");
+	len = confstr(name, buffer, sizeof(buffer));
+	if (len == 0) {
+	    if (errno) {
+		posix_error();
+	    }
+	    else {
+		result = Py_None;
+		Py_INCREF(Py_None);
+	    }
         }
         else {
-            if (len >= sizeof(buffer)) {
-                result = PyString_FromStringAndSize(NULL, len);
+	    if ((unsigned int)len >= sizeof(buffer)) {
+                result = PyString_FromStringAndSize(NULL, len-1);
                 if (result != NULL)
-                    confstr(name, PyString_AS_STRING(result), len+1);
+                    confstr(name, PyString_AS_STRING(result), len);
             }
             else
-                result = PyString_FromString(buffer);
+                result = PyString_FromStringAndSize(buffer, len-1);
         }
     }
     return result;
@@ -7423,6 +7483,44 @@
 	char *filepath;
 	char *operation = NULL;
 	HINSTANCE rc;
+#ifdef Py_WIN_WIDE_FILENAMES
+	if (unicode_file_names()) {
+		PyObject *unipath, *woperation = NULL;
+		if (!PyArg_ParseTuple(args, "U|s:startfile",
+				      &unipath, &operation)) {
+			PyErr_Clear();
+			goto normal;
+		}
+		
+
+		if (operation) {
+		    woperation = PyUnicode_DecodeASCII(operation, 
+						       strlen(operation), NULL);
+		    if (!woperation) {
+			    PyErr_Clear();
+			    operation = NULL;
+			    goto normal;
+		    }
+		}
+			
+		Py_BEGIN_ALLOW_THREADS
+		rc = ShellExecuteW((HWND)0, woperation ? PyUnicode_AS_UNICODE(woperation) : 0,
+			PyUnicode_AS_UNICODE(unipath),
+			NULL, NULL, SW_SHOWNORMAL);
+		Py_END_ALLOW_THREADS
+
+		Py_XDECREF(woperation);
+		if (rc <= (HINSTANCE)32) {
+			PyObject *errval = win32_error_unicode("startfile",
+						PyUnicode_AS_UNICODE(unipath));
+			return errval;
+		}
+		Py_INCREF(Py_None);
+		return Py_None;
+	}
+#endif
+
+normal:
 	if (!PyArg_ParseTuple(args, "et|s:startfile", 
 			      Py_FileSystemDefaultEncoding, &filepath, 
 			      &operation))
@@ -7695,6 +7793,12 @@
 #ifdef HAVE_WAIT
 	{"wait",	posix_wait, METH_NOARGS, posix_wait__doc__},
 #endif /* HAVE_WAIT */
+#ifdef HAVE_WAIT3
+        {"wait3",	posix_wait3, METH_VARARGS, posix_wait3__doc__},
+#endif /* HAVE_WAIT3 */
+#ifdef HAVE_WAIT4
+        {"wait4",	posix_wait4, METH_VARARGS, posix_wait4__doc__},
+#endif /* HAVE_WAIT4 */
 #if defined(HAVE_WAITPID) || defined(HAVE_CWAIT)
 	{"waitpid",	posix_waitpid, METH_VARARGS, posix_waitpid__doc__},
 #endif /* HAVE_WAITPID */
@@ -8142,19 +8246,27 @@
 		posix_putenv_garbage = PyDict_New();
 #endif
 
-	stat_result_desc.name = MODNAME ".stat_result";
-	stat_result_desc.fields[7].name = PyStructSequence_UnnamedField;
-	stat_result_desc.fields[8].name = PyStructSequence_UnnamedField;
-	stat_result_desc.fields[9].name = PyStructSequence_UnnamedField;
-	PyStructSequence_InitType(&StatResultType, &stat_result_desc);
-	structseq_new = StatResultType.tp_new;
-	StatResultType.tp_new = statresult_new;
+	if (!initialized) {
+		stat_result_desc.name = MODNAME ".stat_result";
+		stat_result_desc.fields[7].name = PyStructSequence_UnnamedField;
+		stat_result_desc.fields[8].name = PyStructSequence_UnnamedField;
+		stat_result_desc.fields[9].name = PyStructSequence_UnnamedField;
+		PyStructSequence_InitType(&StatResultType, &stat_result_desc);
+		structseq_new = StatResultType.tp_new;
+		StatResultType.tp_new = statresult_new;
+
+		statvfs_result_desc.name = MODNAME ".statvfs_result";
+		PyStructSequence_InitType(&StatVFSResultType, &statvfs_result_desc);
+	}
 	Py_INCREF((PyObject*) &StatResultType);
 	PyModule_AddObject(m, "stat_result", (PyObject*) &StatResultType);
-
-	statvfs_result_desc.name = MODNAME ".statvfs_result";
-	PyStructSequence_InitType(&StatVFSResultType, &statvfs_result_desc);
 	Py_INCREF((PyObject*) &StatVFSResultType);
 	PyModule_AddObject(m, "statvfs_result",
 			   (PyObject*) &StatVFSResultType);
+	initialized = 1;
 }
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/Modules/pwdmodule.c b/Modules/pwdmodule.c
index 9e7b864..9e01f48 100644
--- a/Modules/pwdmodule.c
+++ b/Modules/pwdmodule.c
@@ -42,6 +42,7 @@
 exception is raised if the entry asked for cannot be found.");
 
       
+static int initialized;
 static PyTypeObject StructPwdType;
 
 static void
@@ -186,9 +187,12 @@
 	if (m == NULL)
     		return;
 
-	PyStructSequence_InitType(&StructPwdType, &struct_pwd_type_desc);
+	if (!initialized)
+		PyStructSequence_InitType(&StructPwdType, 
+					  &struct_pwd_type_desc);
 	Py_INCREF((PyObject *) &StructPwdType);
 	PyModule_AddObject(m, "struct_passwd", (PyObject *) &StructPwdType);
 	/* And for b/w compatibility (this was defined by mistake): */
 	PyModule_AddObject(m, "struct_pwent", (PyObject *) &StructPwdType);
+	initialized = 1;
 }
diff --git a/Modules/pyexpat.c b/Modules/pyexpat.c
index e4bf180..fe50e36 100644
--- a/Modules/pyexpat.c
+++ b/Modules/pyexpat.c
@@ -1519,6 +1519,8 @@
     if (strcmp(name, "__members__") == 0) {
         int i;
         PyObject *rc = PyList_New(0);
+	if (!rc)
+		return NULL;
         for (i = 0; handler_info[i].name != NULL; i++) {
             PyObject *o = get_handler_name(&handler_info[i]);
             if (o != NULL)
@@ -1652,14 +1654,9 @@
 static int
 xmlparse_traverse(xmlparseobject *op, visitproc visit, void *arg)
 {
-    int i, err;
-    for (i = 0; handler_info[i].name != NULL; i++) {
-        if (!op->handlers[i])
-            continue;
-        err = visit(op->handlers[i], arg);
-        if (err)
-            return err;
-    }
+    int i;
+    for (i = 0; handler_info[i].name != NULL; i++)
+        Py_VISIT(op->handlers[i]);
     return 0;
 }
 
@@ -1667,8 +1664,7 @@
 xmlparse_clear(xmlparseobject *op)
 {
     clear_handlers(op, 0);
-    Py_XDECREF(op->intern);
-    op->intern = 0;
+    Py_CLEAR(op->intern);
     return 0;
 }
 #endif
diff --git a/Modules/regexmodule.c b/Modules/regexmodule.c
deleted file mode 100644
index 2fb4198..0000000
--- a/Modules/regexmodule.c
+++ /dev/null
@@ -1,690 +0,0 @@
-/*
-XXX support range parameter on search
-XXX support mstop parameter on search
-*/
-
-
-/* Regular expression objects */
-/* This uses Tatu Ylonen's copyleft-free reimplementation of
-   GNU regular expressions */
-
-#include "Python.h"
-
-#include <ctype.h>
-
-#include "regexpr.h"
-
-static PyObject *RegexError;	/* Exception */	
-
-typedef struct {
-	PyObject_HEAD
-	struct re_pattern_buffer re_patbuf; /* The compiled expression */
-	struct re_registers re_regs; /* The registers from the last match */
-	char re_fastmap[256];	/* Storage for fastmap */
-	PyObject *re_translate;	/* String object for translate table */
-	PyObject *re_lastok;	/* String object last matched/searched */
-	PyObject *re_groupindex;	/* Group name to index dictionary */
-	PyObject *re_givenpat;	/* Pattern with symbolic groups */
-	PyObject *re_realpat;	/* Pattern without symbolic groups */
-} regexobject;
-
-/* Regex object methods */
-
-static void
-reg_dealloc(regexobject *re)
-{
-	if (re->re_patbuf.buffer)
-		free(re->re_patbuf.buffer);
-	Py_XDECREF(re->re_translate);
-	Py_XDECREF(re->re_lastok);
-	Py_XDECREF(re->re_groupindex);
-	Py_XDECREF(re->re_givenpat);
-	Py_XDECREF(re->re_realpat);
-	PyObject_Del(re);
-}
-
-static PyObject *
-makeresult(struct re_registers *regs)
-{
-	PyObject *v;
-	int i;
-	static PyObject *filler = NULL;
-
-	if (filler == NULL) {
-		filler = Py_BuildValue("(ii)", -1, -1);
-		if (filler == NULL)
-			return NULL;
-	}
-	v = PyTuple_New(RE_NREGS);
-	if (v == NULL)
-		return NULL;
-
-	for (i = 0; i < RE_NREGS; i++) {
-		int lo = regs->start[i];
-		int hi = regs->end[i];
-		PyObject *w;
-		if (lo == -1 && hi == -1) {
-			w = filler;
-			Py_INCREF(w);
-		}
-		else
-			w = Py_BuildValue("(ii)", lo, hi);
-		if (w == NULL || PyTuple_SetItem(v, i, w) < 0) {
-			Py_DECREF(v);
-			return NULL;
-		}
-	}
-	return v;
-}
-
-static PyObject *
-regobj_match(regexobject *re, PyObject *args)
-{
-	PyObject *argstring;
-	char *buffer;
-	int size;
-	int offset = 0;
-	int result;
-
-	if (!PyArg_ParseTuple(args, "O|i:match", &argstring, &offset))
-		return NULL;
-	if (!PyArg_Parse(argstring, "t#", &buffer, &size))
-		return NULL;
-
-	if (offset < 0 || offset > size) {
-		PyErr_SetString(RegexError, "match offset out of range");
-		return NULL;
-	}
-	Py_XDECREF(re->re_lastok);
-	re->re_lastok = NULL;
-	result = _Py_re_match(&re->re_patbuf, (unsigned char *)buffer, size, offset,
-			      &re->re_regs);
-	if (result < -1) {
-		/* Serious failure of some sort; if re_match didn't 
-		   set an exception, raise a generic error */
-	        if (!PyErr_Occurred())
-		        PyErr_SetString(RegexError, "match failure");
-		return NULL;
-	}
-	if (result >= 0) {
-		Py_INCREF(argstring);
-		re->re_lastok = argstring;
-	}
-	return PyInt_FromLong((long)result); /* Length of the match or -1 */
-}
-
-static PyObject *
-regobj_search(regexobject *re, PyObject *args)
-{
-	PyObject *argstring;
-	char *buffer;
-	int size;
-	int offset = 0;
-	int range;
-	int result;
-	
-	if (!PyArg_ParseTuple(args, "O|i:search", &argstring, &offset))
-		return NULL;
-	if (!PyArg_Parse(argstring, "t#:search", &buffer, &size))
-		return NULL;
-
-	if (offset < 0 || offset > size) {
-		PyErr_SetString(RegexError, "search offset out of range");
-		return NULL;
-	}
-	/* NB: In Emacs 18.57, the documentation for re_search[_2] and
-	   the implementation don't match: the documentation states that
-	   |range| positions are tried, while the code tries |range|+1
-	   positions.  It seems more productive to believe the code! */
-	range = size - offset;
-	Py_XDECREF(re->re_lastok);
-	re->re_lastok = NULL;
-	result = _Py_re_search(&re->re_patbuf, (unsigned char *)buffer, size, offset, range,
-			   &re->re_regs);
-	if (result < -1) {
-		/* Serious failure of some sort; if re_match didn't 
-		   set an exception, raise a generic error */
-	        if (!PyErr_Occurred())
-	  	        PyErr_SetString(RegexError, "match failure");
-		return NULL;
-	}
-	if (result >= 0) {
-		Py_INCREF(argstring);
-		re->re_lastok = argstring;
-	}
-	return PyInt_FromLong((long)result); /* Position of the match or -1 */
-}
-
-/* get the group from the regex where index can be a string (group name) or
-   an integer index [0 .. 99]
- */
-static PyObject*
-group_from_index(regexobject *re, PyObject *index)
-{
-	int i, a, b;
-	char *v;
-
-	if (PyString_Check(index))
-		if (re->re_groupindex == NULL ||
-		    !(index = PyDict_GetItem(re->re_groupindex, index)))
-		{
-			PyErr_SetString(RegexError,
-					"group() group name doesn't exist");
-			return NULL;
-		}
-
-	i = PyInt_AsLong(index);
-	if (i == -1 && PyErr_Occurred())
-		return NULL;
-
-	if (i < 0 || i >= RE_NREGS) {
-		PyErr_SetString(RegexError, "group() index out of range");
-		return NULL;
-	}
-	if (re->re_lastok == NULL) {
-		PyErr_SetString(RegexError,
-			   "group() only valid after successful match/search");
-		return NULL;
-	}
-	a = re->re_regs.start[i];
-	b = re->re_regs.end[i];
-	if (a < 0 || b < 0) {
-		Py_INCREF(Py_None);
-		return Py_None;
-	}
-	
-	if (!(v = PyString_AsString(re->re_lastok)))
-		return NULL;
-
-	return PyString_FromStringAndSize(v+a, b-a);
-}
-
-
-static PyObject *
-regobj_group(regexobject *re, PyObject *args)
-{
-	int n = PyTuple_Size(args);
-	int i;
-	PyObject *res = NULL;
-
-	if (n < 0)
-		return NULL;
-	if (n == 0) {
-		PyErr_SetString(PyExc_TypeError, "not enough arguments");
-		return NULL;
-	}
-	if (n == 1) {
-		/* return value is a single string */
-		PyObject *index = PyTuple_GetItem(args, 0);
-		if (!index)
-			return NULL;
-		
-		return group_from_index(re, index);
-	}
-
-	/* return value is a tuple */
-	if (!(res = PyTuple_New(n)))
-		return NULL;
-
-	for (i = 0; i < n; i++) {
-		PyObject *index = PyTuple_GetItem(args, i);
-		PyObject *group = NULL;
-
-		if (!index)
-			goto finally;
-		if (!(group = group_from_index(re, index)))
-			goto finally;
-		if (PyTuple_SetItem(res, i, group) < 0)
-			goto finally;
-	}
-	return res;
-
-  finally:
-	Py_DECREF(res);
-	return NULL;
-}
-
-
-static struct PyMethodDef reg_methods[] = {
-	{"match",	(PyCFunction)regobj_match, METH_VARARGS},
-	{"search",	(PyCFunction)regobj_search, METH_VARARGS},
-	{"group",	(PyCFunction)regobj_group, METH_VARARGS},
-	{NULL,		NULL}		/* sentinel */
-};
-
-
-
-static char* members[] = {
-	"last", "regs", "translate",
-	"groupindex", "realpat", "givenpat",
-	NULL
-};
-
-
-static PyObject *
-regobj_getattr(regexobject *re, char *name)
-{
-	if (strcmp(name, "regs") == 0) {
-		if (re->re_lastok == NULL) {
-			Py_INCREF(Py_None);
-			return Py_None;
-		}
-		return makeresult(&re->re_regs);
-	}
-	if (strcmp(name, "last") == 0) {
-		if (re->re_lastok == NULL) {
-			Py_INCREF(Py_None);
-			return Py_None;
-		}
-		Py_INCREF(re->re_lastok);
-		return re->re_lastok;
-	}
-	if (strcmp(name, "translate") == 0) {
-		if (re->re_translate == NULL) {
-			Py_INCREF(Py_None);
-			return Py_None;
-		}
-		Py_INCREF(re->re_translate);
-		return re->re_translate;
-	}
-	if (strcmp(name, "groupindex") == 0) {
-		if (re->re_groupindex == NULL) {
-			Py_INCREF(Py_None);
-			return Py_None;
-		}
-		Py_INCREF(re->re_groupindex);
-		return re->re_groupindex;
-	}
-	if (strcmp(name, "realpat") == 0) {
-		if (re->re_realpat == NULL) {
-			Py_INCREF(Py_None);
-			return Py_None;
-		}
-		Py_INCREF(re->re_realpat);
-		return re->re_realpat;
-	}
-	if (strcmp(name, "givenpat") == 0) {
-		if (re->re_givenpat == NULL) {
-			Py_INCREF(Py_None);
-			return Py_None;
-		}
-		Py_INCREF(re->re_givenpat);
-		return re->re_givenpat;
-	}
-	if (strcmp(name, "__members__") == 0) {
-		int i = 0;
-		PyObject *list = NULL;
-
-		/* okay, so it's unlikely this list will change that often.
-		   still, it's easier to change it in just one place.
-		 */
-		while (members[i])
-			i++;
-		if (!(list = PyList_New(i)))
-			return NULL;
-
-		i = 0;
-		while (members[i]) {
-			PyObject* v = PyString_FromString(members[i]);
-			if (!v || PyList_SetItem(list, i, v) < 0) {
-				Py_DECREF(list);
-				return NULL;
-			}
-			i++;
-		}
-		return list;
-	}
-	return Py_FindMethod(reg_methods, (PyObject *)re, name);
-}
-
-static PyTypeObject Regextype = {
-	PyObject_HEAD_INIT(NULL)
-	0,				     /*ob_size*/
-	"regex.regex",			     /*tp_name*/
-	sizeof(regexobject),		     /*tp_size*/
-	0,				     /*tp_itemsize*/
-	/* methods */
-	(destructor)reg_dealloc,	     /*tp_dealloc*/
-	0,				     /*tp_print*/
-	(getattrfunc)regobj_getattr,	     /*tp_getattr*/
-	0,				     /*tp_setattr*/
-	0,				     /*tp_compare*/
-	0,				     /*tp_repr*/
-};
-
-/* reference counting invariants:
-   pattern: borrowed
-   translate: borrowed
-   givenpat: borrowed
-   groupindex: transferred
-*/
-static PyObject *
-newregexobject(PyObject *pattern, PyObject *translate, PyObject *givenpat, PyObject *groupindex)
-{
-	regexobject *re;
-	char *pat;
-	int size;
-
-	if (!PyArg_Parse(pattern, "t#", &pat, &size))
-		return NULL;
-	
-	if (translate != NULL && PyString_Size(translate) != 256) {
-		PyErr_SetString(RegexError,
-				"translation table must be 256 bytes");
-		return NULL;
-	}
-	re = PyObject_New(regexobject, &Regextype);
-	if (re != NULL) {
-		char *error;
-		re->re_patbuf.buffer = NULL;
-		re->re_patbuf.allocated = 0;
-		re->re_patbuf.fastmap = (unsigned char *)re->re_fastmap;
-		if (translate) {
-			re->re_patbuf.translate = (unsigned char *)PyString_AsString(translate);
-			if (!re->re_patbuf.translate)
-				goto finally;
-			Py_INCREF(translate);
-		}
-		else
-			re->re_patbuf.translate = NULL;
-		re->re_translate = translate;
-		re->re_lastok = NULL;
-		re->re_groupindex = groupindex;
-		Py_INCREF(pattern);
-		re->re_realpat = pattern;
-		Py_INCREF(givenpat);
-		re->re_givenpat = givenpat;
-		error = _Py_re_compile_pattern((unsigned char *)pat, size, &re->re_patbuf);
-		if (error != NULL) {
-			PyErr_SetString(RegexError, error);
-			goto finally;
-		}
-	}
-	return (PyObject *)re;
-  finally:
-	Py_DECREF(re);
-	return NULL;
-}
-
-static PyObject *
-regex_compile(PyObject *self, PyObject *args)
-{
-	PyObject *pat = NULL;
-	PyObject *tran = NULL;
-
-	if (!PyArg_ParseTuple(args, "S|S:compile", &pat, &tran))
-		return NULL;
-	return newregexobject(pat, tran, pat, NULL);
-}
-
-static PyObject *
-symcomp(PyObject *pattern, PyObject *gdict)
-{
-	char *opat, *oend, *o, *n, *g, *v;
-	int group_count = 0;
-	int sz;
-	int escaped = 0;
-	char name_buf[128];
-	PyObject *npattern;
-	int require_escape = re_syntax & RE_NO_BK_PARENS ? 0 : 1;
-
-	if (!(opat = PyString_AsString(pattern)))
-		return NULL;
-
-	if ((sz = PyString_Size(pattern)) < 0)
-		return NULL;
-
-	oend = opat + sz;
-	o = opat;
-
-	if (oend == opat) {
-		Py_INCREF(pattern);
-		return pattern;
-	}
-
-	if (!(npattern = PyString_FromStringAndSize((char*)NULL, sz)) ||
-	    !(n = PyString_AsString(npattern)))
-		return NULL;
-
-	while (o < oend) {
-		if (*o == '(' && escaped == require_escape) {
-			char *backtrack;
-			escaped = 0;
-			++group_count;
-			*n++ = *o;
-			if (++o >= oend || *o != '<')
-				continue;
-			/* *o == '<' */
-			if (o+1 < oend && *(o+1) == '>')
-				continue;
-			backtrack = o;
-			g = name_buf;
-			for (++o; o < oend;) {
-				if (*o == '>') {
-				    PyObject *group_name = NULL;
-				    PyObject *group_index = NULL;
-				    *g++ = '\0';
-				    group_name = PyString_FromString(name_buf);
-				    group_index = PyInt_FromLong(group_count);
-				    if (group_name == NULL ||
-					group_index == NULL ||
-					PyDict_SetItem(gdict, group_name,
-						       group_index) != 0)
-				    {
-					    Py_XDECREF(group_name);
-					    Py_XDECREF(group_index);
-					    Py_XDECREF(npattern);
-					    return NULL;
-				    }
-				    Py_DECREF(group_name);
-				    Py_DECREF(group_index);
-				    ++o;     /* eat the '>' */
-				    break;
-				}
-				if (!isalnum(Py_CHARMASK(*o)) && *o != '_') {
-					o = backtrack;
-					break;
-				}
-				*g++ = *o++;
-			}
-		}
-		else if (*o == '[' && !escaped) {
-			*n++ = *o;
-			++o;		     /* eat the char following '[' */
-			*n++ = *o;
-			while (o < oend && *o != ']') {
-				++o;
-				*n++ = *o;
-			}
-			if (o < oend)
-				++o;
-		}
-		else if (*o == '\\') {
-			escaped = 1;
-			*n++ = *o;
-			++o;
-		}
-		else {
-			escaped = 0;
-			*n++ = *o;
-			++o;
-		}
-	}
-
-	if (!(v = PyString_AsString(npattern))) {
-		Py_DECREF(npattern);
-		return NULL;
-	}
-	/* _PyString_Resize() decrements npattern on failure */
-	_PyString_Resize(&npattern, n - v);
-	return npattern;
-
-}
-
-static PyObject *
-regex_symcomp(PyObject *self, PyObject *args)
-{
-	PyObject *pattern;
-	PyObject *tran = NULL;
-	PyObject *gdict = NULL;
-	PyObject *npattern;
-	PyObject *retval = NULL;
-
-	if (!PyArg_ParseTuple(args, "S|S:symcomp", &pattern, &tran))
-		return NULL;
-
-	gdict = PyDict_New();
-	if (gdict == NULL || (npattern = symcomp(pattern, gdict)) == NULL) {
-		Py_XDECREF(gdict);
-		return NULL;
-	}
-	retval = newregexobject(npattern, tran, pattern, gdict);
-	Py_DECREF(npattern);
-	return retval;
-}
-
-
-static PyObject *cache_pat;
-static PyObject *cache_prog;
-
-static int
-update_cache(PyObject *pat)
-{
-	PyObject *tuple = PyTuple_Pack(1, pat);
-	int status = 0;
-
-	if (!tuple)
-		return -1;
-
-	if (pat != cache_pat) {
-		Py_XDECREF(cache_pat);
-		cache_pat = NULL;
-		Py_XDECREF(cache_prog);
-		cache_prog = regex_compile((PyObject *)NULL, tuple);
-		if (cache_prog == NULL) {
-			status = -1;
-			goto finally;
-		}
-		cache_pat = pat;
-		Py_INCREF(cache_pat);
-	}
-  finally:
-	Py_DECREF(tuple);
-	return status;
-}
-
-static PyObject *
-regex_match(PyObject *self, PyObject *args)
-{
-	PyObject *pat, *string;
-	PyObject *tuple, *v;
-
-	if (!PyArg_ParseTuple(args, "SS:match", &pat, &string))
-		return NULL;
-	if (update_cache(pat) < 0)
-		return NULL;
-
-	if (!(tuple = Py_BuildValue("(S)", string)))
-		return NULL;
-	v = regobj_match((regexobject *)cache_prog, tuple);
-	Py_DECREF(tuple);
-	return v;
-}
-
-static PyObject *
-regex_search(PyObject *self, PyObject *args)
-{
-	PyObject *pat, *string;
-	PyObject *tuple, *v;
-
-	if (!PyArg_ParseTuple(args, "SS:search", &pat, &string))
-		return NULL;
-	if (update_cache(pat) < 0)
-		return NULL;
-
-	if (!(tuple = Py_BuildValue("(S)", string)))
-		return NULL;
-	v = regobj_search((regexobject *)cache_prog, tuple);
-	Py_DECREF(tuple);
-	return v;
-}
-
-static PyObject *
-regex_set_syntax(PyObject *self, PyObject *args)
-{
-	int syntax;
-	if (!PyArg_ParseTuple(args, "i:set_syntax", &syntax))
-		return NULL;
-	syntax = re_set_syntax(syntax);
-	/* wipe the global pattern cache */
-	Py_XDECREF(cache_pat);
-	cache_pat = NULL;
-	Py_XDECREF(cache_prog);
-	cache_prog = NULL;
-	return PyInt_FromLong((long)syntax);
-}
-
-static PyObject *
-regex_get_syntax(PyObject *self)
-{
-	return PyInt_FromLong((long)re_syntax);
-}
-
-
-static struct PyMethodDef regex_global_methods[] = {
-	{"compile",	regex_compile, METH_VARARGS},
-	{"symcomp",	regex_symcomp, METH_VARARGS},
-	{"match",	regex_match, METH_VARARGS},
-	{"search",	regex_search, METH_VARARGS},
-	{"set_syntax",	regex_set_syntax, METH_VARARGS},
-	{"get_syntax",  (PyCFunction)regex_get_syntax, METH_NOARGS},
-	{NULL,		NULL}		     /* sentinel */
-};
-
-PyMODINIT_FUNC
-initregex(void)
-{
-	PyObject *m, *d, *v;
-	int i;
-	char *s;
-	
-	/* Initialize object type */
-	Regextype.ob_type = &PyType_Type;
-
-	m = Py_InitModule("regex", regex_global_methods);
-	if (m == NULL)
-		return;
-	d = PyModule_GetDict(m);
-
-	if (PyErr_Warn(PyExc_DeprecationWarning,
-		       "the regex module is deprecated; "
-		       "please use the re module") < 0)
-		return;
-	
-	/* Initialize regex.error exception */
-	v = RegexError = PyErr_NewException("regex.error", NULL, NULL);
-	if (v == NULL || PyDict_SetItemString(d, "error", v) != 0)
-		goto finally;
-	
-	/* Initialize regex.casefold constant */
-	if (!(v = PyString_FromStringAndSize((char *)NULL, 256)))
-		goto finally;
-	
-	if (!(s = PyString_AsString(v)))
-		goto finally;
-
-	for (i = 0; i < 256; i++) {
-		if (isupper(i))
-			s[i] = tolower(i);
-		else
-			s[i] = i;
-	}
-	if (PyDict_SetItemString(d, "casefold", v) < 0)
-		goto finally;
-	Py_DECREF(v);
-
-	if (!PyErr_Occurred())
-		return;
-  finally:
-	/* Nothing */ ;
-}
diff --git a/Modules/regexpr.c b/Modules/regexpr.c
deleted file mode 100644
index e6a5417..0000000
--- a/Modules/regexpr.c
+++ /dev/null
@@ -1,2094 +0,0 @@
-/* regexpr.c
- *
- * Author: Tatu Ylonen <ylo@ngs.fi>
- *
- * Copyright (c) 1991 Tatu Ylonen, Espoo, Finland
- *
- * Permission to use, copy, modify, distribute, and sell this software
- * and its documentation for any purpose is hereby granted without
- * fee, provided that the above copyright notice appear in all copies.
- * This software is provided "as is" without express or implied
- * warranty.
- *
- * Created: Thu Sep 26 17:14:05 1991 ylo
- * Last modified: Mon Nov  4 17:06:48 1991 ylo
- * Ported to Think C: 19 Jan 1992 guido@cwi.nl
- *
- * This code draws many ideas from the regular expression packages by
- * Henry Spencer of the University of Toronto and Richard Stallman of
- * the Free Software Foundation.
- *
- * Emacs-specific code and syntax table code is almost directly borrowed
- * from GNU regexp.
- *
- * Bugs fixed and lots of reorganization by Jeffrey C. Ollie, April
- * 1997 Thanks for bug reports and ideas from Andrew Kuchling, Tim
- * Peters, Guido van Rossum, Ka-Ping Yee, Sjoerd Mullender, and
- * probably one or two others that I'm forgetting.
- *
- * $Id$ */
-
-#include "Python.h"
-#include "regexpr.h"
-
-/* The original code blithely assumed that sizeof(short) == 2.  Not
- * always true.  Original instances of "(short)x" were replaced by
- * SHORT(x), where SHORT is #defined below.  */
-
-#define SHORT(x) ((x) & 0x8000 ? (x) - 0x10000 : (x))
-
-/* The stack implementation is taken from an idea by Andrew Kuchling.
- * It's a doubly linked list of arrays. The advantages of this over a
- * simple linked list are that the number of mallocs required are
- * reduced. It also makes it possible to statically allocate enough
- * space so that small patterns don't ever need to call malloc.
- *
- * The advantages over a single array is that is periodically
- * realloced when more space is needed is that we avoid ever copying
- * the stack. */
-
-/* item_t is the basic stack element.  Defined as a union of
- * structures so that both registers, failure points, and counters can
- * be pushed/popped from the stack.  There's nothing built into the
- * item to keep track of whether a certain stack item is a register, a
- * failure point, or a counter. */
-
-typedef union item_t
-{
-	struct
-	{
-		int num;
-		int level;
-		unsigned char *start;
-		unsigned char *end;
-	} reg;
-	struct
-	{
-		int count;
-		int level;
-		int phantom;
-		unsigned char *code;
-		unsigned char *text;
-	} fail;
-	struct
-	{
-		int num;
-		int level;
-		int count;
-	} cntr;
-} item_t;
-
-#define STACK_PAGE_SIZE 256
-#define NUM_REGISTERS 256
-
-/* A 'page' of stack items. */
-
-typedef struct item_page_t
-{
-	item_t items[STACK_PAGE_SIZE];
-	struct item_page_t *prev;
-	struct item_page_t *next;
-} item_page_t;
-
-
-typedef struct match_state
-{
-	/* The number of registers that have been pushed onto the stack
-	 * since the last failure point. */
-
-	int count;
-
-	/* Used to control when registers need to be pushed onto the
-	 * stack. */
-	
-	int level;
-	
-	/* The number of failure points on the stack. */
-	
-	int point;
-	
-	/* Storage for the registers.  Each register consists of two
-	 * pointers to characters.  So register N is represented as
-	 * start[N] and end[N].  The pointers must be converted to
-	 * offsets from the beginning of the string before returning the
-	 * registers to the calling program. */
-	
-	unsigned char *start[NUM_REGISTERS];
-	unsigned char *end[NUM_REGISTERS];
-	
-	/* Keeps track of whether a register has changed recently. */
-	
-	int changed[NUM_REGISTERS];
-	
-	/* Structure to encapsulate the stack. */
-	struct
-	{
-		/* index into the current page.  If index == 0 and you need
-		 * to pop an item, move to the previous page and set index
-		 * = STACK_PAGE_SIZE - 1.  Otherwise decrement index to
-		 * push a page. If index == STACK_PAGE_SIZE and you need
-		 * to push a page move to the next page and set index =
-		 * 0. If there is no new next page, allocate a new page
-		 * and link it in. Otherwise, increment index to push a
-		 * page. */
-		
-		int index;
-		item_page_t *current; /* Pointer to the current page. */
-		item_page_t first; /* First page is statically allocated. */
-	} stack;
-} match_state;
-
-/* Initialize a state object */
-
-/* #define NEW_STATE(state) \ */
-/* memset(&state, 0, (void *)(&state.stack) - (void *)(&state)); \ */
-/* state.stack.current = &state.stack.first; \ */
-/* state.stack.first.prev = NULL; \ */
-/* state.stack.first.next = NULL; \ */
-/* state.stack.index = 0; \ */
-/* state.level = 1 */
-
-#define NEW_STATE(state, nregs) \
-{ \
-	int i; \
-	for (i = 0; i < nregs; i++) \
-	{ \
-		state.start[i] = NULL; \
-		state.end[i] = NULL; \
-		state.changed[i] = 0; \
-	} \
-	state.stack.current = &state.stack.first; \
-	state.stack.first.prev = NULL; \
-	state.stack.first.next = NULL; \
-	state.stack.index = 0; \
-	state.level = 1; \
-	state.count = 0; \
-	state.level = 0; \
-	state.point = 0; \
-}
-
-/* Free any memory that might have been malloc'd */
-
-#define FREE_STATE(state) \
-while(state.stack.first.next != NULL) \
-{ \
-	state.stack.current = state.stack.first.next; \
-	state.stack.first.next = state.stack.current->next; \
-	free(state.stack.current); \
-}
-
-/* Discard the top 'count' stack items. */
-
-#define STACK_DISCARD(stack, count, on_error) \
-stack.index -= count; \
-while (stack.index < 0) \
-{ \
-	if (stack.current->prev == NULL) \
-		on_error; \
-	stack.current = stack.current->prev; \
-	stack.index += STACK_PAGE_SIZE; \
-}
-
-/* Store a pointer to the previous item on the stack. Used to pop an
- * item off of the stack. */
-
-#define STACK_PREV(stack, top, on_error) \
-if (stack.index == 0) \
-{ \
-	if (stack.current->prev == NULL) \
-		on_error; \
-	stack.current = stack.current->prev; \
-	stack.index = STACK_PAGE_SIZE - 1; \
-} \
-else \
-{ \
-	stack.index--; \
-} \
-top = &(stack.current->items[stack.index])
-
-/* Store a pointer to the next item on the stack. Used to push an item
- * on to the stack. */
-
-#define STACK_NEXT(stack, top, on_error) \
-if (stack.index == STACK_PAGE_SIZE) \
-{ \
-	if (stack.current->next == NULL) \
-	{ \
-		stack.current->next = (item_page_t *)malloc(sizeof(item_page_t)); \
-		if (stack.current->next == NULL) \
-			on_error; \
-		stack.current->next->prev = stack.current; \
-		stack.current->next->next = NULL; \
-	} \
-	stack.current = stack.current->next; \
-	stack.index = 0; \
-} \
-top = &(stack.current->items[stack.index++])
-
-/* Store a pointer to the item that is 'count' items back in the
- * stack. STACK_BACK(stack, top, 1, on_error) is equivalent to
- * STACK_TOP(stack, top, on_error).  */
-
-#define STACK_BACK(stack, top, count, on_error) \
-{ \
-	int index; \
-	item_page_t *current; \
-	current = stack.current; \
-	index = stack.index - (count); \
-	while (index < 0) \
-	{ \
-		if (current->prev == NULL) \
-			on_error; \
-		current = current->prev; \
-		index += STACK_PAGE_SIZE; \
-	} \
-	top = &(current->items[index]); \
-}
-
-/* Store a pointer to the top item on the stack. Execute the
- * 'on_error' code if there are no items on the stack. */
-
-#define STACK_TOP(stack, top, on_error) \
-if (stack.index == 0) \
-{ \
-	if (stack.current->prev == NULL) \
-		on_error; \
-	top = &(stack.current->prev->items[STACK_PAGE_SIZE - 1]); \
-} \
-else \
-{ \
-	top = &(stack.current->items[stack.index - 1]); \
-}
-
-/* Test to see if the stack is empty */
-
-#define STACK_EMPTY(stack) ((stack.index == 0) && \
-			    (stack.current->prev == NULL))
-
-/* Return the start of register 'reg' */
-
-#define GET_REG_START(state, reg) (state.start[reg])
-
-/* Return the end of register 'reg' */
-
-#define GET_REG_END(state, reg) (state.end[reg])
-
-/* Set the start of register 'reg'. If the state of the register needs
- * saving, push it on the stack. */
-
-#define SET_REG_START(state, reg, text, on_error) \
-if(state.changed[reg] < state.level) \
-{ \
-	item_t *item; \
-	STACK_NEXT(state.stack, item, on_error); \
-	item->reg.num = reg; \
-	item->reg.start = state.start[reg]; \
-	item->reg.end = state.end[reg]; \
-	item->reg.level = state.changed[reg]; \
-	state.changed[reg] = state.level; \
-	state.count++; \
-} \
-state.start[reg] = text
-
-/* Set the end of register 'reg'. If the state of the register needs
- * saving, push it on the stack. */
-
-#define SET_REG_END(state, reg, text, on_error) \
-if(state.changed[reg] < state.level) \
-{ \
-	item_t *item; \
-	STACK_NEXT(state.stack, item, on_error); \
-	item->reg.num = reg; \
-	item->reg.start = state.start[reg]; \
-	item->reg.end = state.end[reg]; \
-	item->reg.level = state.changed[reg]; \
-	state.changed[reg] = state.level; \
-	state.count++; \
-} \
-state.end[reg] = text
-
-#define PUSH_FAILURE(state, xcode, xtext, on_error) \
-{ \
-	item_t *item; \
-	STACK_NEXT(state.stack, item, on_error); \
-	item->fail.code = xcode; \
-	item->fail.text = xtext; \
-	item->fail.count = state.count; \
-	item->fail.level = state.level; \
-	item->fail.phantom = 0; \
-	state.count = 0; \
-	state.level++; \
-	state.point++; \
-}
-
-/* Update the last failure point with a new position in the text. */
-
-#define UPDATE_FAILURE(state, xtext, on_error) \
-{ \
-	item_t *item; \
-	STACK_BACK(state.stack, item, state.count + 1, on_error); \
-	if (!item->fail.phantom) \
-	{ \
-		item_t *item2; \
-		STACK_NEXT(state.stack, item2, on_error); \
-		item2->fail.code = item->fail.code; \
-		item2->fail.text = xtext; \
-		item2->fail.count = state.count; \
-		item2->fail.level = state.level; \
-		item2->fail.phantom = 1; \
-		state.count = 0; \
-		state.level++; \
-		state.point++; \
-	} \
-	else \
-	{ \
-		STACK_DISCARD(state.stack, state.count, on_error); \
-		STACK_TOP(state.stack, item, on_error); \
-		item->fail.text = xtext; \
-		state.count = 0; \
-		state.level++; \
-	} \
-}
-
-#define POP_FAILURE(state, xcode, xtext, on_empty, on_error) \
-{ \
-	item_t *item; \
-	do \
-	{ \
-		while(state.count > 0) \
-		{ \
-			STACK_PREV(state.stack, item, on_error); \
-			state.start[item->reg.num] = item->reg.start; \
-			state.end[item->reg.num] = item->reg.end; \
-			state.changed[item->reg.num] = item->reg.level; \
-			state.count--; \
-		} \
-		STACK_PREV(state.stack, item, on_empty); \
-		xcode = item->fail.code; \
-		xtext = item->fail.text; \
-		state.count = item->fail.count; \
-		state.level = item->fail.level; \
-		state.point--; \
-	} \
-	while (item->fail.text == NULL); \
-}
-
-enum regexp_compiled_ops /* opcodes for compiled regexp */
-{
-	Cend,		      /* end of pattern reached */
-	Cbol,		      /* beginning of line */
-	Ceol,		      /* end of line */
-	Cset,		      /* character set.  Followed by 32 bytes of set. */
-	Cexact,		      /* followed by a byte to match */
-	Canychar,	      /* matches any character except newline */
-	Cstart_memory,	      /* set register start addr (followed by reg number) */
-	Cend_memory,	      /* set register end addr (followed by reg number) */
-	Cmatch_memory,	      /* match a duplicate of reg contents (regnum follows)*/
-	Cjump,		      /* followed by two bytes (lsb,msb) of displacement. */
-	Cstar_jump,	      /* will change to jump/update_failure_jump at runtime */
-	Cfailure_jump,	      /* jump to addr on failure */
-	Cupdate_failure_jump, /* update topmost failure point and jump */
-	Cdummy_failure_jump,  /* push a dummy failure point and jump */
-	Cbegbuf,	      /* match at beginning of buffer */
-	Cendbuf,	      /* match at end of buffer */
-	Cwordbeg,	      /* match at beginning of word */
-	Cwordend,	      /* match at end of word */
-	Cwordbound,	      /* match if at word boundary */
-	Cnotwordbound,        /* match if not at word boundary */
-	Csyntaxspec,	      /* matches syntax code (1 byte follows) */
-	Cnotsyntaxspec,       /* matches if syntax code does not match (1 byte follows) */
-	Crepeat1
-};
-
-enum regexp_syntax_op	/* syntax codes for plain and quoted characters */
-{
-	Rend,		  /* special code for end of regexp */
-	Rnormal,	  /* normal character */
-	Ranychar,	  /* any character except newline */
-	Rquote,		  /* the quote character */
-	Rbol,		  /* match beginning of line */
-	Reol,		  /* match end of line */
-	Roptional,	  /* match preceding expression optionally */
-	Rstar,		  /* match preceding expr zero or more times */
-	Rplus,		  /* match preceding expr one or more times */
-	Ror,		  /* match either of alternatives */
-	Ropenpar,	  /* opening parenthesis */
-	Rclosepar,	  /* closing parenthesis */
-	Rmemory,	  /* match memory register */
-	Rextended_memory, /* \vnn to match registers 10-99 */
-	Ropenset,	  /* open set.  Internal syntax hard-coded below. */
-	/* the following are gnu extensions to "normal" regexp syntax */
-	Rbegbuf,	  /* beginning of buffer */
-	Rendbuf,	  /* end of buffer */
-	Rwordchar,	  /* word character */
-	Rnotwordchar,	  /* not word character */
-	Rwordbeg,	  /* beginning of word */
-	Rwordend,	  /* end of word */
-	Rwordbound,	  /* word bound */
-	Rnotwordbound,	  /* not word bound */
-	Rnum_ops
-};
-
-static int re_compile_initialized = 0;
-static int regexp_syntax = 0;
-int re_syntax = 0; /* Exported copy of regexp_syntax */
-static unsigned char regexp_plain_ops[256];
-static unsigned char regexp_quoted_ops[256];
-static unsigned char regexp_precedences[Rnum_ops];
-static int regexp_context_indep_ops;
-static int regexp_ansi_sequences;
-
-#define NUM_LEVELS  5    /* number of precedence levels in use */
-#define MAX_NESTING 100  /* max nesting level of operators */
-
-#define SYNTAX(ch) re_syntax_table[(unsigned char)(ch)]
-
-unsigned char re_syntax_table[256];
-
-void re_compile_initialize(void)
-{
-	int a;
-  
-	static int syntax_table_inited = 0;
-
-	if (!syntax_table_inited)
-	{
-		syntax_table_inited = 1;
-		memset(re_syntax_table, 0, 256);
-		for (a = 'a'; a <= 'z'; a++)
-			re_syntax_table[a] = Sword;
-		for (a = 'A'; a <= 'Z'; a++)
-			re_syntax_table[a] = Sword;
-		for (a = '0'; a <= '9'; a++)
-			re_syntax_table[a] = Sword | Sdigit | Shexdigit;
-		for (a = '0'; a <= '7'; a++)
-			re_syntax_table[a] |= Soctaldigit;
-		for (a = 'A'; a <= 'F'; a++)
-			re_syntax_table[a] |= Shexdigit;
-		for (a = 'a'; a <= 'f'; a++)
-			re_syntax_table[a] |= Shexdigit;
-		re_syntax_table['_'] = Sword;
-		for (a = 9; a <= 13; a++)
-			re_syntax_table[a] = Swhitespace;
-		re_syntax_table[' '] = Swhitespace;
-	}
-	re_compile_initialized = 1;
-	for (a = 0; a < 256; a++)
-	{
-		regexp_plain_ops[a] = Rnormal;
-		regexp_quoted_ops[a] = Rnormal;
-	}
-	for (a = '0'; a <= '9'; a++)
-		regexp_quoted_ops[a] = Rmemory;
-	regexp_plain_ops['\134'] = Rquote;
-	if (regexp_syntax & RE_NO_BK_PARENS)
-	{
-		regexp_plain_ops['('] = Ropenpar;
-		regexp_plain_ops[')'] = Rclosepar;
-	}
-	else
-	{
-		regexp_quoted_ops['('] = Ropenpar;
-		regexp_quoted_ops[')'] = Rclosepar;
-	}
-	if (regexp_syntax & RE_NO_BK_VBAR)
-		regexp_plain_ops['\174'] = Ror;
-	else
-		regexp_quoted_ops['\174'] = Ror;
-	regexp_plain_ops['*'] = Rstar;
-	if (regexp_syntax & RE_BK_PLUS_QM)
-	{
-		regexp_quoted_ops['+'] = Rplus;
-		regexp_quoted_ops['?'] = Roptional;
-	}
-	else
-	{
-		regexp_plain_ops['+'] = Rplus;
-		regexp_plain_ops['?'] = Roptional;
-	}
-	if (regexp_syntax & RE_NEWLINE_OR)
-		regexp_plain_ops['\n'] = Ror;
-	regexp_plain_ops['\133'] = Ropenset;
-	regexp_plain_ops['\136'] = Rbol;
-	regexp_plain_ops['$'] = Reol;
-	regexp_plain_ops['.'] = Ranychar;
-	if (!(regexp_syntax & RE_NO_GNU_EXTENSIONS))
-	{
-		regexp_quoted_ops['w'] = Rwordchar;
-		regexp_quoted_ops['W'] = Rnotwordchar;
-		regexp_quoted_ops['<'] = Rwordbeg;
-		regexp_quoted_ops['>'] = Rwordend;
-		regexp_quoted_ops['b'] = Rwordbound;
-		regexp_quoted_ops['B'] = Rnotwordbound;
-		regexp_quoted_ops['`'] = Rbegbuf;
-		regexp_quoted_ops['\''] = Rendbuf;
-	}
-	if (regexp_syntax & RE_ANSI_HEX)
-		regexp_quoted_ops['v'] = Rextended_memory;
-	for (a = 0; a < Rnum_ops; a++)
-		regexp_precedences[a] = 4;
-	if (regexp_syntax & RE_TIGHT_VBAR)
-	{
-		regexp_precedences[Ror] = 3;
-		regexp_precedences[Rbol] = 2;
-		regexp_precedences[Reol] = 2;
-	}
-	else
-	{
-		regexp_precedences[Ror] = 2;
-		regexp_precedences[Rbol] = 3;
-		regexp_precedences[Reol] = 3;
-	}
-	regexp_precedences[Rclosepar] = 1;
-	regexp_precedences[Rend] = 0;
-	regexp_context_indep_ops = (regexp_syntax & RE_CONTEXT_INDEP_OPS) != 0;
-	regexp_ansi_sequences = (regexp_syntax & RE_ANSI_HEX) != 0;
-}
-
-int re_set_syntax(int syntax)
-{
-	int ret;
-	
-	ret = regexp_syntax;
-	regexp_syntax = syntax;
-	re_syntax = syntax; /* Exported copy */
-	re_compile_initialize();
-	return ret;
-}
-
-static int hex_char_to_decimal(int ch)
-{
-	if (ch >= '0' && ch <= '9')
-		return ch - '0';
-	if (ch >= 'a' && ch <= 'f')
-		return ch - 'a' + 10;
-	if (ch >= 'A' && ch <= 'F')
-		return ch - 'A' + 10;
-	return 16;
-}
-
-static void re_compile_fastmap_aux(unsigned char *code, int pos,
-                                   unsigned char *visited,
-                                   unsigned char *can_be_null,
-                                   unsigned char *fastmap)
-{
-	int a;
-	int b;
-	int syntaxcode;
-	
-	if (visited[pos])
-		return;  /* we have already been here */
-	visited[pos] = 1;
-	for (;;)
-		switch (code[pos++]) {
-		case Cend:
-			{
-				*can_be_null = 1;
-				return;
-			}
-		case Cbol:
-		case Cbegbuf:
-		case Cendbuf:
-		case Cwordbeg:
-		case Cwordend:
-		case Cwordbound:
-		case Cnotwordbound:
-		{
-			for (a = 0; a < 256; a++)
-				fastmap[a] = 1;
-			break;
-		}
-		case Csyntaxspec:
-		{
-			syntaxcode = code[pos++];
-			for (a = 0; a < 256; a++)
-				if (SYNTAX(a) & syntaxcode) 
-					fastmap[a] = 1;
-			return;
-		}
-		case Cnotsyntaxspec:
-		{
-			syntaxcode = code[pos++];
-			for (a = 0; a < 256; a++)
-				if (!(SYNTAX(a) & syntaxcode) )
-					fastmap[a] = 1;
-			return;
-		}
-		case Ceol:
-		{
-			fastmap['\n'] = 1;
-			if (*can_be_null == 0)
-				*can_be_null = 2; /* can match null, but only at end of buffer*/
-			return;
-		}
-		case Cset:
-		{
-			for (a = 0; a < 256/8; a++)
-				if (code[pos + a] != 0)
-					for (b = 0; b < 8; b++)
-						if (code[pos + a] & (1 << b))
-							fastmap[(a << 3) + b] = 1;
-			pos += 256/8;
-			return;
-		}
-		case Cexact:
-		{
-			fastmap[(unsigned char)code[pos]] = 1;
-			return;
-		}
-		case Canychar:
-		{
-			for (a = 0; a < 256; a++)
-				if (a != '\n')
-					fastmap[a] = 1;
-			return;
-		}
-		case Cstart_memory:
-		case Cend_memory:
-		{
-			pos++;
-			break;
-		}
-		case Cmatch_memory:
-		{
-			for (a = 0; a < 256; a++)
-				fastmap[a] = 1;
-			*can_be_null = 1;
-			return;
-		}
-		case Cjump:
-		case Cdummy_failure_jump:
-		case Cupdate_failure_jump:
-		case Cstar_jump:
-		{
-			a = (unsigned char)code[pos++];
-			a |= (unsigned char)code[pos++] << 8;
-			pos += (int)SHORT(a);
-			if (visited[pos])
-			{
-				/* argh... the regexp contains empty loops.  This is not
-				   good, as this may cause a failure stack overflow when
-				   matching.  Oh well. */
-				/* this path leads nowhere; pursue other paths. */
-				return;
-			}
-			visited[pos] = 1;
-			break;
-		}
-		case Cfailure_jump:
-		{
-			a = (unsigned char)code[pos++];
-			a |= (unsigned char)code[pos++] << 8;
-			a = pos + (int)SHORT(a);
-			re_compile_fastmap_aux(code, a, visited, can_be_null, fastmap);
-			break;
-		}
-		case Crepeat1:
-		{
-			pos += 2;
-			break;
-		}
-		default:
-		{
-		        PyErr_SetString(PyExc_SystemError, "Unknown regex opcode: memory corrupted?");
-		        return;
-			/*NOTREACHED*/
-		}
-		}
-}
-
-static int re_do_compile_fastmap(unsigned char *buffer, int used, int pos,
-                                 unsigned char *can_be_null,
-                                 unsigned char *fastmap)
-{
-	unsigned char small_visited[512], *visited;
-   
-	if (used <= sizeof(small_visited))
-		visited = small_visited;
-	else
-	{
-		visited = malloc(used);
-		if (!visited)
-			return 0;
-	}
-	*can_be_null = 0;
-	memset(fastmap, 0, 256);
-	memset(visited, 0, used);
-	re_compile_fastmap_aux(buffer, pos, visited, can_be_null, fastmap);
-	if (visited != small_visited)
-		free(visited);
-	return 1;
-}
-
-void re_compile_fastmap(regexp_t bufp)
-{
-	if (!bufp->fastmap || bufp->fastmap_accurate)
-		return;
-	assert(bufp->used > 0);
-	if (!re_do_compile_fastmap(bufp->buffer,
-				   bufp->used,
-				   0,
-				   &bufp->can_be_null,
-				   bufp->fastmap))
-		return;
-	if (PyErr_Occurred()) return;
-	if (bufp->buffer[0] == Cbol)
-		bufp->anchor = 1;   /* begline */
-	else
-		if (bufp->buffer[0] == Cbegbuf)
-			bufp->anchor = 2; /* begbuf */
-		else
-			bufp->anchor = 0; /* none */
-	bufp->fastmap_accurate = 1;
-}
-
-/* 
- * star is coded as:
- * 1: failure_jump 2
- *    ... code for operand of star
- *    star_jump 1
- * 2: ... code after star
- *
- * We change the star_jump to update_failure_jump if we can determine
- * that it is safe to do so; otherwise we change it to an ordinary
- * jump.
- *
- * plus is coded as
- *
- *    jump 2
- * 1: failure_jump 3
- * 2: ... code for operand of plus
- *    star_jump 1
- * 3: ... code after plus
- *
- * For star_jump considerations this is processed identically to star.
- *
- */
-
-static int re_optimize_star_jump(regexp_t bufp, unsigned char *code)
-{
-	unsigned char map[256];
-	unsigned char can_be_null;
-	unsigned char *p1;
-	unsigned char *p2;
-	unsigned char ch;
-	int a;
-	int b;
-	int num_instructions = 0;
-
-	a = (unsigned char)*code++;
-	a |= (unsigned char)*code++ << 8;
-	a = (int)SHORT(a);
-	
-	p1 = code + a + 3; /* skip the failure_jump */
-	/* Check that the jump is within the pattern */
-	if (p1<bufp->buffer || bufp->buffer+bufp->used<p1)
-	  {
-	    PyErr_SetString(PyExc_SystemError, "Regex VM jump out of bounds (failure_jump opt)");
-	    return 0;
-	  }
-	
-	assert(p1[-3] == Cfailure_jump);
-	p2 = code;
-	/* p1 points inside loop, p2 points to after loop */
-	if (!re_do_compile_fastmap(bufp->buffer, bufp->used,
-				   (int)(p2 - bufp->buffer),
-				   &can_be_null, map))
-		goto make_normal_jump;
-	
-	/* If we might introduce a new update point inside the
-	 * loop, we can't optimize because then update_jump would
-	 * update a wrong failure point.  Thus we have to be
-	 * quite careful here.
-	 */
-	
-	/* loop until we find something that consumes a character */
-  loop_p1:
-	num_instructions++;
-	switch (*p1++)
-	{
-	case Cbol:
-	case Ceol:
-	case Cbegbuf:
-	case Cendbuf:
-	case Cwordbeg:
-	case Cwordend:
-	case Cwordbound:
-	case Cnotwordbound:
-	{
-		goto loop_p1;
-	}
-	case Cstart_memory:
-	case Cend_memory:
-	{
-		p1++;
-		goto loop_p1;
-	}
-	case Cexact:
-	{
-		ch = (unsigned char)*p1++;
-		if (map[(int)ch])
-			goto make_normal_jump;
-		break;
-	}
-	case Canychar:
-	{
-		for (b = 0; b < 256; b++)
-			if (b != '\n' && map[b])
-				goto make_normal_jump;
-		break;
-	}
-	case Cset:
-	{
-		for (b = 0; b < 256; b++)
-			if ((p1[b >> 3] & (1 << (b & 7))) && map[b])
-				goto make_normal_jump;
-		p1 += 256/8;
-		break;
-	}
-	default:
-	{
-		goto make_normal_jump;
-	}
-	}
-	/* now we know that we can't backtrack. */
-	while (p1 != p2 - 3)
-	{
-		num_instructions++;
-		switch (*p1++)
-		{
-		case Cend:
-		{
-			return 0;
-		}
-		case Cbol:
-		case Ceol:
-		case Canychar:
-		case Cbegbuf:
-		case Cendbuf:
-		case Cwordbeg:
-		case Cwordend:
-		case Cwordbound:
-		case Cnotwordbound:
-		{
-			break;
-		}
-		case Cset:
-		{
-			p1 += 256/8;
-			break;
-		}
-		case Cexact:
-		case Cstart_memory:
-		case Cend_memory:
-		case Cmatch_memory:
-		case Csyntaxspec:
-		case Cnotsyntaxspec:
-		{
-			p1++;
-			break;
-		}
-		case Cjump:
-		case Cstar_jump:
-		case Cfailure_jump:
-		case Cupdate_failure_jump:
-		case Cdummy_failure_jump:
-		{
-			goto make_normal_jump;
-		}
-		default:
-		{
-			return 0;
-		}
-		}
-	}
-	
-	/* make_update_jump: */
-	code -= 3;
-	a += 3;  /* jump to after the Cfailure_jump */
-	code[0] = Cupdate_failure_jump;
-	code[1] = a & 0xff;
-	code[2] = a >> 8;
-	if (num_instructions > 1)
-		return 1;
-	assert(num_instructions == 1);
-	/* if the only instruction matches a single character, we can do
-	 * better */
-	p1 = code + 3 + a;   /* start of sole instruction */
-	if (*p1 == Cset || *p1 == Cexact || *p1 == Canychar ||
-	    *p1 == Csyntaxspec || *p1 == Cnotsyntaxspec)
-		code[0] = Crepeat1;
-	return 1;
-	
-  make_normal_jump:
-	code -= 3;
-	*code = Cjump;
-	return 1;
-}
-
-static int re_optimize(regexp_t bufp)
-{
-	unsigned char *code;
-	
-	code = bufp->buffer;
-	
-	while(1)
-	{
-		switch (*code++)
-		{
-		case Cend:
-		{
-			return 1;
-		}
-		case Canychar:
-		case Cbol:
-		case Ceol:
-		case Cbegbuf:
-		case Cendbuf:
-		case Cwordbeg:
-		case Cwordend:
-		case Cwordbound:
-		case Cnotwordbound:
-		{
-			break;
-		}
-		case Cset:
-		{
-			code += 256/8;
-			break;
-		}
-		case Cexact:
-		case Cstart_memory:
-		case Cend_memory:
-		case Cmatch_memory:
-		case Csyntaxspec:
-		case Cnotsyntaxspec:
-		{
-			code++;
-			break;
-		}
-		case Cstar_jump:
-		{
-			if (!re_optimize_star_jump(bufp, code))
-			{
-				return 0;
-			}
-			/* fall through */
-		}
-		case Cupdate_failure_jump:
-		case Cjump:
-		case Cdummy_failure_jump:
-		case Cfailure_jump:
-		case Crepeat1:
-		{
-			code += 2;
-			break;
-		}
-		default:
-		{
-			return 0;
-		}
-		}
-	}
-}
-
-#define NEXTCHAR(var) \
-{ \
-	if (pos >= size) \
-		goto ends_prematurely; \
-	(var) = regex[pos]; \
-	pos++; \
-}
-
-#define ALLOC(amount) \
-{ \
-	  if (pattern_offset+(amount) > alloc) \
-	  { \
-		  alloc += 256 + (amount); \
-		  pattern = realloc(pattern, alloc); \
-		  if (!pattern) \
-			  goto out_of_memory; \
-	  } \
-}
-
-#define STORE(ch) pattern[pattern_offset++] = (ch)
-
-#define CURRENT_LEVEL_START (starts[starts_base + current_level])
-
-#define SET_LEVEL_START starts[starts_base + current_level] = pattern_offset
-
-#define PUSH_LEVEL_STARTS \
-if (starts_base < (MAX_NESTING-1)*NUM_LEVELS) \
-	starts_base += NUM_LEVELS; \
-else \
-	goto too_complex \
-
-#define POP_LEVEL_STARTS starts_base -= NUM_LEVELS
-
-#define PUT_ADDR(offset,addr) \
-{ \
-	int disp = (addr) - (offset) - 2; \
-	pattern[(offset)] = disp & 0xff; \
-	pattern[(offset)+1] = (disp>>8) & 0xff; \
-}
-
-#define INSERT_JUMP(pos,type,addr) \
-{ \
-	int a, p = (pos), t = (type), ad = (addr); \
-	for (a = pattern_offset - 1; a >= p; a--) \
-		pattern[a + 3] = pattern[a]; \
-	pattern[p] = t; \
-	PUT_ADDR(p+1,ad); \
-	pattern_offset += 3; \
-}
-
-#define SETBIT(buf,offset,bit) (buf)[(offset)+(bit)/8] |= (1<<((bit) & 7))
-
-#define SET_FIELDS \
-{ \
-	bufp->allocated = alloc; \
-	bufp->buffer = pattern; \
-	bufp->used = pattern_offset; \
-}
-    
-#define GETHEX(var) \
-{ \
-	unsigned char gethex_ch, gethex_value; \
-	NEXTCHAR(gethex_ch); \
-	gethex_value = hex_char_to_decimal(gethex_ch); \
-	if (gethex_value == 16) \
-		goto hex_error; \
-	NEXTCHAR(gethex_ch); \
-	gethex_ch = hex_char_to_decimal(gethex_ch); \
-	if (gethex_ch == 16) \
-		goto hex_error; \
-	(var) = gethex_value * 16 + gethex_ch; \
-}
-
-#define ANSI_TRANSLATE(ch) \
-{ \
-	switch (ch) \
-	{ \
-	case 'a': \
-	case 'A': \
-	{ \
-		ch = 7; /* audible bell */ \
-		break; \
-	} \
-	case 'b': \
-	case 'B': \
-	{ \
-		ch = 8; /* backspace */ \
-		break; \
-	} \
-	case 'f': \
-	case 'F': \
-	{ \
-		ch = 12; /* form feed */ \
-		break; \
-	} \
-	case 'n': \
-	case 'N': \
-	{ \
-		ch = 10; /* line feed */ \
-		break; \
-	} \
-	case 'r': \
-	case 'R': \
-	{ \
-		ch = 13; /* carriage return */ \
-		break; \
-	} \
-	case 't': \
-	case 'T': \
-	{ \
-	      ch = 9; /* tab */ \
-	      break; \
-	} \
-	case 'v': \
-	case 'V': \
-	{ \
-		ch = 11; /* vertical tab */ \
-		break; \
-	} \
-	case 'x': /* hex code */ \
-	case 'X': \
-	{ \
-		GETHEX(ch); \
-		break; \
-	} \
-	default: \
-	{ \
-		/* other characters passed through */ \
-		if (translate) \
-			ch = translate[(unsigned char)ch]; \
-		break; \
-	} \
-	} \
-}
-
-char *re_compile_pattern(unsigned char *regex, int size, regexp_t bufp)
-{
-	int a;
-	int pos;
-	int op;
-	int current_level;
-	int level;
-	int opcode;
-	int pattern_offset = 0, alloc;
-	int starts[NUM_LEVELS * MAX_NESTING];
-	int starts_base;
-	int future_jumps[MAX_NESTING];
-	int num_jumps;
-	unsigned char ch = '\0';
-	unsigned char *pattern;
-	unsigned char *translate;
-	int next_register;
-	int paren_depth;
-	int num_open_registers;
-	int open_registers[RE_NREGS];
-	int beginning_context;
-	
-	if (!re_compile_initialized)
-		re_compile_initialize();
-	bufp->used = 0;
-	bufp->fastmap_accurate = 0;
-	bufp->uses_registers = 1;
-	bufp->num_registers = 1;
-	translate = bufp->translate;
-	pattern = bufp->buffer;
-	alloc = bufp->allocated;
-	if (alloc == 0 || pattern == NULL)
-	{
-		alloc = 256;
-		pattern = malloc(alloc);
-		if (!pattern)
-			goto out_of_memory;
-	}
-	pattern_offset = 0;
-	starts_base = 0;
-	num_jumps = 0;
-	current_level = 0;
-	SET_LEVEL_START;
-	num_open_registers = 0;
-	next_register = 1;
-	paren_depth = 0;
-	beginning_context = 1;
-	op = -1;
-	/* we use Rend dummy to ensure that pending jumps are updated
-	   (due to low priority of Rend) before exiting the loop. */
-	pos = 0;
-	while (op != Rend)
-	{
-		if (pos >= size)
-			op = Rend;
-		else
-		{
-			NEXTCHAR(ch);
-			if (translate)
-				ch = translate[(unsigned char)ch];
-			op = regexp_plain_ops[(unsigned char)ch];
-			if (op == Rquote)
-			{
-				NEXTCHAR(ch);
-				op = regexp_quoted_ops[(unsigned char)ch];
-				if (op == Rnormal && regexp_ansi_sequences)
-					ANSI_TRANSLATE(ch);
-			}
-		}
-		level = regexp_precedences[op];
-		/* printf("ch='%c' op=%d level=%d current_level=%d
-		   curlevstart=%d\n", ch, op, level, current_level,
-		   CURRENT_LEVEL_START); */
-		if (level > current_level)
-		{
-			for (current_level++; current_level < level; current_level++)
-				SET_LEVEL_START;
-			SET_LEVEL_START;
-		}
-		else
-			if (level < current_level)
-			{
-				current_level = level;
-				for (;num_jumps > 0 &&
-					     future_jumps[num_jumps-1] >= CURRENT_LEVEL_START;
-				     num_jumps--)
-					PUT_ADDR(future_jumps[num_jumps-1], pattern_offset);
-			}
-		switch (op)
-		{
-		case Rend:
-		{
-			break;
-		}
-		case Rnormal:
-		{
-		  normal_char:
-			opcode = Cexact;
-		  store_opcode_and_arg: /* opcode & ch must be set */
-			SET_LEVEL_START;
-			ALLOC(2);
-			STORE(opcode);
-			STORE(ch);
-			break;
-		}
-		case Ranychar:
-		{
-			opcode = Canychar;
-		  store_opcode:
-			SET_LEVEL_START;
-			ALLOC(1);
-			STORE(opcode);
-			break;
-		}
-		case Rquote:
-		{
-			Py_FatalError("Rquote");
-			/*NOTREACHED*/
-		}
-		case Rbol:
-		{
-			if (!beginning_context) {
-				if (regexp_context_indep_ops)
-					goto op_error;
-				else
-					goto normal_char;
-			}
-			opcode = Cbol;
-			goto store_opcode;
-		}
-		case Reol:
-		{
-			if (!((pos >= size) ||
-			      ((regexp_syntax & RE_NO_BK_VBAR) ?
-			       (regex[pos] == '\174') :
-			       (pos+1 < size && regex[pos] == '\134' &&
-				regex[pos+1] == '\174')) ||
-			      ((regexp_syntax & RE_NO_BK_PARENS)?
-			       (regex[pos] == ')'):
-			       (pos+1 < size && regex[pos] == '\134' &&
-				regex[pos+1] == ')')))) {
-				if (regexp_context_indep_ops)
-					goto op_error;
-				else
-					goto normal_char;
-			}
-			opcode = Ceol;
-			goto store_opcode;
-			/* NOTREACHED */
-			break;
-		}
-		case Roptional:
-		{
-			if (beginning_context) {
-				if (regexp_context_indep_ops)
-					goto op_error;
-				else
-					goto normal_char;
-			}
-			if (CURRENT_LEVEL_START == pattern_offset)
-				break; /* ignore empty patterns for ? */
-			ALLOC(3);
-			INSERT_JUMP(CURRENT_LEVEL_START, Cfailure_jump,
-				    pattern_offset + 3);
-			break;
-		}
-		case Rstar:
-		case Rplus:
-		{
-			if (beginning_context) {
-				if (regexp_context_indep_ops)
-					goto op_error;
-				else
-					goto normal_char;
-			}
-			if (CURRENT_LEVEL_START == pattern_offset)
-				break; /* ignore empty patterns for + and * */
-			ALLOC(9);
-			INSERT_JUMP(CURRENT_LEVEL_START, Cfailure_jump,
-				    pattern_offset + 6);
-			INSERT_JUMP(pattern_offset, Cstar_jump, CURRENT_LEVEL_START);
-			if (op == Rplus)  /* jump over initial failure_jump */
-				INSERT_JUMP(CURRENT_LEVEL_START, Cdummy_failure_jump,
-					    CURRENT_LEVEL_START + 6);
-			break;
-		}
-		case Ror:
-		{
-			ALLOC(6);
-			INSERT_JUMP(CURRENT_LEVEL_START, Cfailure_jump,
-				    pattern_offset + 6);
-			if (num_jumps >= MAX_NESTING)
-				goto too_complex;
-			STORE(Cjump);
-			future_jumps[num_jumps++] = pattern_offset;
-			STORE(0);
-			STORE(0);
-			SET_LEVEL_START;
-			break;
-		}
-		case Ropenpar:
-		{
-			SET_LEVEL_START;
-			if (next_register < RE_NREGS)
-			{
-				bufp->uses_registers = 1;
-				ALLOC(2);
-				STORE(Cstart_memory);
-				STORE(next_register);
-				open_registers[num_open_registers++] = next_register;
-				bufp->num_registers++;
-				next_register++;
-			}
-			paren_depth++;
-			PUSH_LEVEL_STARTS;
-			current_level = 0;
-			SET_LEVEL_START;
-			break;
-		}
-		case Rclosepar:
-		{
-			if (paren_depth <= 0)
-				goto parenthesis_error;
-			POP_LEVEL_STARTS;
-			current_level = regexp_precedences[Ropenpar];
-			paren_depth--;
-			if (paren_depth < num_open_registers)
-			{
-				bufp->uses_registers = 1;
-				ALLOC(2);
-				STORE(Cend_memory);
-				num_open_registers--;
-				STORE(open_registers[num_open_registers]);
-			}
-			break;
-		}
-		case Rmemory:
-		{
-			if (ch == '0')
-				goto bad_match_register;
-			assert(ch >= '0' && ch <= '9');
-			bufp->uses_registers = 1;
-			opcode = Cmatch_memory;
-			ch -= '0';
-			goto store_opcode_and_arg;
-		}
-		case Rextended_memory:
-		{
-			NEXTCHAR(ch);
-			if (ch < '0' || ch > '9')
-				goto bad_match_register;
-			NEXTCHAR(a);
-			if (a < '0' || a > '9')
-				goto bad_match_register;
-			ch = 10 * (a - '0') + ch - '0';
-			if (ch == 0 || ch >= RE_NREGS)
-				goto bad_match_register;
-			bufp->uses_registers = 1;
-			opcode = Cmatch_memory;
-			goto store_opcode_and_arg;
-		}
-		case Ropenset:
-		{
-			int complement;
-			int prev;
-			int offset;
-			int range;
-                        int firstchar;
-                        
-			SET_LEVEL_START;
-			ALLOC(1+256/8);
-			STORE(Cset);
-			offset = pattern_offset;
-			for (a = 0; a < 256/8; a++)
-				STORE(0);
-			NEXTCHAR(ch);
-			if (translate)
-				ch = translate[(unsigned char)ch];
-			if (ch == '\136')
-			{
-				complement = 1;
-				NEXTCHAR(ch);
-				if (translate)
-					ch = translate[(unsigned char)ch];
-			}
-			else
-				complement = 0;
-			prev = -1;
-			range = 0;
-			firstchar = 1;
-			while (ch != '\135' || firstchar)
-			{
-				firstchar = 0;
-				if (regexp_ansi_sequences && ch == '\134')
-				{
-					NEXTCHAR(ch);
-					ANSI_TRANSLATE(ch);
-				}
-				if (range)
-				{
-					for (a = prev; a <= (int)ch; a++)
-						SETBIT(pattern, offset, a);
-					prev = -1;
-					range = 0;
-				}
-				else
-					if (prev != -1 && ch == '-')
-						range = 1;
-					else
-					{
-						SETBIT(pattern, offset, ch);
-						prev = ch;
-					}
-				NEXTCHAR(ch);
-				if (translate)
-					ch = translate[(unsigned char)ch];
-			}
-			if (range)
-				SETBIT(pattern, offset, '-');
-			if (complement)
-			{
-				for (a = 0; a < 256/8; a++)
-					pattern[offset+a] ^= 0xff;
-			}
-			break;
-		}
-		case Rbegbuf:
-		{
-			opcode = Cbegbuf;
-			goto store_opcode;
-		}
-		case Rendbuf:
-		{
-			opcode = Cendbuf;
-			goto store_opcode;
-		}
-		case Rwordchar:
-		{
-			opcode = Csyntaxspec;
-			ch = Sword;
-			goto store_opcode_and_arg;
-		}
-		case Rnotwordchar:
-		{
-			opcode = Cnotsyntaxspec;
-			ch = Sword;
-			goto store_opcode_and_arg;
-		}
-		case Rwordbeg:
-		{
-			opcode = Cwordbeg;
-			goto store_opcode;
-		}
-		case Rwordend:
-		{
-			opcode = Cwordend;
-			goto store_opcode;
-		}
-		case Rwordbound:
-		{
-			opcode = Cwordbound;
-			goto store_opcode;
-		}
-		case Rnotwordbound:
-		{
-			opcode = Cnotwordbound;
-			goto store_opcode;
-		}
-		default:
-		{
-			abort();
-		}
-		}
-		beginning_context = (op == Ropenpar || op == Ror);
-	}
-	if (starts_base != 0)
-		goto parenthesis_error;
-	assert(num_jumps == 0);
-	ALLOC(1);
-	STORE(Cend);
-	SET_FIELDS;
-	if(!re_optimize(bufp))
-		return "Optimization error";
-	return NULL;
-
-  op_error:
-	SET_FIELDS;
-	return "Badly placed special character";
-
-  bad_match_register:
-	SET_FIELDS;
-	return "Bad match register number";
-   
-  hex_error:
-	SET_FIELDS;
-	return "Bad hexadecimal number";
-   
-  parenthesis_error:
-	SET_FIELDS;
-	return "Badly placed parenthesis";
-   
-  out_of_memory:
-	SET_FIELDS;
-	return "Out of memory";
-   
-  ends_prematurely:
-	SET_FIELDS;
-	return "Regular expression ends prematurely";
-
-  too_complex:
-	SET_FIELDS;
-	return "Regular expression too complex";
-}
-
-#undef CHARAT
-#undef NEXTCHAR
-#undef GETHEX
-#undef ALLOC
-#undef STORE
-#undef CURRENT_LEVEL_START
-#undef SET_LEVEL_START
-#undef PUSH_LEVEL_STARTS
-#undef POP_LEVEL_STARTS
-#undef PUT_ADDR
-#undef INSERT_JUMP
-#undef SETBIT
-#undef SET_FIELDS
-
-#define PREFETCH if (text == textend) goto fail
-
-#define NEXTCHAR(var) \
-PREFETCH; \
-var = (unsigned char)*text++; \
-if (translate) \
-	var = translate[var]
-
-int re_match(regexp_t bufp, unsigned char *string, int size, int pos,
-             regexp_registers_t old_regs)
-{
-	unsigned char *code;
-	unsigned char *translate;
-	unsigned char *text;
-	unsigned char *textstart;
-	unsigned char *textend;
-	int a;
-	int b;
-	int ch;
-	int reg;
-	int match_end;
-	unsigned char *regstart;
-	unsigned char *regend;
-	int regsize;
-	match_state state;
-  
-	assert(pos >= 0 && size >= 0);
-	assert(pos <= size);
-  
-	text = string + pos;
-	textstart = string;
-	textend = string + size;
-  
-	code = bufp->buffer;
-  
-	translate = bufp->translate;
-  
-	NEW_STATE(state, bufp->num_registers);
-
-  continue_matching:
-	switch (*code++)
-	{
-	case Cend:
-	{
-		match_end = text - textstart;
-		if (old_regs)
-		{
-			old_regs->start[0] = pos;
-			old_regs->end[0] = match_end;
-			if (!bufp->uses_registers)
-			{
-				for (a = 1; a < RE_NREGS; a++)
-				{
-					old_regs->start[a] = -1;
-					old_regs->end[a] = -1;
-				}
-			}
-			else
-			{
-				for (a = 1; a < bufp->num_registers; a++)
-				{
-					if ((GET_REG_START(state, a) == NULL) ||
-					    (GET_REG_END(state, a) == NULL))
-					{
-						old_regs->start[a] = -1;
-						old_regs->end[a] = -1;
-						continue;
-					}
-					old_regs->start[a] = GET_REG_START(state, a) - textstart;
-					old_regs->end[a] = GET_REG_END(state, a) - textstart;
-				}
-				for (; a < RE_NREGS; a++)
-				{
-					old_regs->start[a] = -1;
-					old_regs->end[a] = -1;
-				}
-			}
-		}
-		FREE_STATE(state);
-		return match_end - pos;
-	}
-	case Cbol:
-	{
-		if (text == textstart || text[-1] == '\n')
-			goto continue_matching;
-		goto fail;
-	}
-	case Ceol:
-	{
-		if (text == textend || *text == '\n')
-			goto continue_matching;
-		goto fail;
-	}
-	case Cset:
-	{
-		NEXTCHAR(ch);
-		if (code[ch/8] & (1<<(ch & 7)))
-		{
-			code += 256/8;
-			goto continue_matching;
-		}
-		goto fail;
-	}
-	case Cexact:
-	{
-		NEXTCHAR(ch);
-		if (ch != (unsigned char)*code++)
-			goto fail;
-		goto continue_matching;
-	}
-	case Canychar:
-	{
-		NEXTCHAR(ch);
-		if (ch == '\n')
-			goto fail;
-		goto continue_matching;
-	}
-	case Cstart_memory:
-	{
-		reg = *code++;
-		SET_REG_START(state, reg, text, goto error);
-		goto continue_matching;
-	}
-	case Cend_memory:
-	{
-		reg = *code++;
-		SET_REG_END(state, reg, text, goto error);
-		goto continue_matching;
-	}
-	case Cmatch_memory:
-	{
-		reg = *code++;
-		regstart = GET_REG_START(state, reg);
-		regend = GET_REG_END(state, reg);
-		if ((regstart == NULL) || (regend == NULL))
-			goto fail;  /* or should we just match nothing? */
-		regsize = regend - regstart;
-
-		if (regsize > (textend - text))
-			goto fail;
-		if(translate)
-		{
-			for (; regstart < regend; regstart++, text++)
-				if (translate[*regstart] != translate[*text])
-					goto fail;
-		}
-		else
-			for (; regstart < regend; regstart++, text++)
-				if (*regstart != *text)
-					goto fail;
-		goto continue_matching;
-	}
-	case Cupdate_failure_jump:
-	{
-		UPDATE_FAILURE(state, text, goto error);
-		/* fall to next case */
-	}
-	/* treat Cstar_jump just like Cjump if it hasn't been optimized */
-	case Cstar_jump:
-	case Cjump:
-	{
-		a = (unsigned char)*code++;
-		a |= (unsigned char)*code++ << 8;
-		code += (int)SHORT(a);
-		if (code<bufp->buffer || bufp->buffer+bufp->used<code) {
-		        PyErr_SetString(PyExc_SystemError, "Regex VM jump out of bounds (Cjump)");
-			FREE_STATE(state);
-            	        return -2;
-         	}
-		goto continue_matching;
-	}
-	case Cdummy_failure_jump:
-	{
-                unsigned char *failuredest;
-	  
-		a = (unsigned char)*code++;
-		a |= (unsigned char)*code++ << 8;
-		a = (int)SHORT(a);
-		assert(*code == Cfailure_jump);
-		b = (unsigned char)code[1];
-		b |= (unsigned char)code[2] << 8;
-                failuredest = code + (int)SHORT(b) + 3;
-		if (failuredest<bufp->buffer || bufp->buffer+bufp->used < failuredest) {
-		        PyErr_SetString(PyExc_SystemError, "Regex VM jump out of bounds (Cdummy_failure_jump failuredest)");
-			FREE_STATE(state);
-            	        return -2;
-		}
-		PUSH_FAILURE(state, failuredest, NULL, goto error);
-		code += a;
-		if (code<bufp->buffer || bufp->buffer+bufp->used < code) {
-		        PyErr_SetString(PyExc_SystemError, "Regex VM jump out of bounds (Cdummy_failure_jump code)");
-			FREE_STATE(state);
-            	        return -2;
-         	}
-		goto continue_matching;
-	}
-	case Cfailure_jump:
-	{
-		a = (unsigned char)*code++;
-		a |= (unsigned char)*code++ << 8;
-		a = (int)SHORT(a);
-		if (code+a<bufp->buffer || bufp->buffer+bufp->used < code+a) {
-		        PyErr_SetString(PyExc_SystemError, "Regex VM jump out of bounds (Cfailure_jump)");
-			FREE_STATE(state);
-            	        return -2;
-         	}
-		PUSH_FAILURE(state, code + a, text, goto error);
-		goto continue_matching;
-	}
-	case Crepeat1:
-	{
-		unsigned char *pinst;
-		a = (unsigned char)*code++;
-		a |= (unsigned char)*code++ << 8;
-		a = (int)SHORT(a);
-		pinst = code + a;
-		if (pinst<bufp->buffer || bufp->buffer+bufp->used<pinst) {
-		        PyErr_SetString(PyExc_SystemError, "Regex VM jump out of bounds (Crepeat1)");
-			FREE_STATE(state);
-            	        return -2;
-         	}
-		/* pinst is sole instruction in loop, and it matches a
-		 * single character.  Since Crepeat1 was originally a
-		 * Cupdate_failure_jump, we also know that backtracking
-		 * is useless: so long as the single-character
-		 * expression matches, it must be used.  Also, in the
-		 * case of +, we've already matched one character, so +
-		 * can't fail: nothing here can cause a failure.  */
-		switch (*pinst++)
-		{
-		case Cset:
-		  {
-		        if (translate)
-			{
-				while (text < textend)
-				{
-					ch = translate[(unsigned char)*text];
-					if (pinst[ch/8] & (1<<(ch & 7)))
-						text++;
-					else
-						break;
-				}
-			}
-			else
-			{
-				while (text < textend)
-				{
-					ch = (unsigned char)*text;
-					if (pinst[ch/8] & (1<<(ch & 7)))
-						text++;
-					else
-						break;
-				}
-			}
-			break;
-                }
-		case Cexact:
-		{
-			ch = (unsigned char)*pinst;
-			if (translate)
-			{
-				while (text < textend &&
-				       translate[(unsigned char)*text] == ch)
-					text++;
-			}
-			else
-			{
-				while (text < textend && (unsigned char)*text == ch)
-					text++;
-			}
-			break;
-		}
-		case Canychar:
-		{
-			while (text < textend && (unsigned char)*text != '\n')
-				text++;
-			break;
-		}
-		case Csyntaxspec:
-		{
-			a = (unsigned char)*pinst;
-			if (translate)
-			{
-				while (text < textend &&
-				       (SYNTAX(translate[*text]) & a) )
-					text++;
-			}
-			else
-			{
-				while (text < textend && (SYNTAX(*text) & a) )
-					text++;
-			}
-			break;
-		}
-		case Cnotsyntaxspec:
-		{
-			a = (unsigned char)*pinst;
-			if (translate)
-			{
-				while (text < textend &&
-				       !(SYNTAX(translate[*text]) & a) )
-					text++;
-			}
-			else
-			{
-				while (text < textend && !(SYNTAX(*text) & a) )
-					text++;
-			}
-			break;
-		}
-		default:
-		{
-		        FREE_STATE(state);
-		        PyErr_SetString(PyExc_SystemError, "Unknown regex opcode: memory corrupted?");
-		        return -2;
-			/*NOTREACHED*/
-		}
-		}
-		/* due to the funky way + and * are compiled, the top
-		 * failure- stack entry at this point is actually a
-		 * success entry -- update it & pop it */
-		UPDATE_FAILURE(state, text, goto error);
-		goto fail;      /* i.e., succeed <wink/sigh> */
-	}
-	case Cbegbuf:
-	{
-		if (text == textstart)
-			goto continue_matching;
-		goto fail;
-	}
-	case Cendbuf:
-	{
-		if (text == textend)
-			goto continue_matching;
-		goto fail;
-	}
-	case Cwordbeg:
-	{
-		if (text == textend)
-			goto fail;
-		if (!(SYNTAX(*text) & Sword)) 
-			goto fail;
-		if (text == textstart)
-			goto continue_matching;
-		if (!(SYNTAX(text[-1]) & Sword))
-			goto continue_matching;
-		goto fail;
-	}
-	case Cwordend:
-	{
-		if (text == textstart)
-			goto fail;
-		if (!(SYNTAX(text[-1]) & Sword))
-			goto fail;
-		if (text == textend)
-			goto continue_matching;
-		if (!(SYNTAX(*text) & Sword))
-		        goto continue_matching;
-                goto fail;
-	}
-	case Cwordbound:
-	{
-		/* Note: as in gnu regexp, this also matches at the
-		 * beginning and end of buffer.  */
-
-		if (text == textstart || text == textend)
-			goto continue_matching;
-		if ((SYNTAX(text[-1]) & Sword) ^ (SYNTAX(*text) & Sword))
-			goto continue_matching;
-		goto fail;
-	}
-	case Cnotwordbound:
-	{
-		/* Note: as in gnu regexp, this never matches at the
-		 * beginning and end of buffer.  */
-		if (text == textstart || text == textend)
-			goto fail;
-		if (!((SYNTAX(text[-1]) & Sword) ^ (SYNTAX(*text) & Sword)))
-      		        goto continue_matching;
-		goto fail;
-	}
-	case Csyntaxspec:
-	{
-		NEXTCHAR(ch);
-		if (!(SYNTAX(ch) & (unsigned char)*code++))
-			goto fail;
-		goto continue_matching;
-	}
-	case Cnotsyntaxspec:
-	{
-		NEXTCHAR(ch);
-		if (SYNTAX(ch) & (unsigned char)*code++)
-			goto fail;
-		goto continue_matching;
-	}
-	default:
-	{
-	        FREE_STATE(state);
-	        PyErr_SetString(PyExc_SystemError, "Unknown regex opcode: memory corrupted?");
-		return -2;
-		/*NOTREACHED*/
-	}
-	}
-	
-	
-
-#if 0 /* This line is never reached --Guido */
-	abort();
-#endif
-	/*
-	 *NOTREACHED
-	 */
-
-	/* Using "break;" in the above switch statement is equivalent to "goto fail;" */
-  fail:
-	POP_FAILURE(state, code, text, goto done_matching, goto error);
-	goto continue_matching;
-  
-  done_matching:
-/*   if(translated != NULL) */
-/*      free(translated); */
-	FREE_STATE(state);
-	return -1;
-
-  error:
-/*   if (translated != NULL) */
-/*      free(translated); */
-	FREE_STATE(state);
-	return -2;
-}
-	
-
-#undef PREFETCH
-#undef NEXTCHAR
-
-int re_search(regexp_t bufp, unsigned char *string, int size, int pos,
-              int range, regexp_registers_t regs)
-{
-	unsigned char *fastmap;
-	unsigned char *translate;
-	unsigned char *text;
-	unsigned char *partstart;
-	unsigned char *partend;
-	int dir;
-	int ret;
-	unsigned char anchor;
-  
-	assert(size >= 0 && pos >= 0);
-	assert(pos + range >= 0 && pos + range <= size); /* Bugfix by ylo */
-  
-	fastmap = bufp->fastmap;
-	translate = bufp->translate;
-	if (fastmap && !bufp->fastmap_accurate) {
-                re_compile_fastmap(bufp);
-	        if (PyErr_Occurred()) return -2;
-	}
-	
-	anchor = bufp->anchor;
-	if (bufp->can_be_null == 1) /* can_be_null == 2: can match null at eob */
-		fastmap = NULL;
-
-	if (range < 0)
-	{
-		dir = -1;
-		range = -range;
-	}
-	else
-		dir = 1;
-
-	if (anchor == 2) {
-		if (pos != 0)
-			return -1;
-		else
-			range = 0;
-	}
-
-	for (; range >= 0; range--, pos += dir)
-	{
-		if (fastmap)
-		{
-			if (dir == 1)
-			{ /* searching forwards */
-
-				text = string + pos;
-				partend = string + size;
-				partstart = text;
-				if (translate)
-					while (text != partend &&
-					       !fastmap[(unsigned char) translate[(unsigned char)*text]])
-						text++;
-				else
-					while (text != partend && !fastmap[(unsigned char)*text])
-						text++;
-				pos += text - partstart;
-				range -= text - partstart;
-				if (pos == size && bufp->can_be_null == 0)
-					return -1;
-			}
-			else
-			{ /* searching backwards */
-				text = string + pos;
-				partstart = string + pos - range;
-				partend = text;
-				if (translate)
-					while (text != partstart &&
-					       !fastmap[(unsigned char)
-						       translate[(unsigned char)*text]])
-						text--;
-				else
-					while (text != partstart &&
-					       !fastmap[(unsigned char)*text])
-						text--;
-				pos -= partend - text;
-				range -= partend - text;
-			}
-		}
-		if (anchor == 1)
-		{ /* anchored to begline */
-			if (pos > 0 && (string[pos - 1] != '\n'))
-				continue;
-		}
-		assert(pos >= 0 && pos <= size);
-		ret = re_match(bufp, string, size, pos, regs);
-		if (ret >= 0)
-			return pos;
-		if (ret == -2)
-			return -2;
-	}
-	return -1;
-}
-
-/*
-** Local Variables:
-** mode: c
-** c-file-style: "python"
-** End:
-*/
diff --git a/Modules/regexpr.h b/Modules/regexpr.h
deleted file mode 100644
index 2aee62d..0000000
--- a/Modules/regexpr.h
+++ /dev/null
@@ -1,155 +0,0 @@
-/*
- * -*- mode: c-mode; c-file-style: python -*-
- */
-
-#ifndef Py_REGEXPR_H
-#define Py_REGEXPR_H
-#ifdef __cplusplus
-extern "C" {
-#endif
-
-/*
- * regexpr.h
- *
- * Author: Tatu Ylonen <ylo@ngs.fi>
- *
- * Copyright (c) 1991 Tatu Ylonen, Espoo, Finland
- *
- * Permission to use, copy, modify, distribute, and sell this software
- * and its documentation for any purpose is hereby granted without fee,
- * provided that the above copyright notice appear in all copies.  This
- * software is provided "as is" without express or implied warranty.
- *
- * Created: Thu Sep 26 17:15:36 1991 ylo
- * Last modified: Mon Nov  4 15:49:46 1991 ylo
- */
-
-/* $Id$ */
-
-#ifndef REGEXPR_H
-#define REGEXPR_H
-
-#define RE_NREGS	100  /* number of registers available */
-
-typedef struct re_pattern_buffer
-{
-	unsigned char *buffer;          /* compiled pattern */
-	int allocated;         /* allocated size of compiled pattern */
-	int used;              /* actual length of compiled pattern */
-	unsigned char *fastmap;         /* fastmap[ch] is true if ch can start pattern */
-	unsigned char *translate;       /* translation to apply during compilation/matching */
-	unsigned char fastmap_accurate; /* true if fastmap is valid */
-	unsigned char can_be_null;      /* true if can match empty string */
-	unsigned char uses_registers;   /* registers are used and need to be initialized */
-	int num_registers;     /* number of registers used */
-	unsigned char anchor;           /* anchor: 0=none 1=begline 2=begbuf */
-} *regexp_t;
-
-typedef struct re_registers
-{
-	int start[RE_NREGS];  /* start offset of region */
-	int end[RE_NREGS];    /* end offset of region */
-} *regexp_registers_t;
-
-/* bit definitions for syntax */
-#define RE_NO_BK_PARENS		1    /* no quoting for parentheses */
-#define RE_NO_BK_VBAR		2    /* no quoting for vertical bar */
-#define RE_BK_PLUS_QM		4    /* quoting needed for + and ? */
-#define RE_TIGHT_VBAR		8    /* | binds tighter than ^ and $ */
-#define RE_NEWLINE_OR		16   /* treat newline as or */
-#define RE_CONTEXT_INDEP_OPS	32   /* ^$?*+ are special in all contexts */
-#define RE_ANSI_HEX		64   /* ansi sequences (\n etc) and \xhh */
-#define RE_NO_GNU_EXTENSIONS   128   /* no gnu extensions */
-
-/* definitions for some common regexp styles */
-#define RE_SYNTAX_AWK	(RE_NO_BK_PARENS|RE_NO_BK_VBAR|RE_CONTEXT_INDEP_OPS)
-#define RE_SYNTAX_EGREP	(RE_SYNTAX_AWK|RE_NEWLINE_OR)
-#define RE_SYNTAX_GREP	(RE_BK_PLUS_QM|RE_NEWLINE_OR)
-#define RE_SYNTAX_EMACS	0
-
-#define Sword       1
-#define Swhitespace 2
-#define Sdigit      4
-#define Soctaldigit 8
-#define Shexdigit   16
-
-/* Rename all exported symbols to avoid conflicts with similarly named
-   symbols in some systems' standard C libraries... */
-
-#define re_syntax _Py_re_syntax
-#define re_syntax_table _Py_re_syntax_table
-#define re_compile_initialize _Py_re_compile_initialize
-#define re_set_syntax _Py_re_set_syntax
-#define re_compile_pattern _Py_re_compile_pattern
-#define re_match _Py_re_match
-#define re_search _Py_re_search
-#define re_compile_fastmap _Py_re_compile_fastmap
-#define re_comp _Py_re_comp
-#define re_exec _Py_re_exec
-
-#ifdef HAVE_PROTOTYPES
-
-extern int re_syntax;
-/* This is the actual syntax mask.  It was added so that Python could do
- * syntax-dependent munging of patterns before compilation. */
-
-extern unsigned char re_syntax_table[256];
-
-void re_compile_initialize(void);
-
-int re_set_syntax(int syntax);
-/* This sets the syntax to use and returns the previous syntax.  The
- * syntax is specified by a bit mask of the above defined bits. */
-
-char *re_compile_pattern(unsigned char *regex, int regex_size, regexp_t compiled);
-/* This compiles the regexp (given in regex and length in regex_size).
- * This returns NULL if the regexp compiled successfully, and an error
- * message if an error was encountered.  The buffer field must be
- * initialized to a memory area allocated by malloc (or to NULL) before
- * use, and the allocated field must be set to its length (or 0 if
- * buffer is NULL).  Also, the translate field must be set to point to a
- * valid translation table, or NULL if it is not used. */
-
-int re_match(regexp_t compiled, unsigned char *string, int size, int pos,
-	     regexp_registers_t old_regs);
-/* This tries to match the regexp against the string.  This returns the
- * length of the matched portion, or -1 if the pattern could not be
- * matched and -2 if an error (such as failure stack overflow) is
- * encountered. */
-
-int re_search(regexp_t compiled, unsigned char *string, int size, int startpos,
-	      int range, regexp_registers_t regs);
-/* This searches for a substring matching the regexp.  This returns the
- * first index at which a match is found.  range specifies at how many
- * positions to try matching; positive values indicate searching
- * forwards, and negative values indicate searching backwards.  mstop
- * specifies the offset beyond which a match must not go.  This returns
- * -1 if no match is found, and -2 if an error (such as failure stack
- * overflow) is encountered. */
-
-void re_compile_fastmap(regexp_t compiled);
-/* This computes the fastmap for the regexp.  For this to have any effect,
- * the calling program must have initialized the fastmap field to point
- * to an array of 256 characters. */
-
-#else /* HAVE_PROTOTYPES */
-
-extern int re_syntax;
-extern unsigned char re_syntax_table[256];
-void re_compile_initialize();
-int re_set_syntax();
-char *re_compile_pattern();
-int re_match();
-int re_search();
-void re_compile_fastmap();
-
-#endif /* HAVE_PROTOTYPES */
-
-#endif /* REGEXPR_H */
-
-
-
-#ifdef __cplusplus
-}
-#endif
-#endif /* !Py_REGEXPR_H */
diff --git a/Modules/resource.c b/Modules/resource.c
index 7cbd2c9..e73c878 100644
--- a/Modules/resource.c
+++ b/Modules/resource.c
@@ -55,6 +55,7 @@
 	16	/* n_in_sequence */
 };
 
+static int initialized;
 static PyTypeObject StructRUsageType;
 
 static PyObject *
@@ -244,7 +245,10 @@
 	}
 	Py_INCREF(ResourceError);
 	PyModule_AddObject(m, "error", ResourceError);
- 	PyStructSequence_InitType(&StructRUsageType, &struct_rusage_desc);
+	if (!initialized)
+		PyStructSequence_InitType(&StructRUsageType, 
+					  &struct_rusage_desc);
+	Py_INCREF(&StructRUsageType);
  	PyModule_AddObject(m, "struct_rusage", 
 			   (PyObject*) &StructRUsageType);
 
@@ -320,4 +324,5 @@
 	if (v) {
 		PyModule_AddObject(m, "RLIM_INFINITY", v);
 	}
+	initialized = 1;
 }
diff --git a/Modules/socketmodule.c b/Modules/socketmodule.c
index c526d75..c9dd4a3 100644
--- a/Modules/socketmodule.c
+++ b/Modules/socketmodule.c
@@ -62,6 +62,7 @@
 */
 
 #include "Python.h"
+#include "structmember.h"
 
 #undef MAX
 #define MAX(x, y) ((x) < (y) ? (y) : (x))
@@ -967,7 +968,18 @@
 	case AF_UNIX:
 	{
 		struct sockaddr_un *a = (struct sockaddr_un *) addr;
-		return PyString_FromString(a->sun_path);
+#ifdef linux
+		if (a->sun_path[0] == 0) {  /* Linux abstract namespace */
+			addrlen -= (sizeof(*a) - sizeof(a->sun_path));
+			return PyString_FromStringAndSize(a->sun_path,
+							  addrlen);
+		}
+		else
+#endif /* linux */
+		{
+			/* regular NULL-terminated string */
+			return PyString_FromString(a->sun_path);
+		}
 	}
 #endif /* AF_UNIX */
 
@@ -1097,14 +1109,28 @@
 		addr = (struct sockaddr_un*)&(s->sock_addr).un;
 		if (!PyArg_Parse(args, "t#", &path, &len))
 			return 0;
-		if (len > sizeof addr->sun_path) {
-			PyErr_SetString(socket_error,
-					"AF_UNIX path too long");
-			return 0;
+#ifdef linux
+		if (len > 0 && path[0] == 0) {
+			/* Linux abstract namespace extension */
+			if (len > sizeof addr->sun_path) {
+				PyErr_SetString(socket_error,
+						"AF_UNIX path too long");
+				return 0;
+			}
+		}
+		else
+#endif /* linux */
+                {
+			/* regular NULL-terminated string */
+			if (len >= sizeof addr->sun_path) {
+				PyErr_SetString(socket_error,
+						"AF_UNIX path too long");
+				return 0;
+			}
+			addr->sun_path[len] = 0;
 		}
 		addr->sun_family = s->sock_family;
 		memcpy(addr->sun_path, path, len);
-		addr->sun_path[len] = 0;
 		*addr_ret = (struct sockaddr *) addr;
 #if defined(PYOS_OS2)
 		*len_ret = sizeof(*addr);
@@ -2207,18 +2233,20 @@
 	Py_BEGIN_ALLOW_THREADS
 	memset(&addrbuf, 0, addrlen);
 	timeout = internal_select(s, 0);
-	if (!timeout)
-		n = recvfrom(s->sock_fd, PyString_AS_STRING(buf), len, flags,
+	if (!timeout) {
 #ifndef MS_WINDOWS
 #if defined(PYOS_OS2) && !defined(PYCC_GCC)
-			     (struct sockaddr *) &addrbuf, &addrlen
+		n = recvfrom(s->sock_fd, PyString_AS_STRING(buf), len, flags,
+			     (struct sockaddr *) &addrbuf, &addrlen);
 #else
-			     (void *) &addrbuf, &addrlen
+		n = recvfrom(s->sock_fd, PyString_AS_STRING(buf), len, flags,
+			     (void *) &addrbuf, &addrlen);
 #endif
 #else
-			     (struct sockaddr *) &addrbuf, &addrlen
+		n = recvfrom(s->sock_fd, PyString_AS_STRING(buf), len, flags,
+			     (struct sockaddr *) &addrbuf, &addrlen);
 #endif
-			);
+	}
 	Py_END_ALLOW_THREADS
 
 	if (timeout) {
@@ -2502,6 +2530,14 @@
 	{NULL,			NULL}		/* sentinel */
 };
 
+/* SockObject members */
+static PyMemberDef sock_memberlist[] = {
+       {"family", T_INT, offsetof(PySocketSockObject, sock_family), READONLY, "the socket family"},
+       {"type", T_INT, offsetof(PySocketSockObject, sock_type), READONLY, "the socket type"},
+       {"proto", T_INT, offsetof(PySocketSockObject, sock_proto), READONLY, "the socket protocol"},
+       {"timeout", T_DOUBLE, offsetof(PySocketSockObject, sock_timeout), READONLY, "the socket timeout"},
+       {0},
+};
 
 /* Deallocate a socket object in response to the last Py_DECREF().
    First close the file description. */
@@ -2625,7 +2661,7 @@
 	0,					/* tp_iter */
 	0,					/* tp_iternext */
 	sock_methods,				/* tp_methods */
-	0,					/* tp_members */
+	sock_memberlist,			/* tp_members */
 	0,					/* tp_getset */
 	0,					/* tp_base */
 	0,					/* tp_dict */
@@ -3159,7 +3195,8 @@
 PyDoc_STRVAR(fromfd_doc,
 "fromfd(fd, family, type[, proto]) -> socket object\n\
 \n\
-Create a socket object from the given file descriptor.\n\
+Create a socket object from a duplicate of the given\n\
+file descriptor.\n\
 The remaining arguments are the same as for socket().");
 
 #endif /* NO_DUP */
@@ -4026,7 +4063,12 @@
 	/*  */
 	PyModule_AddIntConstant(m, "AF_NETLINK", AF_NETLINK);
 	PyModule_AddIntConstant(m, "NETLINK_ROUTE", NETLINK_ROUTE);
+#ifdef NETLINK_SKIP
 	PyModule_AddIntConstant(m, "NETLINK_SKIP", NETLINK_SKIP);
+#endif
+#ifdef NETLINK_W1
+	PyModule_AddIntConstant(m, "NETLINK_W1", NETLINK_W1);
+#endif
 	PyModule_AddIntConstant(m, "NETLINK_USERSOCK", NETLINK_USERSOCK);
 	PyModule_AddIntConstant(m, "NETLINK_FIREWALL", NETLINK_FIREWALL);
 #ifdef NETLINK_TCPDIAG
@@ -4038,12 +4080,18 @@
 #ifdef NETLINK_XFRM
 	PyModule_AddIntConstant(m, "NETLINK_XFRM", NETLINK_XFRM);
 #endif
+#ifdef NETLINK_ARPD
 	PyModule_AddIntConstant(m, "NETLINK_ARPD", NETLINK_ARPD);
+#endif
+#ifdef NETLINK_ROUTE6
 	PyModule_AddIntConstant(m, "NETLINK_ROUTE6", NETLINK_ROUTE6);
+#endif
 	PyModule_AddIntConstant(m, "NETLINK_IP6_FW", NETLINK_IP6_FW);
 	PyModule_AddIntConstant(m, "NETLINK_DNRTMSG", NETLINK_DNRTMSG);
+#ifdef NETLINK_TAPBASE
 	PyModule_AddIntConstant(m, "NETLINK_TAPBASE", NETLINK_TAPBASE);
 #endif
+#endif /* AF_NETLINK */
 #ifdef AF_ROUTE
 	/* Alias to emulate 4.4BSD */
 	PyModule_AddIntConstant(m, "AF_ROUTE", AF_ROUTE);
diff --git a/Modules/spwdmodule.c b/Modules/spwdmodule.c
index 7c618e7..b7bf20e 100644
--- a/Modules/spwdmodule.c
+++ b/Modules/spwdmodule.c
@@ -52,6 +52,7 @@
 	9,
 };
 
+static int initialized;
 static PyTypeObject StructSpwdType;
 
 
@@ -173,7 +174,10 @@
 	m=Py_InitModule3("spwd", spwd_methods, spwd__doc__);
 	if (m == NULL)
 		return;
-	PyStructSequence_InitType(&StructSpwdType, &struct_spwd_type_desc);
+	if (!initialized)
+		PyStructSequence_InitType(&StructSpwdType, 
+					  &struct_spwd_type_desc);
 	Py_INCREF((PyObject *) &StructSpwdType);
 	PyModule_AddObject(m, "struct_spwd", (PyObject *) &StructSpwdType);
+	initialized = 1;
 }
diff --git a/Modules/stropmodule.c b/Modules/stropmodule.c
index cffef3a..8b00fed 100644
--- a/Modules/stropmodule.c
+++ b/Modules/stropmodule.c
@@ -333,7 +333,7 @@
 {
 	char *s, *sub;
 	Py_ssize_t len, n, j;
-	Py_ssize_t i = 0, last = INT_MAX;
+	Py_ssize_t i = 0, last = PY_SSIZE_T_MAX;
 
 	WARN;
 	if (!PyArg_ParseTuple(args, "t#t#|nn:rfind", &s, &len, &sub, &n, &i, &last))
@@ -446,16 +446,16 @@
 {
 	char *s, *s_new;
 	Py_ssize_t i, n;
-	PyObject *new;
+	PyObject *newstr;
 	int changed;
 
 	WARN;
 	if (PyString_AsStringAndSize(args, &s, &n))
 		return NULL;
-	new = PyString_FromStringAndSize(NULL, n);
-	if (new == NULL)
+	newstr = PyString_FromStringAndSize(NULL, n);
+	if (newstr == NULL)
 		return NULL;
-	s_new = PyString_AsString(new);
+	s_new = PyString_AsString(newstr);
 	changed = 0;
 	for (i = 0; i < n; i++) {
 		int c = Py_CHARMASK(*s++);
@@ -467,11 +467,11 @@
 		s_new++;
 	}
 	if (!changed) {
-		Py_DECREF(new);
+		Py_DECREF(newstr);
 		Py_INCREF(args);
 		return args;
 	}
-	return new;
+	return newstr;
 }
 
 
@@ -485,16 +485,16 @@
 {
 	char *s, *s_new;
 	Py_ssize_t i, n;
-	PyObject *new;
+	PyObject *newstr;
 	int changed;
 
 	WARN;
 	if (PyString_AsStringAndSize(args, &s, &n))
 		return NULL;
-	new = PyString_FromStringAndSize(NULL, n);
-	if (new == NULL)
+	newstr = PyString_FromStringAndSize(NULL, n);
+	if (newstr == NULL)
 		return NULL;
-	s_new = PyString_AsString(new);
+	s_new = PyString_AsString(newstr);
 	changed = 0;
 	for (i = 0; i < n; i++) {
 		int c = Py_CHARMASK(*s++);
@@ -506,11 +506,11 @@
 		s_new++;
 	}
 	if (!changed) {
-		Py_DECREF(new);
+		Py_DECREF(newstr);
 		Py_INCREF(args);
 		return args;
 	}
-	return new;
+	return newstr;
 }
 
 
@@ -525,16 +525,16 @@
 {
 	char *s, *s_new;
 	Py_ssize_t i, n;
-	PyObject *new;
+	PyObject *newstr;
 	int changed;
 
 	WARN;
 	if (PyString_AsStringAndSize(args, &s, &n))
 		return NULL;
-	new = PyString_FromStringAndSize(NULL, n);
-	if (new == NULL)
+	newstr = PyString_FromStringAndSize(NULL, n);
+	if (newstr == NULL)
 		return NULL;
-	s_new = PyString_AsString(new);
+	s_new = PyString_AsString(newstr);
 	changed = 0;
 	if (0 < n) {
 		int c = Py_CHARMASK(*s++);
@@ -555,11 +555,11 @@
 		s_new++;
 	}
 	if (!changed) {
-		Py_DECREF(new);
+		Py_DECREF(newstr);
 		Py_INCREF(args);
 		return args;
 	}
-	return new;
+	return newstr;
 }
 
 
@@ -647,7 +647,7 @@
 {
 	char *s, *sub;
 	Py_ssize_t len, n;
-	Py_ssize_t i = 0, last = INT_MAX;
+	Py_ssize_t i = 0, last = PY_SSIZE_T_MAX;
 	Py_ssize_t m, r;
 
 	WARN;
@@ -691,16 +691,16 @@
 {
 	char *s, *s_new;
 	Py_ssize_t i, n;
-	PyObject *new;
+	PyObject *newstr;
 	int changed;
 
 	WARN;
 	if (PyString_AsStringAndSize(args, &s, &n))
 		return NULL;
-	new = PyString_FromStringAndSize(NULL, n);
-	if (new == NULL)
+	newstr = PyString_FromStringAndSize(NULL, n);
+	if (newstr == NULL)
 		return NULL;
-	s_new = PyString_AsString(new);
+	s_new = PyString_AsString(newstr);
 	changed = 0;
 	for (i = 0; i < n; i++) {
 		int c = Py_CHARMASK(*s++);
@@ -717,11 +717,11 @@
 		s_new++;
 	}
 	if (!changed) {
-		Py_DECREF(new);
+		Py_DECREF(newstr);
 		Py_INCREF(args);
 		return args;
 	}
-	return new;
+	return newstr;
 }
 
 
@@ -942,7 +942,7 @@
 	}
 
 	table = table1;
-	inlen = PyString_Size(input_obj);
+	inlen = PyString_GET_SIZE(input_obj);
 	result = PyString_FromStringAndSize((char *)NULL, inlen);
 	if (result == NULL)
 		return NULL;
@@ -1078,7 +1078,7 @@
 	/* find length of output string */
 	nfound = mymemcnt(str, len, pat, pat_len);
 	if (count < 0)
-		count = INT_MAX;
+		count = PY_SSIZE_T_MAX;
 	else if (nfound > count)
 		nfound = count;
 	if (nfound == 0)
@@ -1141,7 +1141,7 @@
 	char *str, *pat,*sub,*new_s;
 	Py_ssize_t len,pat_len,sub_len,out_len;
 	Py_ssize_t count = -1;
-	PyObject *new;
+	PyObject *newstr;
 
 	WARN;
 	if (!PyArg_ParseTuple(args, "t#t#t#|n:replace",
@@ -1165,14 +1165,14 @@
 	}
 	if (out_len == -1) {
 		/* we're returning another reference to the input string */
-		new = PyTuple_GetItem(args, 0);
-		Py_XINCREF(new);
+		newstr = PyTuple_GetItem(args, 0);
+		Py_XINCREF(newstr);
 	}
 	else {
-		new = PyString_FromStringAndSize(new_s, out_len);
+		newstr = PyString_FromStringAndSize(new_s, out_len);
 		PyMem_FREE(new_s);
 	}
-	return new;
+	return newstr;
 }
 
 
diff --git a/Modules/threadmodule.c b/Modules/threadmodule.c
index 9a6c5d8..83313df 100644
--- a/Modules/threadmodule.c
+++ b/Modules/threadmodule.c
@@ -22,24 +22,6 @@
 	PyThread_type_lock lock_lock;
 } lockobject;
 
-static PyTypeObject Locktype;
-
-static lockobject *
-newlockobject(void)
-{
-	lockobject *self;
-	self = PyObject_New(lockobject, &Locktype);
-	if (self == NULL)
-		return NULL;
-	self->lock_lock = PyThread_allocate_lock();
-	if (self->lock_lock == NULL) {
-		PyObject_Del(self);
-		self = NULL;
-		PyErr_SetString(ThreadError, "can't allocate lock");
-	}
-	return self;
-}
-
 static void
 lock_dealloc(lockobject *self)
 {
@@ -166,6 +148,22 @@
 	0,				/*tp_repr*/
 };
 
+static lockobject *
+newlockobject(void)
+{
+	lockobject *self;
+	self = PyObject_New(lockobject, &Locktype);
+	if (self == NULL)
+		return NULL;
+	self->lock_lock = PyThread_allocate_lock();
+	if (self->lock_lock == NULL) {
+		PyObject_Del(self);
+		self = NULL;
+		PyErr_SetString(ThreadError, "can't allocate lock");
+	}
+	return self;
+}
+
 /* Thread-local objects */
 
 #include "structmember.h"
@@ -178,8 +176,6 @@
 	PyObject *dict;
 } localobject;
 
-static PyTypeObject localtype;
-
 static PyObject *
 local_new(PyTypeObject *type, PyObject *args, PyObject *kw)
 {
@@ -315,29 +311,6 @@
 	return ldict;
 }
 
-static PyObject *
-local_getattro(localobject *self, PyObject *name)
-{
-	PyObject *ldict, *value;
-
-	ldict = _ldict(self);
-	if (ldict == NULL) 
-		return NULL;
-
-	if (self->ob_type != &localtype)
-		/* use generic lookup for subtypes */
-		return PyObject_GenericGetAttr((PyObject *)self, name);
-
-	/* Optimization: just look in dict ourselves */
-	value = PyDict_GetItem(ldict, name);
-	if (value == NULL) 
-		/* Fall back on generic to get __class__ and __dict__ */
-		return PyObject_GenericGetAttr((PyObject *)self, name);
-
-	Py_INCREF(value);
-	return value;
-}
-
 static int
 local_setattro(localobject *self, PyObject *name, PyObject *v)
 {
@@ -368,6 +341,8 @@
 	{NULL}  /* Sentinel */
 };
 
+static PyObject *local_getattro(localobject *, PyObject *);
+
 static PyTypeObject localtype = {
 	PyObject_HEAD_INIT(NULL)
 	/* ob_size           */ 0,
@@ -375,17 +350,17 @@
 	/* tp_basicsize      */ sizeof(localobject),
 	/* tp_itemsize       */ 0,
 	/* tp_dealloc        */ (destructor)local_dealloc,
-	/* tp_print          */ (printfunc)0,
-	/* tp_getattr        */ (getattrfunc)0,
-	/* tp_setattr        */ (setattrfunc)0,
-	/* tp_compare        */ (cmpfunc)0,
-	/* tp_repr           */ (reprfunc)0,
+	/* tp_print          */ 0,
+	/* tp_getattr        */ 0,
+	/* tp_setattr        */ 0,
+	/* tp_compare        */ 0,
+	/* tp_repr           */ 0,
 	/* tp_as_number      */ 0,
 	/* tp_as_sequence    */ 0,
 	/* tp_as_mapping     */ 0,
-	/* tp_hash           */ (hashfunc)0,
-	/* tp_call           */ (ternaryfunc)0,
-	/* tp_str            */ (reprfunc)0,
+	/* tp_hash           */ 0,
+	/* tp_call           */ 0,
+	/* tp_str            */ 0,
 	/* tp_getattro       */ (getattrofunc)local_getattro,
 	/* tp_setattro       */ (setattrofunc)local_setattro,
 	/* tp_as_buffer      */ 0,
@@ -393,25 +368,47 @@
 	/* tp_doc            */ "Thread-local data",
 	/* tp_traverse       */ (traverseproc)local_traverse,
 	/* tp_clear          */ (inquiry)local_clear,
-	/* tp_richcompare    */ (richcmpfunc)0,
-	/* tp_weaklistoffset */ (long)0,
-	/* tp_iter           */ (getiterfunc)0,
-	/* tp_iternext       */ (iternextfunc)0,
+	/* tp_richcompare    */ 0,
+	/* tp_weaklistoffset */ 0,
+	/* tp_iter           */ 0,
+	/* tp_iternext       */ 0,
 	/* tp_methods        */ 0,
 	/* tp_members        */ 0,
 	/* tp_getset         */ local_getset,
 	/* tp_base           */ 0,
 	/* tp_dict           */ 0, /* internal use */
-	/* tp_descr_get      */ (descrgetfunc)0,
-	/* tp_descr_set      */ (descrsetfunc)0,
+	/* tp_descr_get      */ 0,
+	/* tp_descr_set      */ 0,
 	/* tp_dictoffset     */ offsetof(localobject, dict),
-	/* tp_init           */ (initproc)0,
-	/* tp_alloc          */ (allocfunc)0,
-	/* tp_new            */ (newfunc)local_new,
+	/* tp_init           */ 0,
+	/* tp_alloc          */ 0,
+	/* tp_new            */ local_new,
 	/* tp_free           */ 0, /* Low-level free-mem routine */
-	/* tp_is_gc          */ (inquiry)0, /* For PyObject_IS_GC */
+	/* tp_is_gc          */ 0, /* For PyObject_IS_GC */
 };
 
+static PyObject *
+local_getattro(localobject *self, PyObject *name)
+{
+	PyObject *ldict, *value;
+
+	ldict = _ldict(self);
+	if (ldict == NULL) 
+		return NULL;
+
+	if (self->ob_type != &localtype)
+		/* use generic lookup for subtypes */
+		return PyObject_GenericGetAttr((PyObject *)self, name);
+
+	/* Optimization: just look in dict ourselves */
+	value = PyDict_GetItem(ldict, name);
+	if (value == NULL) 
+		/* Fall back on generic to get __class__ and __dict__ */
+		return PyObject_GenericGetAttr((PyObject *)self, name);
+
+	Py_INCREF(value);
+	return value;
+}
 
 /* Module functions */
 
@@ -560,6 +557,8 @@
 }
 #endif
 
+static lockobject *newlockobject(void);
+
 static PyObject *
 thread_PyThread_allocate_lock(PyObject *self)
 {
diff --git a/Modules/timemodule.c b/Modules/timemodule.c
index ba93957..08d28a1 100644
--- a/Modules/timemodule.c
+++ b/Modules/timemodule.c
@@ -228,6 +228,7 @@
 	9,
 };
 
+static int initialized;
 static PyTypeObject StructTimeType;
 
 static PyObject *
@@ -443,7 +444,7 @@
 	 * will be ahead of time...
 	 */
 	for (i = 1024; ; i += i) {
-		outbuf = malloc(i);
+		outbuf = (char *)malloc(i);
 		if (outbuf == NULL) {
 			return PyErr_NoMemory();
 		}
@@ -807,9 +808,13 @@
 	hInterruptEvent = CreateEvent(NULL, TRUE, FALSE, NULL);
 	SetConsoleCtrlHandler( PyCtrlHandler, TRUE);
 #endif /* MS_WINDOWS */
-        PyStructSequence_InitType(&StructTimeType, &struct_time_type_desc);
+	if (!initialized) {
+		PyStructSequence_InitType(&StructTimeType, 
+					  &struct_time_type_desc);
+	}
 	Py_INCREF(&StructTimeType);
 	PyModule_AddObject(m, "struct_time", (PyObject*) &StructTimeType);
+	initialized = 1;
 }
 
 
diff --git a/Modules/unicodedata.c b/Modules/unicodedata.c
index 9eda653..297611c 100644
--- a/Modules/unicodedata.c
+++ b/Modules/unicodedata.c
@@ -446,7 +446,7 @@
     return PyString_FromString(decomp);
 }
 
-void
+static void
 get_decomp_record(PyObject *self, Py_UCS4 code, int *index, int *prefix, int *count)
 {
     if (code >= 0x110000) {
@@ -486,8 +486,8 @@
     Py_UNICODE *i, *end, *o;
     /* Longest decomposition in Unicode 3.2: U+FDFA */
     Py_UNICODE stack[20]; 
-    int space, stackptr, isize;
-    int index, prefix, count;
+    Py_ssize_t space, isize;
+    int index, prefix, count, stackptr;
     unsigned char prev, cur;
 	
     stackptr = 0;
@@ -508,7 +508,7 @@
             /* Hangul Decomposition adds three characters in
                a single step, so we need atleast that much room. */
             if (space < 3) {
-                int newsize = PyString_GET_SIZE(result) + 10;
+                Py_ssize_t newsize = PyString_GET_SIZE(result) + 10;
                 space += 10;
                 if (PyUnicode_Resize(&result, newsize) == -1)
                     return NULL;
@@ -759,7 +759,7 @@
     unsigned long h = 0;
     unsigned long ix;
     for (i = 0; i < len; i++) {
-        h = (h * scale) + (unsigned char) toupper(s[i]);
+        h = (h * scale) + (unsigned char) toupper(Py_CHARMASK(s[i]));
         ix = h & 0xff000000;
         if (ix)
             h = (h ^ ((ix>>24) & 0xff)) & 0x00ffffff;
@@ -906,7 +906,7 @@
     if (!_getucname(self, code, buffer, sizeof(buffer)))
         return 0;
     for (i = 0; i < namelen; i++) {
-        if (toupper(name[i]) != buffer[i])
+        if (toupper(Py_CHARMASK(name[i])) != buffer[i])
             return 0;
     }
     return buffer[namelen] == '\0';
diff --git a/Modules/xxsubtype.c b/Modules/xxsubtype.c
index ffbc72b..88ce6c5 100644
--- a/Modules/xxsubtype.c
+++ b/Modules/xxsubtype.c
@@ -79,8 +79,6 @@
 	{NULL,	NULL},
 };
 
-static PyTypeObject spamlist_type;
-
 static int
 spamlist_init(spamlistobject *self, PyObject *args, PyObject *kwds)
 {
@@ -179,8 +177,6 @@
 	{NULL,	NULL},
 };
 
-static PyTypeObject spamdict_type;
-
 static int
 spamdict_init(spamdictobject *self, PyObject *args, PyObject *kwds)
 {
diff --git a/Modules/zipimport.c b/Modules/zipimport.c
index 637dc48..d59ebd8 100644
--- a/Modules/zipimport.c
+++ b/Modules/zipimport.c
@@ -40,7 +40,6 @@
 	PyObject *files;    /* dict with file info {path: toc_entry} */
 };
 
-static PyTypeObject ZipImporter_Type;
 static PyObject *ZipImportError;
 static PyObject *zip_directory_cache = NULL;
 
@@ -171,13 +170,7 @@
 zipimporter_traverse(PyObject *obj, visitproc visit, void *arg)
 {
 	ZipImporter *self = (ZipImporter *)obj;
-	int err;
-
-	if (self->files != NULL) {
-		err = visit(self->files, arg);
-		if (err)
-			return err;
-	}
+	Py_VISIT(self->files);
 	return 0;
 }
 
@@ -958,7 +951,7 @@
 	PyObject *fixed_source;
 
 	/* one char extra for trailing \n and one for terminating \0 */
-	buf = PyMem_Malloc(PyString_Size(source) + 2);
+	buf = (char *)PyMem_Malloc(PyString_Size(source) + 2);
 	if (buf == NULL) {
 		PyErr_SetString(PyExc_MemoryError,
 				"zipimport: no memory to allocate "
diff --git a/Modules/zlibmodule.c b/Modules/zlibmodule.c
index 725755d..35b8c32 100644
--- a/Modules/zlibmodule.c
+++ b/Modules/zlibmodule.c
@@ -654,7 +654,9 @@
 }
 
 PyDoc_STRVAR(decomp_flush__doc__,
-"flush() -- Return a string containing any remaining decompressed data.\n"
+"flush( [length] ) -- Return a string containing any remaining\n"
+"decompressed data. length, if given, is the initial size of the\n"
+"output buffer.\n"
 "\n"
 "The decompressor object can no longer be used after this call.");
 
diff --git a/Objects/abstract.c b/Objects/abstract.c
index c755654..13a9473 100644
--- a/Objects/abstract.c
+++ b/Objects/abstract.c
@@ -10,6 +10,7 @@
 
 #define HASINDEX(o) PyType_HasFeature((o)->ob_type, Py_TPFLAGS_HAVE_INDEX)
 
+
 /* Shorthands to return certain errors */
 
 static PyObject *
@@ -940,8 +941,9 @@
 		value = nb->nb_index(item);
 	}
 	else {
-		PyErr_SetString(PyExc_IndexError, 
-				"object cannot be interpreted as an index");
+		PyErr_Format(PyExc_TypeError,
+			     "'%.200s' object cannot be interpreted "
+			     "as an index", item->ob_type->tp_name);
 	}
 	return value;
 }
@@ -1245,24 +1247,6 @@
 	return type_error("unindexable object");
 }
 
-static PyObject *
-sliceobj_from_intint(Py_ssize_t i, Py_ssize_t j)
-{
-	PyObject *start, *end, *slice;
-	start = PyInt_FromLong((long)i);
-	if (!start)
-		return NULL;
-	end = PyInt_FromLong((long)j);
-	if (!end) {
-		Py_DECREF(start);
-		return NULL;
-	}
-	slice = PySlice_New(start, end, NULL);
-	Py_DECREF(start);
-	Py_DECREF(end);
-	return slice;
-}
-
 PyObject *
 PySequence_GetSlice(PyObject *s, Py_ssize_t i1, Py_ssize_t i2)
 {
@@ -1287,7 +1271,7 @@
 		return m->sq_slice(s, i1, i2);
 	} else if ((mp = s->ob_type->tp_as_mapping) && mp->mp_subscript) {
 		PyObject *res;
-		PyObject *slice = sliceobj_from_intint(i1, i2);
+		PyObject *slice = _PySlice_FromIndices(i1, i2);
 		if (!slice)
 			return NULL;
 		res = mp->mp_subscript(s, slice);
@@ -1379,7 +1363,7 @@
 		return m->sq_ass_slice(s, i1, i2, o);
 	} else if ((mp = s->ob_type->tp_as_mapping) && mp->mp_ass_subscript) {
 		int res;
-		PyObject *slice = sliceobj_from_intint(i1, i2);
+		PyObject *slice = _PySlice_FromIndices(i1, i2);
 		if (!slice)
 			return -1;
 		res = mp->mp_ass_subscript(s, slice, o);
@@ -1815,11 +1799,37 @@
 	return NULL;
 }
 
+static PyObject*
+call_function_tail(PyObject *callable, PyObject *args)
+{
+	PyObject *retval;
+
+	if (args == NULL)
+		return NULL;
+
+	if (!PyTuple_Check(args)) {
+		PyObject *a;
+
+		a = PyTuple_New(1);
+		if (a == NULL) {
+			Py_DECREF(args);
+			return NULL;
+		}
+		PyTuple_SET_ITEM(a, 0, args);
+		args = a;
+	}
+	retval = PyObject_Call(callable, args, NULL);
+
+	Py_DECREF(args);
+
+	return retval;
+}
+
 PyObject *
 PyObject_CallFunction(PyObject *callable, char *format, ...)
 {
 	va_list va;
-	PyObject *args, *retval;
+	PyObject *args;
 
 	if (callable == NULL)
 		return null_error();
@@ -1832,31 +1842,34 @@
 	else
 		args = PyTuple_New(0);
 
-	if (args == NULL)
-		return NULL;
+	return call_function_tail(callable, args);
+}
 
-	if (!PyTuple_Check(args)) {
-		PyObject *a;
+PyObject *
+_PyObject_CallFunction_SizeT(PyObject *callable, char *format, ...)
+{
+	va_list va;
+	PyObject *args;
 
-		a = PyTuple_New(1);
-		if (a == NULL)
-			return NULL;
-		if (PyTuple_SetItem(a, 0, args) < 0)
-			return NULL;
-		args = a;
+	if (callable == NULL)
+		return null_error();
+
+	if (format && *format) {
+		va_start(va, format);
+		args = _Py_VaBuildValue_SizeT(format, va);
+		va_end(va);
 	}
-	retval = PyObject_Call(callable, args, NULL);
+	else
+		args = PyTuple_New(0);
 
-	Py_DECREF(args);
-
-	return retval;
+	return call_function_tail(callable, args);
 }
 
 PyObject *
 PyObject_CallMethod(PyObject *o, char *name, char *format, ...)
 {
 	va_list va;
-	PyObject *args = NULL;
+	PyObject *args;
 	PyObject *func = NULL;
 	PyObject *retval = NULL;
 
@@ -1882,24 +1895,49 @@
 	else
 		args = PyTuple_New(0);
 
-	if (!args)
-		goto exit;
-
-	if (!PyTuple_Check(args)) {
-		PyObject *a;
-
-		a = PyTuple_New(1);
-		if (a == NULL)
-			goto exit;
-		if (PyTuple_SetItem(a, 0, args) < 0)
-			goto exit;
-		args = a;
-	}
-
-	retval = PyObject_Call(func, args, NULL);
+	retval = call_function_tail(func, args);
 
   exit:
-	Py_XDECREF(args);
+	/* args gets consumed in call_function_tail */
+	Py_XDECREF(func);
+
+	return retval;
+}
+
+PyObject *
+_PyObject_CallMethod_SizeT(PyObject *o, char *name, char *format, ...)
+{
+	va_list va;
+	PyObject *args;
+	PyObject *func = NULL;
+	PyObject *retval = NULL;
+
+	if (o == NULL || name == NULL)
+		return null_error();
+
+	func = PyObject_GetAttrString(o, name);
+	if (func == NULL) {
+		PyErr_SetString(PyExc_AttributeError, name);
+		return 0;
+	}
+
+	if (!PyCallable_Check(func)) {
+		type_error("call of non-callable attribute"); 
+		goto exit;
+	}
+
+	if (format && *format) {
+		va_start(va, format);
+		args = _Py_VaBuildValue_SizeT(format, va);
+		va_end(va);
+	}
+	else
+		args = PyTuple_New(0);
+
+	retval = call_function_tail(func, args);
+
+  exit:
+	/* args gets consumed in call_function_tail */
 	Py_XDECREF(func);
 
 	return retval;
diff --git a/Objects/boolobject.c b/Objects/boolobject.c
index 05784e5..79be184 100644
--- a/Objects/boolobject.c
+++ b/Objects/boolobject.c
@@ -103,42 +103,42 @@
 /* Arithmetic methods -- only so we can override &, |, ^. */
 
 static PyNumberMethods bool_as_number = {
-	0,					/* nb_add */
-	0,					/* nb_subtract */
-	0,					/* nb_multiply */
-	0,					/* nb_remainder */
-	0,					/* nb_divmod */
-	0,					/* nb_power */
-	0,					/* nb_negative */
-	0,					/* nb_positive */
-	0,					/* nb_absolute */
-	0,					/* nb_nonzero */
-	0,					/* nb_invert */
-	0,					/* nb_lshift */
-	0,					/* nb_rshift */
-	(binaryfunc)bool_and,			/* nb_and */
-	(binaryfunc)bool_xor,			/* nb_xor */
-	(binaryfunc)bool_or,			/* nb_or */
-	0,					/* nb_coerce */
-	0,					/* nb_int */
-	0,					/* nb_long */
-	0,					/* nb_float */
-	0,					/* nb_oct */
-	0,		 			/* nb_hex */
-	0,					/* nb_inplace_add */
-	0,					/* nb_inplace_subtract */
-	0,					/* nb_inplace_multiply */
-	0,					/* nb_inplace_remainder */
-	0,					/* nb_inplace_power */
-	0,					/* nb_inplace_lshift */
-	0,					/* nb_inplace_rshift */
-	0,					/* nb_inplace_and */
-	0,					/* nb_inplace_xor */
-	0,					/* nb_inplace_or */
-	0,					/* nb_floor_divide */
-	0,					/* nb_true_divide */
-	0,					/* nb_inplace_floor_divide */
-	0,					/* nb_inplace_true_divide */
+	0,			/* nb_add */
+	0,			/* nb_subtract */
+	0,			/* nb_multiply */
+	0,			/* nb_remainder */
+	0,			/* nb_divmod */
+	0,			/* nb_power */
+	0,			/* nb_negative */
+	0,			/* nb_positive */
+	0,			/* nb_absolute */
+	0,			/* nb_nonzero */
+	0,			/* nb_invert */
+	0,			/* nb_lshift */
+	0,			/* nb_rshift */
+	bool_and,		/* nb_and */
+	bool_xor,		/* nb_xor */
+	bool_or,		/* nb_or */
+	0,			/* nb_coerce */
+	0,			/* nb_int */
+	0,			/* nb_long */
+	0,			/* nb_float */
+	0,			/* nb_oct */
+	0,		 	/* nb_hex */
+	0,			/* nb_inplace_add */
+	0,			/* nb_inplace_subtract */
+	0,			/* nb_inplace_multiply */
+	0,			/* nb_inplace_remainder */
+	0,			/* nb_inplace_power */
+	0,			/* nb_inplace_lshift */
+	0,			/* nb_inplace_rshift */
+	0,			/* nb_inplace_and */
+	0,			/* nb_inplace_xor */
+	0,			/* nb_inplace_or */
+	0,			/* nb_floor_divide */
+	0,			/* nb_true_divide */
+	0,			/* nb_inplace_floor_divide */
+	0,			/* nb_inplace_true_divide */
 };
 
 /* The type object for bool.  Note that this cannot be subclassed! */
diff --git a/Objects/bufferobject.c b/Objects/bufferobject.c
index eff06aa..d2597b9 100644
--- a/Objects/bufferobject.c
+++ b/Objects/bufferobject.c
@@ -169,7 +169,7 @@
 	}
 	/* XXX: check for overflow in multiply */
 	/* Inline PyObject_New */
-	o = PyObject_MALLOC(sizeof(*b) + size);
+	o = (PyObject *)PyObject_MALLOC(sizeof(*b) + size);
 	if ( o == NULL )
 		return PyErr_NoMemory();
 	b = (PyBufferObject *) PyObject_INIT(o, &PyBuffer_Type);
@@ -305,7 +305,7 @@
 	Py_ssize_t size;
 	if (!get_buf(self, &ptr, &size))
 		return NULL;
-	return PyString_FromStringAndSize(ptr, size);
+	return PyString_FromStringAndSize((const char *)ptr, size);
 }
 
 /* Sequence methods */
diff --git a/Objects/cellobject.c b/Objects/cellobject.c
index 3b87093..da48dea 100644
--- a/Objects/cellobject.c
+++ b/Objects/cellobject.c
@@ -73,19 +73,29 @@
 static int
 cell_traverse(PyCellObject *op, visitproc visit, void *arg)
 {
-	if (op->ob_ref)
-		return visit(op->ob_ref, arg);
+	Py_VISIT(op->ob_ref);
 	return 0;
 }
 
 static int
 cell_clear(PyCellObject *op)
 {
-	Py_XDECREF(op->ob_ref);
-	op->ob_ref = NULL;
+	Py_CLEAR(op->ob_ref);
 	return 0;
 }
 
+static PyObject *
+cell_get_contents(PyCellObject *op, void *closure)
+{
+	Py_XINCREF(op->ob_ref);
+	return op->ob_ref;
+}
+
+static PyGetSetDef cell_getsetlist[] = {
+	{"cell_contents", (getter)cell_get_contents, NULL},
+	{NULL} /* sentinel */
+};
+
 PyTypeObject PyCell_Type = {
 	PyObject_HEAD_INIT(&PyType_Type)
 	0,
@@ -111,4 +121,11 @@
  	0,					/* tp_doc */
  	(traverseproc)cell_traverse,		/* tp_traverse */
  	(inquiry)cell_clear,			/* tp_clear */
+	0,					/* tp_richcompare */
+	0,					/* tp_weaklistoffset */
+	0, 					/* tp_iter */
+	0,					/* tp_iternext */
+	0,					/* tp_methods */
+	0,					/* tp_members */
+	cell_getsetlist,			/* tp_getset */
 };
diff --git a/Objects/classobject.c b/Objects/classobject.c
index 93acb50..594de11 100644
--- a/Objects/classobject.c
+++ b/Objects/classobject.c
@@ -208,7 +208,7 @@
 {
 	register PyObject *v;
 	register char *sname = PyString_AsString(name);
-	PyClassObject *class;
+	PyClassObject *klass;
 	descrgetfunc f;
 
 	if (sname[0] == '_' && sname[1] == '_') {
@@ -234,7 +234,7 @@
 			return v;
 		}
 	}
-	v = class_lookup(op, name, &class);
+	v = class_lookup(op, name, &klass);
 	if (v == NULL) {
 		PyErr_Format(PyExc_AttributeError,
 			     "class %.50s has no attribute '%.400s'",
@@ -388,15 +388,15 @@
 		Py_INCREF(name);
 		return name;
 	}
-	m = PyString_Size(mod);
-	n = PyString_Size(name);
+	m = PyString_GET_SIZE(mod);
+	n = PyString_GET_SIZE(name);
 	res = PyString_FromStringAndSize((char *)NULL, m+1+n);
 	if (res != NULL) {
-		char *s = PyString_AsString(res);
-		memcpy(s, PyString_AsString(mod), m);
+		char *s = PyString_AS_STRING(res);
+		memcpy(s, PyString_AS_STRING(mod), m);
 		s += m;
 		*s++ = '.';
-		memcpy(s, PyString_AsString(name), n);
+		memcpy(s, PyString_AS_STRING(name), n);
 	}
 	return res;
 }
@@ -404,37 +404,12 @@
 static int
 class_traverse(PyClassObject *o, visitproc visit, void *arg)
 {
-	int err;
-	if (o->cl_bases) {
-		err = visit(o->cl_bases, arg);
-		if (err)
-			return err;
-	}
-	if (o->cl_dict) {
-		err = visit(o->cl_dict, arg);
-		if (err)
-			return err;
-	}
-	if (o->cl_name) {
-		err = visit(o->cl_name, arg);
-		if (err)
-			return err;
-	}
-	if (o->cl_getattr) {
-		err = visit(o->cl_getattr, arg);
-		if (err)
-			return err;
-	}
-	if (o->cl_setattr) {
-		err = visit(o->cl_setattr, arg);
-		if (err)
-			return err;
-	}
-	if (o->cl_delattr) {
-		err = visit(o->cl_delattr, arg);
-		if (err)
-			return err;
-	}
+	Py_VISIT(o->cl_bases);
+	Py_VISIT(o->cl_dict);
+	Py_VISIT(o->cl_name);
+	Py_VISIT(o->cl_getattr);
+	Py_VISIT(o->cl_setattr);
+	Py_VISIT(o->cl_delattr);
 	return 0;
 }
 
@@ -481,23 +456,23 @@
 };
 
 int
-PyClass_IsSubclass(PyObject *class, PyObject *base)
+PyClass_IsSubclass(PyObject *klass, PyObject *base)
 {
 	Py_ssize_t i, n;
 	PyClassObject *cp;
-	if (class == base)
+	if (klass == base)
 		return 1;
 	if (PyTuple_Check(base)) {
 		n = PyTuple_GET_SIZE(base);
 		for (i = 0; i < n; i++) {
-			if (PyClass_IsSubclass(class, PyTuple_GET_ITEM(base, i)))
+			if (PyClass_IsSubclass(klass, PyTuple_GET_ITEM(base, i)))
 				return 1;
 		}
 		return 0;
 	}
-	if (class == NULL || !PyClass_Check(class))
+	if (klass == NULL || !PyClass_Check(klass))
 		return 0;
-	cp = (PyClassObject *)class;
+	cp = (PyClassObject *)klass;
 	n = PyTuple_Size(cp->cl_bases);
 	for (i = 0; i < n; i++) {
 		if (PyClass_IsSubclass(PyTuple_GetItem(cp->cl_bases, i), base))
@@ -719,7 +694,7 @@
 instance_getattr2(register PyInstanceObject *inst, PyObject *name)
 {
 	register PyObject *v;
-	PyClassObject *class;
+	PyClassObject *klass;
 	descrgetfunc f;
 
 	v = PyDict_GetItem(inst->in_dict, name);
@@ -727,7 +702,7 @@
 		Py_INCREF(v);
 		return v;
 	}
-	v = class_lookup(inst->in_class, name, &class);
+	v = class_lookup(inst->in_class, name, &klass);
 	if (v != NULL) {
 		Py_INCREF(v);
 		f = TP_DESCR_GET(v->ob_type);
@@ -767,7 +742,7 @@
 _PyInstance_Lookup(PyObject *pinst, PyObject *name)
 {
 	PyObject *v;
-	PyClassObject *class;
+	PyClassObject *klass;
 	PyInstanceObject *inst;	/* pinst cast to the right type */
 
 	assert(PyInstance_Check(pinst));
@@ -777,7 +752,7 @@
 
  	v = PyDict_GetItem(inst->in_dict, name);
 	if (v == NULL)
-		v = class_lookup(inst->in_class, name, &class);
+		v = class_lookup(inst->in_class, name, &klass);
 	return v;
 }
 
@@ -979,17 +954,8 @@
 static int
 instance_traverse(PyInstanceObject *o, visitproc visit, void *arg)
 {
-	int err;
-	if (o->in_class) {
-		err = visit((PyObject *)(o->in_class), arg);
-		if (err)
-			return err;
-	}
-	if (o->in_dict) {
-		err = visit(o->in_dict, arg);
-		if (err)
-			return err;
-	}
+	Py_VISIT(o->in_class);
+	Py_VISIT(o->in_dict);
 	return 0;
 }
 
@@ -1128,27 +1094,6 @@
 }
 
 static PyObject *
-sliceobj_from_intint(Py_ssize_t i, Py_ssize_t j)
-{
-	PyObject *start, *end, *res;
-
-	start = PyInt_FromLong((long)i);
-	if (!start)
-		return NULL;
-
-	end = PyInt_FromLong((long)j);
-	if (!end) {
-		Py_DECREF(start);
-		return NULL;
-	}
-	res = PySlice_New(start, end, NULL);
-	Py_DECREF(start);
-	Py_DECREF(end);
-	return res;
-}
-
-
-static PyObject *
 instance_slice(PyInstanceObject *inst, Py_ssize_t i, Py_ssize_t j)
 {
 	PyObject *func, *arg, *res;
@@ -1168,7 +1113,7 @@
 		func = instance_getattr(inst, getitemstr);
 		if (func == NULL)
 			return NULL;
-		arg = Py_BuildValue("(N)", sliceobj_from_intint(i, j));
+		arg = Py_BuildValue("(N)", _PySlice_FromIndices(i, j));
 	} else
 		arg = Py_BuildValue("(nn)", i, j);
 
@@ -1239,7 +1184,7 @@
 				return -1;
 
 			arg = Py_BuildValue("(N)",
-					    sliceobj_from_intint(i, j));
+					    _PySlice_FromIndices(i, j));
 		} else
 			arg = Py_BuildValue("(nn)", i, j);
 	}
@@ -1260,7 +1205,7 @@
 				return -1;
 
 			arg = Py_BuildValue("(NO)",
-					    sliceobj_from_intint(i, j), value);
+					    _PySlice_FromIndices(i, j), value);
 		} else
 			arg = Py_BuildValue("(nnO)", i, j, value);
 	}
@@ -2049,43 +1994,43 @@
 
 
 static PyNumberMethods instance_as_number = {
-	(binaryfunc)instance_add,		/* nb_add */
-	(binaryfunc)instance_sub,		/* nb_subtract */
-	(binaryfunc)instance_mul,		/* nb_multiply */
-	(binaryfunc)instance_mod,		/* nb_remainder */
-	(binaryfunc)instance_divmod,		/* nb_divmod */
-	(ternaryfunc)instance_pow,		/* nb_power */
-	(unaryfunc)instance_neg,		/* nb_negative */
-	(unaryfunc)instance_pos,		/* nb_positive */
-	(unaryfunc)instance_abs,		/* nb_absolute */
-	(inquiry)instance_nonzero,		/* nb_nonzero */
-	(unaryfunc)instance_invert,		/* nb_invert */
-	(binaryfunc)instance_lshift,		/* nb_lshift */
-	(binaryfunc)instance_rshift,		/* nb_rshift */
-	(binaryfunc)instance_and,		/* nb_and */
-	(binaryfunc)instance_xor,		/* nb_xor */
-	(binaryfunc)instance_or,		/* nb_or */
-	(coercion)instance_coerce,		/* nb_coerce */
-	(unaryfunc)instance_int,		/* nb_int */
-	(unaryfunc)instance_long,		/* nb_long */
-	(unaryfunc)instance_float,		/* nb_float */
-	(unaryfunc)instance_oct,		/* nb_oct */
-	(unaryfunc)instance_hex,		/* nb_hex */
-	(binaryfunc)instance_iadd,		/* nb_inplace_add */
-	(binaryfunc)instance_isub,		/* nb_inplace_subtract */
-	(binaryfunc)instance_imul,		/* nb_inplace_multiply */
-	(binaryfunc)instance_imod,		/* nb_inplace_remainder */
-	(ternaryfunc)instance_ipow,		/* nb_inplace_power */
-	(binaryfunc)instance_ilshift,		/* nb_inplace_lshift */
-	(binaryfunc)instance_irshift,		/* nb_inplace_rshift */
-	(binaryfunc)instance_iand,		/* nb_inplace_and */
-	(binaryfunc)instance_ixor,		/* nb_inplace_xor */
-	(binaryfunc)instance_ior,		/* nb_inplace_or */
-	(binaryfunc)instance_floordiv,		/* nb_floor_divide */
-	(binaryfunc)instance_truediv,		/* nb_true_divide */
-	(binaryfunc)instance_ifloordiv,		/* nb_inplace_floor_divide */
-	(binaryfunc)instance_itruediv,		/* nb_inplace_true_divide */
-	(lenfunc)instance_index,		/* nb_index */
+	instance_add,			/* nb_add */
+	instance_sub,			/* nb_subtract */
+	instance_mul,			/* nb_multiply */
+	instance_mod,			/* nb_remainder */
+	instance_divmod,		/* nb_divmod */
+	instance_pow,			/* nb_power */
+	(unaryfunc)instance_neg,	/* nb_negative */
+	(unaryfunc)instance_pos,	/* nb_positive */
+	(unaryfunc)instance_abs,	/* nb_absolute */
+	(inquiry)instance_nonzero,	/* nb_nonzero */
+	(unaryfunc)instance_invert,	/* nb_invert */
+	instance_lshift,		/* nb_lshift */
+	instance_rshift,		/* nb_rshift */
+	instance_and,			/* nb_and */
+	instance_xor,			/* nb_xor */
+	instance_or,			/* nb_or */
+	instance_coerce,		/* nb_coerce */
+	(unaryfunc)instance_int,	/* nb_int */
+	(unaryfunc)instance_long,	/* nb_long */
+	(unaryfunc)instance_float,	/* nb_float */
+	(unaryfunc)instance_oct,	/* nb_oct */
+	(unaryfunc)instance_hex,	/* nb_hex */
+	instance_iadd,			/* nb_inplace_add */
+	instance_isub,			/* nb_inplace_subtract */
+	instance_imul,			/* nb_inplace_multiply */
+	instance_imod,			/* nb_inplace_remainder */
+	instance_ipow,			/* nb_inplace_power */
+	instance_ilshift,		/* nb_inplace_lshift */
+	instance_irshift,		/* nb_inplace_rshift */
+	instance_iand,			/* nb_inplace_and */
+	instance_ixor,			/* nb_inplace_xor */
+	instance_ior,			/* nb_inplace_or */
+	instance_floordiv,		/* nb_floor_divide */
+	instance_truediv,		/* nb_true_divide */
+	instance_ifloordiv,		/* nb_inplace_floor_divide */
+	instance_itruediv,		/* nb_inplace_true_divide */
+	(lenfunc)instance_index,	/* nb_index */
 };
 
 PyTypeObject PyInstance_Type = {
@@ -2140,7 +2085,7 @@
 static PyMethodObject *free_list;
 
 PyObject *
-PyMethod_New(PyObject *func, PyObject *self, PyObject *class)
+PyMethod_New(PyObject *func, PyObject *self, PyObject *klass)
 {
 	register PyMethodObject *im;
 	if (!PyCallable_Check(func)) {
@@ -2162,8 +2107,8 @@
 	im->im_func = func;
 	Py_XINCREF(self);
 	im->im_self = self;
-	Py_XINCREF(class);
-	im->im_class = class;
+	Py_XINCREF(klass);
+	im->im_class = klass;
 	_PyObject_GC_TRACK(im);
 	return (PyObject *)im;
 }
@@ -2365,35 +2310,22 @@
 static int
 instancemethod_traverse(PyMethodObject *im, visitproc visit, void *arg)
 {
-	int err;
-	if (im->im_func) {
-		err = visit(im->im_func, arg);
-		if (err)
-			return err;
-	}
-	if (im->im_self) {
-		err = visit(im->im_self, arg);
-		if (err)
-			return err;
-	}
-	if (im->im_class) {
-		err = visit(im->im_class, arg);
-		if (err)
-			return err;
-	}
+	Py_VISIT(im->im_func);
+	Py_VISIT(im->im_self);
+	Py_VISIT(im->im_class);
 	return 0;
 }
 
 static void
-getclassname(PyObject *class, char *buf, int bufsize)
+getclassname(PyObject *klass, char *buf, int bufsize)
 {
 	PyObject *name;
 
 	assert(bufsize > 1);
 	strcpy(buf, "?"); /* Default outcome */
-	if (class == NULL)
+	if (klass == NULL)
 		return;
-	name = PyObject_GetAttrString(class, "__name__");
+	name = PyObject_GetAttrString(klass, "__name__");
 	if (name == NULL) {
 		/* This function cannot return an exception */
 		PyErr_Clear();
@@ -2409,7 +2341,7 @@
 static void
 getinstclassname(PyObject *inst, char *buf, int bufsize)
 {
-	PyObject *class;
+	PyObject *klass;
 
 	if (inst == NULL) {
 		assert(bufsize > 0 && (size_t)bufsize > strlen("nothing"));
@@ -2417,22 +2349,22 @@
 		return;
 	}
 
-	class = PyObject_GetAttrString(inst, "__class__");
-	if (class == NULL) {
+	klass = PyObject_GetAttrString(inst, "__class__");
+	if (klass == NULL) {
 		/* This function cannot return an exception */
 		PyErr_Clear();
-		class = (PyObject *)(inst->ob_type);
-		Py_INCREF(class);
+		klass = (PyObject *)(inst->ob_type);
+		Py_INCREF(klass);
 	}
-	getclassname(class, buf, bufsize);
-	Py_XDECREF(class);
+	getclassname(klass, buf, bufsize);
+	Py_XDECREF(klass);
 }
 
 static PyObject *
 instancemethod_call(PyObject *func, PyObject *arg, PyObject *kw)
 {
 	PyObject *self = PyMethod_GET_SELF(func);
-	PyObject *class = PyMethod_GET_CLASS(func);
+	PyObject *klass = PyMethod_GET_CLASS(func);
 	PyObject *result;
 
 	func = PyMethod_GET_FUNCTION(func);
@@ -2445,14 +2377,14 @@
 		if (self == NULL)
 			ok = 0;
 		else {
-			ok = PyObject_IsInstance(self, class);
+			ok = PyObject_IsInstance(self, klass);
 			if (ok < 0)
 				return NULL;
 		}
 		if (!ok) {
 			char clsbuf[256];
 			char instbuf[256];
-			getclassname(class, clsbuf, sizeof(clsbuf));
+			getclassname(klass, clsbuf, sizeof(clsbuf));
 			getinstclassname(self, instbuf, sizeof(instbuf));
 			PyErr_Format(PyExc_TypeError,
 				     "unbound method %s%s must be called with "
@@ -2531,7 +2463,7 @@
 	(hashfunc)instancemethod_hash,		/* tp_hash */
 	instancemethod_call,			/* tp_call */
 	0,					/* tp_str */
-	(getattrofunc)instancemethod_getattro,	/* tp_getattro */
+	instancemethod_getattro,		/* tp_getattro */
 	PyObject_GenericSetAttr,		/* tp_setattro */
 	0,					/* tp_as_buffer */
 	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC  | Py_TPFLAGS_HAVE_WEAKREFS, /* tp_flags */
diff --git a/Objects/cobject.c b/Objects/cobject.c
index f764a1d..b2cae9a 100644
--- a/Objects/cobject.c
+++ b/Objects/cobject.c
@@ -136,25 +136,26 @@
 
 PyTypeObject PyCObject_Type = {
     PyObject_HEAD_INIT(&PyType_Type)
-    0,					/*ob_size*/
-    "PyCObject",			/*tp_name*/
-    sizeof(PyCObject),			/*tp_basicsize*/
-    0,					/*tp_itemsize*/
+    0,				/*ob_size*/
+    "PyCObject",		/*tp_name*/
+    sizeof(PyCObject),		/*tp_basicsize*/
+    0,				/*tp_itemsize*/
     /* methods */
-    (destructor)PyCObject_dealloc,	/*tp_dealloc*/
-    (printfunc)0,			/*tp_print*/
-    (getattrfunc)0,			/*tp_getattr*/
-    (setattrfunc)0,			/*tp_setattr*/
-    (cmpfunc)0,				/*tp_compare*/
-    (reprfunc)0,			/*tp_repr*/
-    0,					/*tp_as_number*/
-    0,					/*tp_as_sequence*/
-    0,					/*tp_as_mapping*/
-    (hashfunc)0,			/*tp_hash*/
-    (ternaryfunc)0,			/*tp_call*/
-    (reprfunc)0,			/*tp_str*/
-
-    /* Space for future expansion */
-    0L,0L,0L,0L,
-    PyCObject_Type__doc__ 		/* Documentation string */
+    (destructor)PyCObject_dealloc, /*tp_dealloc*/
+    0,				/*tp_print*/
+    0,				/*tp_getattr*/
+    0,				/*tp_setattr*/
+    0,				/*tp_compare*/
+    0,				/*tp_repr*/
+    0,				/*tp_as_number*/
+    0,				/*tp_as_sequence*/
+    0,				/*tp_as_mapping*/
+    0,				/*tp_hash*/
+    0,				/*tp_call*/
+    0,				/*tp_str*/
+    0,				/*tp_getattro*/
+    0,				/*tp_setattro*/
+    0,				/*tp_as_buffer*/
+    0,				/*tp_flags*/
+    PyCObject_Type__doc__	/*tp_doc*/
 };
diff --git a/Objects/codeobject.c b/Objects/codeobject.c
index f832911..8ae2399 100644
--- a/Objects/codeobject.c
+++ b/Objects/codeobject.c
@@ -451,3 +451,136 @@
 	}
 	return line;
 }
+
+/* 
+   Check whether the current instruction is at the start of a line.
+
+ */
+
+	/* The theory of SET_LINENO-less tracing.
+
+	   In a nutshell, we use the co_lnotab field of the code object
+	   to tell when execution has moved onto a different line.
+
+	   As mentioned above, the basic idea is so set things up so
+	   that
+
+	         *instr_lb <= frame->f_lasti < *instr_ub
+
+	   is true so long as execution does not change lines.
+
+	   This is all fairly simple.  Digging the information out of
+	   co_lnotab takes some work, but is conceptually clear.
+
+	   Somewhat harder to explain is why we don't *always* call the
+	   line trace function when the above test fails.
+
+	   Consider this code:
+
+	   1: def f(a):
+	   2:     if a:
+	   3:        print 1
+	   4:     else:
+	   5:        print 2
+
+	   which compiles to this:
+
+	   2           0 LOAD_FAST                0 (a)
+		       3 JUMP_IF_FALSE            9 (to 15)
+		       6 POP_TOP
+
+	   3           7 LOAD_CONST               1 (1)
+		      10 PRINT_ITEM
+		      11 PRINT_NEWLINE
+		      12 JUMP_FORWARD             6 (to 21)
+		 >>   15 POP_TOP
+
+	   5          16 LOAD_CONST               2 (2)
+		      19 PRINT_ITEM
+		      20 PRINT_NEWLINE
+		 >>   21 LOAD_CONST               0 (None)
+		      24 RETURN_VALUE
+
+	   If 'a' is false, execution will jump to instruction at offset
+	   15 and the co_lnotab will claim that execution has moved to
+	   line 3.  This is at best misleading.  In this case we could
+	   associate the POP_TOP with line 4, but that doesn't make
+	   sense in all cases (I think).
+
+	   What we do is only call the line trace function if the co_lnotab
+	   indicates we have jumped to the *start* of a line, i.e. if the
+	   current instruction offset matches the offset given for the
+	   start of a line by the co_lnotab.
+
+	   This also takes care of the situation where 'a' is true.
+	   Execution will jump from instruction offset 12 to offset 21.
+	   Then the co_lnotab would imply that execution has moved to line
+	   5, which is again misleading.
+
+	   Why do we set f_lineno when tracing?  Well, consider the code
+	   above when 'a' is true.  If stepping through this with 'n' in
+	   pdb, you would stop at line 1 with a "call" type event, then
+	   line events on lines 2 and 3, then a "return" type event -- but
+	   you would be shown line 5 during this event.  This is a change
+	   from the behaviour in 2.2 and before, and I've found it
+	   confusing in practice.  By setting and using f_lineno when
+	   tracing, one can report a line number different from that
+	   suggested by f_lasti on this one occasion where it's desirable.
+	*/
+
+
+int 
+PyCode_CheckLineNumber(PyCodeObject* co, int lasti, PyAddrPair *bounds)
+{
+        int size, addr, line;
+        unsigned char* p;
+
+        p = (unsigned char*)PyString_AS_STRING(co->co_lnotab);
+        size = PyString_GET_SIZE(co->co_lnotab) / 2;
+
+        addr = 0;
+        line = co->co_firstlineno;
+        assert(line > 0);
+
+        /* possible optimization: if f->f_lasti == instr_ub
+           (likely to be a common case) then we already know
+           instr_lb -- if we stored the matching value of p
+           somwhere we could skip the first while loop. */
+
+        /* see comments in compile.c for the description of
+           co_lnotab.  A point to remember: increments to p
+           should come in pairs -- although we don't care about
+           the line increments here, treating them as byte
+           increments gets confusing, to say the least. */
+
+        while (size > 0) {
+                if (addr + *p > lasti)
+                        break;
+                addr += *p++;
+                if (*p) 
+                        bounds->ap_lower = addr;
+                line += *p++;
+                --size;
+        }
+
+        /* If lasti and addr don't match exactly, we don't want to
+           change the lineno slot on the frame or execute a trace
+           function.  Return -1 instead.
+        */
+        if (addr != lasti)
+                line = -1;
+        
+        if (size > 0) {
+                while (--size >= 0) {
+                        addr += *p++;
+                        if (*p++)
+                                break;
+                }
+                bounds->ap_upper = addr;
+        }
+        else {
+                bounds->ap_upper = INT_MAX;
+        }
+
+        return line;
+}
diff --git a/Objects/complexobject.c b/Objects/complexobject.c
index f0915dd..c6021e9 100644
--- a/Objects/complexobject.c
+++ b/Objects/complexobject.c
@@ -667,7 +667,7 @@
 	}
 #ifdef Py_USING_UNICODE
 	else if (PyUnicode_Check(v)) {
-		if (PyUnicode_GET_SIZE(v) >= sizeof(s_buffer)) {
+		if (PyUnicode_GET_SIZE(v) >= (Py_ssize_t)sizeof(s_buffer)) {
 			PyErr_SetString(PyExc_ValueError,
 				 "complex() literal too large to convert");
 			return NULL;
@@ -940,10 +940,10 @@
 	0,					/* nb_and */
 	0,					/* nb_xor */
 	0,					/* nb_or */
-	(coercion)complex_coerce,		/* nb_coerce */
-	(unaryfunc)complex_int,			/* nb_int */
-	(unaryfunc)complex_long,		/* nb_long */
-	(unaryfunc)complex_float,		/* nb_float */
+	complex_coerce,				/* nb_coerce */
+	complex_int,				/* nb_int */
+	complex_long,				/* nb_long */
+	complex_float,				/* nb_float */
 	0,					/* nb_oct */
 	0,					/* nb_hex */
 	0,					/* nb_inplace_add */
@@ -968,7 +968,7 @@
 	"complex",
 	sizeof(PyComplexObject),
 	0,
-	(destructor)complex_dealloc,		/* tp_dealloc */
+	complex_dealloc,			/* tp_dealloc */
 	(printfunc)complex_print,		/* tp_print */
 	0,					/* tp_getattr */
 	0,					/* tp_setattr */
diff --git a/Objects/descrobject.c b/Objects/descrobject.c
index 9494062..561ba4a5 100644
--- a/Objects/descrobject.c
+++ b/Objects/descrobject.c
@@ -377,13 +377,7 @@
 descr_traverse(PyObject *self, visitproc visit, void *arg)
 {
 	PyDescrObject *descr = (PyDescrObject *)self;
-	int err;
-
-	if (descr->d_type) {
-		err = visit((PyObject *)(descr->d_type), arg);
-		if (err)
-			return err;
-	}
+	Py_VISIT(descr->d_type);
 	return 0;
 }
 
@@ -480,7 +474,7 @@
 	0,					/* tp_as_sequence */
 	0,					/* tp_as_mapping */
 	0,					/* tp_hash */
-	(ternaryfunc)0,				/* tp_call */
+	0,					/* tp_call */
 	0,					/* tp_str */
 	PyObject_GenericGetAttr,		/* tp_getattro */
 	0,					/* tp_setattro */
@@ -518,7 +512,7 @@
 	0,					/* tp_as_sequence */
 	0,					/* tp_as_mapping */
 	0,					/* tp_hash */
-	(ternaryfunc)0,				/* tp_call */
+	0,					/* tp_call */
 	0,					/* tp_str */
 	PyObject_GenericGetAttr,		/* tp_getattro */
 	0,					/* tp_setattro */
@@ -814,13 +808,7 @@
 proxy_traverse(PyObject *self, visitproc visit, void *arg)
 {
 	proxyobject *pp = (proxyobject *)self;
-	int err;
-
-	if (pp->dict) {
-		err = visit(pp->dict, arg);
-		if (err)
-			return err;
-	}
+	Py_VISIT(pp->dict);
 	return 0;
 }
 
@@ -999,18 +987,8 @@
 wrapper_traverse(PyObject *self, visitproc visit, void *arg)
 {
 	wrapperobject *wp = (wrapperobject *)self;
-	int err;
-
-	if (wp->descr) {
-		err = visit((PyObject *)(wp->descr), arg);
-		if (err)
-			return err;
-	}
-	if (wp->self) {
-		err = visit(wp->self, arg);
-		if (err)
-			return err;
-	}
+	Py_VISIT(wp->descr);
+	Py_VISIT(wp->self);
 	return 0;
 }
 
@@ -1237,20 +1215,10 @@
 property_traverse(PyObject *self, visitproc visit, void *arg)
 {
 	propertyobject *pp = (propertyobject *)self;
-	int err;
-
-#define VISIT(SLOT) \
-	if (pp->SLOT) { \
-		err = visit((PyObject *)(pp->SLOT), arg); \
-		if (err) \
-			return err; \
-	}
-
-	VISIT(prop_get);
-	VISIT(prop_set);
-	VISIT(prop_del);
-	VISIT(prop_doc);
-
+	Py_VISIT(pp->prop_get);
+	Py_VISIT(pp->prop_set);
+	Py_VISIT(pp->prop_del);
+	Py_VISIT(pp->prop_doc);
 	return 0;
 }
 
diff --git a/Objects/dictobject.c b/Objects/dictobject.c
index 0eccdbb..f5799ee 100644
--- a/Objects/dictobject.c
+++ b/Objects/dictobject.c
@@ -115,6 +115,14 @@
 /* Object used as dummy key to fill deleted entries */
 static PyObject *dummy = NULL; /* Initialized by first call to newdictobject() */
 
+#ifdef Py_REF_DEBUG
+PyObject *
+_PyDict_Dummy(void)
+{
+	return dummy;
+}
+#endif
+
 /* forward declarations */
 static dictentry *
 lookdict_string(dictobject *mp, PyObject *key, long hash);
@@ -1724,17 +1732,12 @@
 dict_traverse(PyObject *op, visitproc visit, void *arg)
 {
 	Py_ssize_t i = 0;
-	int err;
 	PyObject *pk;
 	PyObject *pv;
 
 	while (PyDict_Next(op, &i, &pk, &pv)) {
-		err = visit(pk, arg);
-		if (err)
-			return err;
-		err = visit(pv, arg);
-		if (err)
-			return err;
+		Py_VISIT(pk);
+		Py_VISIT(pv);
 	}
 	return 0;
 }
@@ -1880,16 +1883,16 @@
 
 /* Hack to implement "key in dict" */
 static PySequenceMethods dict_as_sequence = {
-	0,					/* sq_length */
-	0,					/* sq_concat */
-	0,					/* sq_repeat */
-	0,					/* sq_item */
-	0,					/* sq_slice */
-	0,					/* sq_ass_item */
-	0,					/* sq_ass_slice */
-	(objobjproc)PyDict_Contains,		/* sq_contains */
-	0,					/* sq_inplace_concat */
-	0,					/* sq_inplace_repeat */
+	0,			/* sq_length */
+	0,			/* sq_concat */
+	0,			/* sq_repeat */
+	0,			/* sq_item */
+	0,			/* sq_slice */
+	0,			/* sq_ass_item */
+	0,			/* sq_ass_slice */
+	PyDict_Contains,	/* sq_contains */
+	0,			/* sq_inplace_concat */
+	0,			/* sq_inplace_repeat */
 };
 
 static PyObject *
@@ -1966,8 +1969,8 @@
 	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC |
 		Py_TPFLAGS_BASETYPE,		/* tp_flags */
 	dictionary_doc,				/* tp_doc */
-	(traverseproc)dict_traverse,		/* tp_traverse */
-	(inquiry)dict_tp_clear,			/* tp_clear */
+	dict_traverse,				/* tp_traverse */
+	dict_tp_clear,				/* tp_clear */
 	dict_richcompare,			/* tp_richcompare */
 	0,					/* tp_weaklistoffset */
 	(getiterfunc)dict_iter,			/* tp_iter */
@@ -1980,7 +1983,7 @@
 	0,					/* tp_descr_get */
 	0,					/* tp_descr_set */
 	0,					/* tp_dictoffset */
-	(initproc)dict_init,			/* tp_init */
+	dict_init,				/* tp_init */
 	PyType_GenericAlloc,			/* tp_alloc */
 	dict_new,				/* tp_new */
 	PyObject_GC_Del,        		/* tp_free */
diff --git a/Objects/enumobject.c b/Objects/enumobject.c
index 4811239..a8f43e0 100644
--- a/Objects/enumobject.c
+++ b/Objects/enumobject.c
@@ -9,8 +9,6 @@
 	PyObject* en_result;	   /* result tuple  */
 } enumobject;
 
-PyTypeObject PyEnum_Type;
-
 static PyObject *
 enum_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 {
@@ -51,18 +49,8 @@
 static int
 enum_traverse(enumobject *en, visitproc visit, void *arg)
 {
-	int err;
-
-	if (en->en_sit) {
-		err = visit(en->en_sit, arg);
-		if (err)
-			return err;
-	}
-	if (en->en_result) {
-		err = visit(en->en_result, arg);
-		if (err)
-			return err;
-	}
+	Py_VISIT(en->en_sit);
+	Py_VISIT(en->en_result);
 	return 0;
 }
 
@@ -207,8 +195,7 @@
 static int
 reversed_traverse(reversedobject *ro, visitproc visit, void *arg)
 {
-	if (ro->seq)
-		return visit((PyObject *)(ro->seq), arg);
+	Py_VISIT(ro->seq);
 	return 0;
 }
 
diff --git a/Objects/fileobject.c b/Objects/fileobject.c
index f96ee7b..632ab04 100644
--- a/Objects/fileobject.c
+++ b/Objects/fileobject.c
@@ -48,6 +48,10 @@
 #define NEWLINE_LF 2		/* \n newline seen */
 #define NEWLINE_CRLF 4		/* \r\n newline seen */
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 FILE *
 PyFile_AsFile(PyObject *f)
 {
@@ -313,7 +317,8 @@
 			PyMem_Free(file->f_setbuf);
 			file->f_setbuf = NULL;
 		} else {
-			file->f_setbuf = PyMem_Realloc(file->f_setbuf, bufsize);
+			file->f_setbuf = (char *)PyMem_Realloc(file->f_setbuf, 
+                                                                bufsize);
 		}
 #ifdef HAVE_SETVBUF
 		setvbuf(file->f_fp, file->f_setbuf, type, bufsize);
@@ -818,7 +823,7 @@
 		buffersize = new_buffersize(f, (size_t)0);
 	else
 		buffersize = bytesrequested;
-	if (buffersize > INT_MAX) {
+	if (buffersize > PY_SSIZE_T_MAX) {
 		PyErr_SetString(PyExc_OverflowError,
 	"requested number of bytes is more than a Python string can hold");
 		return NULL;
@@ -1093,7 +1098,7 @@
 		assert(*(pvend-1) == '\0');
 		increment = total_v_size >> 2;	/* mild exponential growth */
 		total_v_size += increment;
-		if (total_v_size > INT_MAX) {
+		if (total_v_size > PY_SSIZE_T_MAX) {
 			PyErr_SetString(PyExc_OverflowError,
 			    "line is longer than a Python string can hold");
 			Py_DECREF(v);
@@ -1204,7 +1209,7 @@
 		used_v_size = total_v_size;
 		increment = total_v_size >> 2; /* mild exponential growth */
 		total_v_size += increment;
-		if (total_v_size > INT_MAX) {
+		if (total_v_size > PY_SSIZE_T_MAX) {
 			PyErr_SetString(PyExc_OverflowError,
 			    "line is longer than a Python string can hold");
 			Py_DECREF(v);
@@ -1391,12 +1396,12 @@
 			goto cleanup;
 		}
 		totalread += nread;
-		p = memchr(buffer+nfilled, '\n', nread);
+		p = (char *)memchr(buffer+nfilled, '\n', nread);
 		if (p == NULL) {
 			/* Need a larger buffer to fit this line */
 			nfilled += nread;
 			buffersize *= 2;
-			if (buffersize > INT_MAX) {
+			if (buffersize > PY_SSIZE_T_MAX) {
 				PyErr_SetString(PyExc_OverflowError,
 			    "line is longer than a Python string can hold");
 				goto error;
@@ -1431,7 +1436,7 @@
 			if (err != 0)
 				goto error;
 			q = p;
-			p = memchr(q, '\n', end-q);
+			p = (char *)memchr(q, '\n', end-q);
 		} while (p != NULL);
 		/* Move the remaining incomplete line to the start */
 		nfilled = end-q;
@@ -1790,7 +1795,7 @@
 
 /* Make sure that file has a readahead buffer with at least one byte
    (unless at EOF) and no more than bufsize.  Returns negative value on
-   error */
+   error, will set MemoryError if bufsize bytes cannot be allocated. */
 static int
 readahead(PyFileObject *f, int bufsize)
 {
@@ -1802,7 +1807,8 @@
 		else
 			drop_readahead(f);
 	}
-	if ((f->f_buf = PyMem_Malloc(bufsize)) == NULL) {
+	if ((f->f_buf = (char *)PyMem_Malloc(bufsize)) == NULL) {
+		PyErr_NoMemory();
 		return -1;
 	}
 	Py_BEGIN_ALLOW_THREADS
@@ -1844,7 +1850,7 @@
 	if (len == 0)
 		return (PyStringObject *)
 			PyString_FromStringAndSize(NULL, skip);
-	bufptr = memchr(f->f_bufptr, '\n', len);
+	bufptr = (char *)memchr(f->f_bufptr, '\n', len);
 	if (bufptr != NULL) {
 		bufptr++;			/* Count the '\n' */
 		len = bufptr - f->f_bufptr;
@@ -2056,7 +2062,7 @@
 	0,					/* tp_descr_get */
 	0,					/* tp_descr_set */
 	0,					/* tp_dictoffset */
-	(initproc)file_init,			/* tp_init */
+	file_init,				/* tp_init */
 	PyType_GenericAlloc,			/* tp_alloc */
 	file_new,				/* tp_new */
 	PyObject_Del,                           /* tp_free */
@@ -2432,3 +2438,8 @@
 	f->f_skipnextlf = skipnextlf;
 	return dst - buf;
 }
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/Objects/floatobject.c b/Objects/floatobject.c
index 20ed86e..8708690 100644
--- a/Objects/floatobject.c
+++ b/Objects/floatobject.c
@@ -97,7 +97,7 @@
 	}
 #ifdef Py_USING_UNICODE
 	else if (PyUnicode_Check(v)) {
-		if (PyUnicode_GET_SIZE(v) >= sizeof(s_buffer)) {
+		if (PyUnicode_GET_SIZE(v) >= (Py_ssize_t)sizeof(s_buffer)) {
 			PyErr_SetString(PyExc_ValueError,
 				"Unicode float() literal too long to convert");
 			return NULL;
@@ -940,21 +940,21 @@
 static PyObject *
 float_subtype_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 {
-	PyObject *tmp, *new;
+	PyObject *tmp, *newobj;
 
 	assert(PyType_IsSubtype(type, &PyFloat_Type));
 	tmp = float_new(&PyFloat_Type, args, kwds);
 	if (tmp == NULL)
 		return NULL;
 	assert(PyFloat_CheckExact(tmp));
-	new = type->tp_alloc(type, 0);
-	if (new == NULL) {
+	newobj = type->tp_alloc(type, 0);
+	if (newobj == NULL) {
 		Py_DECREF(tmp);
 		return NULL;
 	}
-	((PyFloatObject *)new)->ob_fval = ((PyFloatObject *)tmp)->ob_fval;
+	((PyFloatObject *)newobj)->ob_fval = ((PyFloatObject *)tmp)->ob_fval;
 	Py_DECREF(tmp);
-	return new;
+	return newobj;
 }
 
 static PyObject *
@@ -1106,12 +1106,12 @@
 
 
 static PyNumberMethods float_as_number = {
-	(binaryfunc)float_add, /*nb_add*/
-	(binaryfunc)float_sub, /*nb_subtract*/
-	(binaryfunc)float_mul, /*nb_multiply*/
-	(binaryfunc)float_rem, /*nb_remainder*/
-	(binaryfunc)float_divmod, /*nb_divmod*/
-	(ternaryfunc)float_pow, /*nb_power*/
+	float_add, 	/*nb_add*/
+	float_sub, 	/*nb_subtract*/
+	float_mul, 	/*nb_multiply*/
+	float_rem, 	/*nb_remainder*/
+	float_divmod, 	/*nb_divmod*/
+	float_pow, 	/*nb_power*/
 	(unaryfunc)float_neg, /*nb_negative*/
 	(unaryfunc)float_pos, /*nb_positive*/
 	(unaryfunc)float_abs, /*nb_absolute*/
@@ -1122,10 +1122,10 @@
 	0,		/*nb_and*/
 	0,		/*nb_xor*/
 	0,		/*nb_or*/
-	(coercion)float_coerce, /*nb_coerce*/
-	(unaryfunc)float_int, /*nb_int*/
-	(unaryfunc)float_long, /*nb_long*/
-	(unaryfunc)float_float, /*nb_float*/
+	float_coerce, 	/*nb_coerce*/
+	float_int, 	/*nb_int*/
+	float_long, 	/*nb_long*/
+	float_float,	/*nb_float*/
 	0,		/* nb_oct */
 	0,		/* nb_hex */
 	0,		/* nb_inplace_add */
@@ -1170,7 +1170,7 @@
 	float_doc,				/* tp_doc */
  	0,					/* tp_traverse */
 	0,					/* tp_clear */
-	(richcmpfunc)float_richcompare,		/* tp_richcompare */
+	float_richcompare,			/* tp_richcompare */
 	0,					/* tp_weaklistoffset */
 	0,					/* tp_iter */
 	0,					/* tp_iternext */
diff --git a/Objects/frameobject.c b/Objects/frameobject.c
index 6e3f297..9aabc7a 100644
--- a/Objects/frameobject.c
+++ b/Objects/frameobject.c
@@ -1,4 +1,3 @@
-
 /* Frame object implementation */
 
 #include "Python.h"
@@ -333,7 +332,7 @@
 
 	Py_XINCREF(v);
 	f->f_trace = v;
-	
+
 	if (v != NULL)
 		f->f_lineno = PyCode_Addr2Line(f->f_code, f->f_lasti);
 
@@ -399,7 +398,7 @@
 		for (p = f->f_valuestack; p < f->f_stacktop; p++)
 			Py_XDECREF(*p);
 	}
-	
+
 	Py_XDECREF(f->f_back);
 	Py_DECREF(f->f_code);
 	Py_DECREF(f->f_builtins);
@@ -423,30 +422,28 @@
 frame_traverse(PyFrameObject *f, visitproc visit, void *arg)
 {
 	PyObject **fastlocals, **p;
-	int i, err, slots;
-#define VISIT(o) if (o) {if ((err = visit((PyObject *)(o), arg))) return err;}
+	int i, slots;
 
-	VISIT(f->f_back);
-	VISIT(f->f_code);
-	VISIT(f->f_builtins);
-	VISIT(f->f_globals);
-	VISIT(f->f_locals);
-	VISIT(f->f_trace);
-	VISIT(f->f_exc_type);
-	VISIT(f->f_exc_value);
-	VISIT(f->f_exc_traceback);
+	Py_VISIT(f->f_back);
+	Py_VISIT(f->f_code);
+	Py_VISIT(f->f_builtins);
+	Py_VISIT(f->f_globals);
+	Py_VISIT(f->f_locals);
+	Py_VISIT(f->f_trace);
+	Py_VISIT(f->f_exc_type);
+	Py_VISIT(f->f_exc_value);
+	Py_VISIT(f->f_exc_traceback);
 
 	/* locals */
 	slots = f->f_nlocals + f->f_ncells + f->f_nfreevars;
 	fastlocals = f->f_localsplus;
-	for (i = slots; --i >= 0; ++fastlocals) {
-		VISIT(*fastlocals);
-	}
+	for (i = slots; --i >= 0; ++fastlocals)
+		Py_VISIT(*fastlocals);
 
 	/* stack */
 	if (f->f_stacktop != NULL) {
 		for (p = f->f_valuestack; p < f->f_stacktop; p++)
-			VISIT(*p);
+			Py_VISIT(*p);
 	}
 	return 0;
 }
@@ -454,37 +451,32 @@
 static void
 frame_clear(PyFrameObject *f)
 {
-	PyObject **fastlocals, **p;
+	PyObject **fastlocals, **p, **oldtop;
 	int i, slots;
 
-	Py_XDECREF(f->f_exc_type);
-	f->f_exc_type = NULL;
+	/* Before anything else, make sure that this frame is clearly marked
+         * as being defunct!  Else, e.g., a generator reachable from this
+         * frame may also point to this frame, believe itself to still be
+         * active, and try cleaning up this frame again.
+         */
+	oldtop = f->f_stacktop;
+        f->f_stacktop = NULL;
 
-	Py_XDECREF(f->f_exc_value);
-	f->f_exc_value = NULL;
-
-	Py_XDECREF(f->f_exc_traceback);
-	f->f_exc_traceback = NULL;
-
-	Py_XDECREF(f->f_trace);
-	f->f_trace = NULL;
+	Py_CLEAR(f->f_exc_type);
+	Py_CLEAR(f->f_exc_value);
+	Py_CLEAR(f->f_exc_traceback);
+	Py_CLEAR(f->f_trace);
 
 	/* locals */
 	slots = f->f_nlocals + f->f_ncells + f->f_nfreevars;
 	fastlocals = f->f_localsplus;
-	for (i = slots; --i >= 0; ++fastlocals) {
-		if (*fastlocals != NULL) {
-			Py_XDECREF(*fastlocals);
-			*fastlocals = NULL;
-		}
-	}
+	for (i = slots; --i >= 0; ++fastlocals)
+		Py_CLEAR(*fastlocals);
 
 	/* stack */
-	if (f->f_stacktop != NULL) {
-		for (p = f->f_valuestack; p < f->f_stacktop; p++) {
-			Py_XDECREF(*p);
-			*p = NULL;
-		}
+	if (oldtop != NULL) {
+		for (p = f->f_valuestack; p < oldtop; p++)
+			Py_CLEAR(*p);
 	}
 }
 
@@ -534,7 +526,7 @@
 }
 
 PyFrameObject *
-PyFrame_New(PyThreadState *tstate, PyCodeObject *code, PyObject *globals, 
+PyFrame_New(PyThreadState *tstate, PyCodeObject *code, PyObject *globals,
 	    PyObject *locals)
 {
 	PyFrameObject *back = tstate->frame;
@@ -563,10 +555,10 @@
 				builtins = NULL;
 		}
 		if (builtins == NULL) {
-			/* No builtins!  Make up a minimal one 
+			/* No builtins!  Make up a minimal one
 			   Give them 'None', at least. */
 			builtins = PyDict_New();
-			if (builtins == NULL || 
+			if (builtins == NULL ||
 			    PyDict_SetItemString(
 				    builtins, "None", Py_None) < 0)
 				return NULL;
@@ -611,7 +603,7 @@
 	Py_INCREF(globals);
 	f->f_globals = globals;
 	/* Most functions have CO_NEWLOCALS and CO_OPTIMIZED set. */
-	if ((code->co_flags & (CO_NEWLOCALS | CO_OPTIMIZED)) == 
+	if ((code->co_flags & (CO_NEWLOCALS | CO_OPTIMIZED)) ==
 		(CO_NEWLOCALS | CO_OPTIMIZED))
 		locals = NULL; /* PyFrame_FastToLocals() will set. */
 	else if (code->co_flags & CO_NEWLOCALS) {
@@ -749,7 +741,7 @@
 		return;
 	PyErr_Fetch(&error_type, &error_value, &error_traceback);
 	fast = f->f_localsplus;
-	j = PyTuple_Size(map);
+	j = PyTuple_GET_SIZE(map);
 	if (j > f->f_nlocals)
 		j = f->f_nlocals;
 	if (f->f_nlocals)
@@ -759,10 +751,10 @@
 		      && PyTuple_Check(f->f_code->co_freevars))) {
 			return;
 		}
-		map_to_dict(f->f_code->co_cellvars, 
+		map_to_dict(f->f_code->co_cellvars,
 			    PyTuple_GET_SIZE(f->f_code->co_cellvars),
 			    locals, fast + f->f_nlocals, 1);
-		map_to_dict(f->f_code->co_freevars, 
+		map_to_dict(f->f_code->co_freevars,
 			    PyTuple_GET_SIZE(f->f_code->co_freevars),
 			    locals, fast + f->f_nlocals + f->f_ncells, 1);
 	}
@@ -787,7 +779,7 @@
 		return;
 	PyErr_Fetch(&error_type, &error_value, &error_traceback);
 	fast = f->f_localsplus;
-	j = PyTuple_Size(map);
+	j = PyTuple_GET_SIZE(map);
 	if (j > f->f_nlocals)
 		j = f->f_nlocals;
 	if (f->f_nlocals)
@@ -796,12 +788,12 @@
 		if (!(PyTuple_Check(f->f_code->co_cellvars)
 		      && PyTuple_Check(f->f_code->co_freevars)))
 			return;
-		dict_to_map(f->f_code->co_cellvars, 
+		dict_to_map(f->f_code->co_cellvars,
 			    PyTuple_GET_SIZE(f->f_code->co_cellvars),
 			    locals, fast + f->f_nlocals, 1, clear);
-		dict_to_map(f->f_code->co_freevars, 
+		dict_to_map(f->f_code->co_freevars,
 			    PyTuple_GET_SIZE(f->f_code->co_freevars),
-			    locals, fast + f->f_nlocals + f->f_ncells, 1, 
+			    locals, fast + f->f_nlocals + f->f_ncells, 1,
 			    clear);
 	}
 	PyErr_Restore(error_type, error_value, error_traceback);
diff --git a/Objects/funcobject.c b/Objects/funcobject.c
index 00ae2eb..59cb519 100644
--- a/Objects/funcobject.c
+++ b/Objects/funcobject.c
@@ -466,47 +466,14 @@
 static int
 func_traverse(PyFunctionObject *f, visitproc visit, void *arg)
 {
-	int err;
-	if (f->func_code) {
-		err = visit(f->func_code, arg);
-		if (err)
-			return err;
-	}
-	if (f->func_globals) {
-		err = visit(f->func_globals, arg);
-		if (err)
-			return err;
-	}
-	if (f->func_module) {
-		err = visit(f->func_module, arg);
-		if (err)
-			return err;
-	}
-	if (f->func_defaults) {
-		err = visit(f->func_defaults, arg);
-		if (err)
-			return err;
-	}
-	if (f->func_doc) {
-		err = visit(f->func_doc, arg);
-		if (err)
-			return err;
-	}
-	if (f->func_name) {
-		err = visit(f->func_name, arg);
-		if (err)
-			return err;
-	}
-	if (f->func_dict) {
-		err = visit(f->func_dict, arg);
-		if (err)
-			return err;
-	}
-	if (f->func_closure) {
-		err = visit(f->func_closure, arg);
-		if (err)
-			return err;
-	}
+	Py_VISIT(f->func_code);
+	Py_VISIT(f->func_globals);
+	Py_VISIT(f->func_module);
+	Py_VISIT(f->func_defaults);
+	Py_VISIT(f->func_doc);
+	Py_VISIT(f->func_name);
+	Py_VISIT(f->func_dict);
+	Py_VISIT(f->func_closure);
 	return 0;
 }
 
@@ -647,17 +614,14 @@
 static int
 cm_traverse(classmethod *cm, visitproc visit, void *arg)
 {
-	if (!cm->cm_callable)
-		return 0;
-	return visit(cm->cm_callable, arg);
+	Py_VISIT(cm->cm_callable);
+	return 0;
 }
 
 static int
 cm_clear(classmethod *cm)
 {
-	Py_XDECREF(cm->cm_callable);
-	cm->cm_callable = NULL;
-
+	Py_CLEAR(cm->cm_callable);
 	return 0;
 }
 
@@ -808,9 +772,8 @@
 static int
 sm_traverse(staticmethod *sm, visitproc visit, void *arg)
 {
-	if (!sm->sm_callable)
-		return 0;
-	return visit(sm->sm_callable, arg);
+	Py_VISIT(sm->sm_callable);
+	return 0;
 }
 
 static int
diff --git a/Objects/genobject.c b/Objects/genobject.c
index 3f6ef85..15e53dd 100644
--- a/Objects/genobject.c
+++ b/Objects/genobject.c
@@ -5,11 +5,13 @@
 #include "genobject.h"
 #include "ceval.h"
 #include "structmember.h"
+#include "opcode.h"
 
 static int
 gen_traverse(PyGenObject *gen, visitproc visit, void *arg)
 {
-	return visit((PyObject *)gen->gi_frame, arg);
+	Py_VISIT((PyObject *)gen->gi_frame);
+	return 0;
 }
 
 static void
@@ -20,12 +22,11 @@
 	_PyObject_GC_UNTRACK(gen);
 
 	if (gen->gi_weakreflist != NULL)
-		PyObject_ClearWeakRefs((PyObject *) gen);
-
+		PyObject_ClearWeakRefs(self);
 
 	_PyObject_GC_TRACK(self);
 
-	if (gen->gi_frame->f_stacktop!=NULL) {
+	if (gen->gi_frame != NULL && gen->gi_frame->f_stacktop != NULL) {
 		/* Generator is paused, so we need to close */
 		gen->ob_type->tp_del(self);
 		if (self->ob_refcnt > 0)
@@ -33,7 +34,7 @@
 	}
 
 	_PyObject_GC_UNTRACK(self);
-	Py_XDECREF(gen->gi_frame);
+	Py_CLEAR(gen->gi_frame);
 	PyObject_GC_Del(gen);
 }
 
@@ -50,16 +51,18 @@
 				"generator already executing");
 		return NULL;
 	}
-	if ((PyObject *)f == Py_None || f->f_stacktop == NULL) {
+	if (f==NULL || f->f_stacktop == NULL) {
 		/* Only set exception if called from send() */
-		if (arg && !exc) PyErr_SetNone(PyExc_StopIteration);
+		if (arg && !exc)
+			PyErr_SetNone(PyExc_StopIteration);
 		return NULL;
 	}
 
 	if (f->f_lasti == -1) {
 		if (arg && arg != Py_None) {
 			PyErr_SetString(PyExc_TypeError,
-				"can't send non-None value to a just-started generator");
+					"can't send non-None value to a "
+					"just-started generator");
 			return NULL;
 		}
 	} else {
@@ -91,21 +94,22 @@
 		Py_DECREF(result);
 		result = NULL;
 		/* Set exception if not called by gen_iternext() */
-		if (arg) PyErr_SetNone(PyExc_StopIteration);
+		if (arg)
+			PyErr_SetNone(PyExc_StopIteration);
 	}
 
 	if (!result || f->f_stacktop == NULL) {
 		/* generator can't be rerun, so release the frame */
 		Py_DECREF(f);
-		gen->gi_frame = (PyFrameObject *)Py_None;
-		Py_INCREF(Py_None);
+		gen->gi_frame = NULL;
 	}
 
 	return result;
 }
 
 PyDoc_STRVAR(send_doc,
-"send(arg) -> send 'arg' into generator, return next yielded value or raise StopIteration.");
+"send(arg) -> send 'arg' into generator,\n\
+return next yielded value or raise StopIteration.");
 
 static PyObject *
 gen_send(PyGenObject *gen, PyObject *arg)
@@ -125,11 +129,11 @@
 	if (retval) {
 		Py_DECREF(retval);
 		PyErr_SetString(PyExc_RuntimeError,
-			"generator ignored GeneratorExit");
+				"generator ignored GeneratorExit");
 		return NULL;
 	}
-	if ( PyErr_ExceptionMatches(PyExc_StopIteration) 
-	     || PyErr_ExceptionMatches(PyExc_GeneratorExit) ) 
+	if (PyErr_ExceptionMatches(PyExc_StopIteration)
+	    || PyErr_ExceptionMatches(PyExc_GeneratorExit))
 	{
 		PyErr_Clear();	/* ignore these errors */
 		Py_INCREF(Py_None);
@@ -145,7 +149,7 @@
         PyObject *error_type, *error_value, *error_traceback;
 	PyGenObject *gen = (PyGenObject *)self;
 
-	if ((PyObject *)gen->gi_frame == Py_None || gen->gi_frame->f_stacktop==NULL)
+	if (gen->gi_frame == NULL || gen->gi_frame->f_stacktop == NULL)
 		/* Generator isn't paused, so no need to close */
 		return;
 
@@ -156,10 +160,10 @@
         /* Save the current exception, if any. */
         PyErr_Fetch(&error_type, &error_value, &error_traceback);
 
-	res = gen_close((PyGenObject *)self, NULL);
+	res = gen_close(gen, NULL);
 
 	if (res == NULL)
-		PyErr_WriteUnraisable((PyObject *)self);
+		PyErr_WriteUnraisable(self);
 	else
 		Py_DECREF(res);
 
@@ -181,7 +185,7 @@
                 _Py_NewReference(self);
                 self->ob_refcnt = refcnt;
         }
-        assert(!PyType_IS_GC(self->ob_type) ||
+        assert(PyType_IS_GC(self->ob_type) &&
                _Py_AS_GC(self)->gc.gc_refs != _PyGC_REFS_UNTRACKED);
 
         /* If Py_REF_DEBUG, _Py_NewReference bumped _Py_RefTotal, so
@@ -202,10 +206,11 @@
 
 
 PyDoc_STRVAR(throw_doc,
-"throw(typ[,val[,tb]]) -> raise exception in generator, return next yielded value or raise StopIteration.");
+"throw(typ[,val[,tb]]) -> raise exception in generator,\n\
+return next yielded value or raise StopIteration.");
 
 static PyObject *
-gen_throw(PyGenObject *gen, PyObject *args) 
+gen_throw(PyGenObject *gen, PyObject *args)
 {
 	PyObject *typ;
 	PyObject *tb = NULL;
@@ -216,10 +221,8 @@
 
 	/* First, check the traceback argument, replacing None with
 	   NULL. */
-	if (tb == Py_None) {
-		Py_DECREF(tb);
+	if (tb == Py_None)
 		tb = NULL;
-	}
 	else if (tb != NULL && !PyTraceBack_Check(tb)) {
 		PyErr_SetString(PyExc_TypeError,
 			"throw() third argument must be a traceback object");
@@ -249,7 +252,10 @@
 			Py_INCREF(typ);
 		}
 	}
-	else {
+
+	/* Allow raising builtin string exceptions */
+
+	else if (!PyString_CheckExact(typ)) {
 		/* Not something you can raise.  throw() fails. */
 		PyErr_Format(PyExc_TypeError,
 			     "exceptions must be classes, or instances, not %s",
@@ -257,7 +263,7 @@
 			goto failed_throw;
 	}
 
-	PyErr_Restore(typ,val,tb);
+	PyErr_Restore(typ, val, tb);
 	return gen_send_ex(gen, Py_None, 1);
 
 failed_throw:
@@ -324,7 +330,7 @@
 	0,					/* tp_getset */
 	0,					/* tp_base */
 	0,					/* tp_dict */
-        
+
 	0,					/* tp_descr_get */
 	0,					/* tp_descr_set */
 	0,					/* tp_dictoffset */
@@ -355,3 +361,23 @@
 	_PyObject_GC_TRACK(gen);
 	return (PyObject *)gen;
 }
+
+int
+PyGen_NeedsFinalizing(PyGenObject *gen)
+{
+	int i;
+	PyFrameObject *f = gen->gi_frame;
+
+	if (f == NULL || f->f_stacktop == NULL || f->f_iblock <= 0)
+		return 0; /* no frame or empty blockstack == no finalization */
+
+	/* Any block type besides a loop requires cleanup. */
+	i = f->f_iblock;
+	while (--i >= 0) {
+		if (f->f_blockstack[i].b_type != SETUP_LOOP)
+			return 1;
+	}
+
+	/* No blocks except loops, it's safe to skip finalization. */
+	return 0;
+}
diff --git a/Objects/intobject.c b/Objects/intobject.c
index c734840..fb3221f 100644
--- a/Objects/intobject.c
+++ b/Objects/intobject.c
@@ -255,18 +255,18 @@
 	if (op == NULL || (nb = op->ob_type->tp_as_number) == NULL ||
 	    nb->nb_int == NULL) {
 		PyErr_SetString(PyExc_TypeError, "an integer is required");
-		return -1;
+		return (unsigned long)-1;
 	}
 
 	io = (PyIntObject*) (*nb->nb_int) (op);
 	if (io == NULL)
-		return -1;
+		return (unsigned long)-1;
 	if (!PyInt_Check(io)) {
 		if (PyLong_Check(io)) {
 			val = PyLong_AsUnsignedLongMask((PyObject *)io);
 			Py_DECREF(io);
 			if (PyErr_Occurred())
-				return -1;
+				return (unsigned long)-1;
 			return val;
 		}
 		else
@@ -274,7 +274,7 @@
 			Py_DECREF(io);
 			PyErr_SetString(PyExc_TypeError,
 					"nb_int should return int object");
-			return -1;
+			return (unsigned long)-1;
 		}
 	}
 
@@ -300,18 +300,18 @@
 	if (op == NULL || (nb = op->ob_type->tp_as_number) == NULL ||
 	    nb->nb_int == NULL) {
 		PyErr_SetString(PyExc_TypeError, "an integer is required");
-		return -1;
+		return (unsigned PY_LONG_LONG)-1;
 	}
 
 	io = (PyIntObject*) (*nb->nb_int) (op);
 	if (io == NULL)
-		return -1;
+		return (unsigned PY_LONG_LONG)-1;
 	if (!PyInt_Check(io)) {
 		if (PyLong_Check(io)) {
 			val = PyLong_AsUnsignedLongLongMask((PyObject *)io);
 			Py_DECREF(io);
 			if (PyErr_Occurred())
-				return -1;
+				return (unsigned PY_LONG_LONG)-1;
 			return val;
 		}
 		else
@@ -319,7 +319,7 @@
 			Py_DECREF(io);
 			PyErr_SetString(PyExc_TypeError,
 					"nb_int should return int object");
-			return -1;
+			return (unsigned PY_LONG_LONG)-1;
 		}
 	}
 
@@ -335,7 +335,8 @@
 {
 	char *end;
 	long x;
-	char buffer[256]; /* For errors */
+	Py_ssize_t slen;
+	PyObject *sobj, *srepr;
 
 	if ((base != 0 && base < 2) || base > 36) {
 		PyErr_SetString(PyExc_ValueError,
@@ -359,9 +360,18 @@
 		end++;
 	if (*end != '\0') {
   bad:
-		PyOS_snprintf(buffer, sizeof(buffer),
-			      "invalid literal for int(): %.200s", s);
-		PyErr_SetString(PyExc_ValueError, buffer);
+		slen = strlen(s) < 200 ? strlen(s) : 200;
+		sobj = PyString_FromStringAndSize(s, slen);
+		if (sobj == NULL)
+			return NULL;
+		srepr = PyObject_Repr(sobj);
+		Py_DECREF(sobj);
+		if (srepr == NULL)
+			return NULL;
+		PyErr_Format(PyExc_ValueError,
+			     "invalid literal for int() with base %d: %s",
+			     base, PyString_AS_STRING(srepr));
+		Py_DECREF(srepr);
 		return NULL;
 	}
 	else if (errno != 0)
@@ -376,7 +386,7 @@
 PyInt_FromUnicode(Py_UNICODE *s, Py_ssize_t length, int base)
 {
 	PyObject *result;
-	char *buffer = PyMem_MALLOC(length+1);
+	char *buffer = (char *)PyMem_MALLOC(length+1);
 
 	if (buffer == NULL)
 		return NULL;
@@ -961,7 +971,7 @@
 static PyObject *
 int_subtype_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 {
-	PyObject *tmp, *new;
+	PyObject *tmp, *newobj;
 	long ival;
 
 	assert(PyType_IsSubtype(type, &PyInt_Type));
@@ -978,14 +988,14 @@
 		ival = ((PyIntObject *)tmp)->ob_ival;
 	}
 
-	new = type->tp_alloc(type, 0);
-	if (new == NULL) {
+	newobj = type->tp_alloc(type, 0);
+	if (newobj == NULL) {
 		Py_DECREF(tmp);
 		return NULL;
 	}
-	((PyIntObject *)new)->ob_ival = ival;
+	((PyIntObject *)newobj)->ob_ival = ival;
 	Py_DECREF(tmp);
-	return new;
+	return newobj;
 }
 
 static PyObject *
@@ -1046,7 +1056,7 @@
 	int_true_divide,	/* nb_true_divide */
 	0,			/* nb_inplace_floor_divide */
 	0,			/* nb_inplace_true_divide */
-	(lenfunc)PyInt_AsSsize_t, /* nb_index */
+	PyInt_AsSsize_t,	/* nb_index */
 };
 
 PyTypeObject PyInt_Type = {
@@ -1119,6 +1129,7 @@
 	PyIntObject *p;
 	PyIntBlock *list, *next;
 	int i;
+	unsigned int ctr;
 	int bc, bf;	/* block count, number of freed blocks */
 	int irem, isum;	/* remaining unfreed ints per block, total */
 
@@ -1141,9 +1152,9 @@
 	while (list != NULL) {
 		bc++;
 		irem = 0;
-		for (i = 0, p = &list->objects[0];
-		     i < N_INTOBJECTS;
-		     i++, p++) {
+		for (ctr = 0, p = &list->objects[0];
+		     ctr < N_INTOBJECTS;
+		     ctr++, p++) {
 			if (PyInt_CheckExact(p) && p->ob_refcnt != 0)
 				irem++;
 		}
@@ -1151,9 +1162,9 @@
 		if (irem) {
 			list->next = block_list;
 			block_list = list;
-			for (i = 0, p = &list->objects[0];
-			     i < N_INTOBJECTS;
-			     i++, p++) {
+			for (ctr = 0, p = &list->objects[0];
+			     ctr < N_INTOBJECTS;
+			     ctr++, p++) {
 				if (!PyInt_CheckExact(p) ||
 				    p->ob_refcnt == 0) {
 					p->ob_type = (struct _typeobject *)
@@ -1194,9 +1205,9 @@
 	if (Py_VerboseFlag > 1) {
 		list = block_list;
 		while (list != NULL) {
-			for (i = 0, p = &list->objects[0];
-			     i < N_INTOBJECTS;
-			     i++, p++) {
+			for (ctr = 0, p = &list->objects[0];
+			     ctr < N_INTOBJECTS;
+			     ctr++, p++) {
 				if (PyInt_CheckExact(p) && p->ob_refcnt != 0)
 					/* XXX(twouters) cast refcount to
 					   long until %zd is universally
diff --git a/Objects/iterobject.c b/Objects/iterobject.c
index 51f551b..cf839f4 100644
--- a/Objects/iterobject.c
+++ b/Objects/iterobject.c
@@ -38,9 +38,8 @@
 static int
 iter_traverse(seqiterobject *it, visitproc visit, void *arg)
 {
-	if (it->it_seq == NULL)
-		return 0;
-	return visit(it->it_seq, arg);
+	Py_VISIT(it->it_seq);
+	return 0;
 }
 
 static PyObject *
@@ -123,7 +122,7 @@
 	0,					/* tp_richcompare */
 	0,					/* tp_weaklistoffset */
 	PyObject_SelfIter,			/* tp_iter */
-	(iternextfunc)iter_iternext,		/* tp_iternext */
+	iter_iternext,				/* tp_iternext */
 	seqiter_methods,			/* tp_methods */
 	0,					/* tp_members */
 };
@@ -162,11 +161,8 @@
 static int
 calliter_traverse(calliterobject *it, visitproc visit, void *arg)
 {
-	int err;
-	if (it->it_callable != NULL && (err = visit(it->it_callable, arg)))
-		return err;
-	if (it->it_sentinel != NULL && (err = visit(it->it_sentinel, arg)))
-		return err;
+	Py_VISIT(it->it_callable);
+	Py_VISIT(it->it_sentinel);
 	return 0;
 }
 
diff --git a/Objects/listobject.c b/Objects/listobject.c
index 966d659..105df4c 100644
--- a/Objects/listobject.c
+++ b/Objects/listobject.c
@@ -181,7 +181,7 @@
 		PyErr_BadInternalCall();
 		return -1;
 	}
-	if (n == INT_MAX) {
+	if (n == PY_SSIZE_T_MAX) {
 		PyErr_SetString(PyExc_OverflowError,
 			"cannot add more objects to list");
 		return -1;
@@ -221,7 +221,7 @@
 	Py_ssize_t n = PyList_GET_SIZE(self);
 
 	assert (v != NULL);
-	if (n == INT_MAX) {
+	if (n == PY_SSIZE_T_MAX) {
 		PyErr_SetString(PyExc_OverflowError,
 			"cannot add more objects to list");
 		return -1;
@@ -1805,28 +1805,11 @@
 	PyObject *value;
 } sortwrapperobject;
 
-static PyTypeObject sortwrapper_type;
-
-static PyObject *
-sortwrapper_richcompare(sortwrapperobject *a, sortwrapperobject *b, int op)
-{
-	if (!PyObject_TypeCheck(b, &sortwrapper_type)) {
-		PyErr_SetString(PyExc_TypeError,
-			"expected a sortwrapperobject");
-		return NULL;
-	}
-	return PyObject_RichCompare(a->key, b->key, op);
-}
-
-static void
-sortwrapper_dealloc(sortwrapperobject *so)
-{
-	Py_XDECREF(so->key);
-	Py_XDECREF(so->value);
-	PyObject_Del(so);
-}
-
 PyDoc_STRVAR(sortwrapper_doc, "Object wrapper with a custom sort key.");
+static PyObject *
+sortwrapper_richcompare(sortwrapperobject *, sortwrapperobject *, int);
+static void
+sortwrapper_dealloc(sortwrapperobject *);
 
 static PyTypeObject sortwrapper_type = {
 	PyObject_HEAD_INIT(&PyType_Type)
@@ -1858,6 +1841,26 @@
 	(richcmpfunc)sortwrapper_richcompare,	/* tp_richcompare */
 };
 
+
+static PyObject *
+sortwrapper_richcompare(sortwrapperobject *a, sortwrapperobject *b, int op)
+{
+	if (!PyObject_TypeCheck(b, &sortwrapper_type)) {
+		PyErr_SetString(PyExc_TypeError,
+			"expected a sortwrapperobject");
+		return NULL;
+	}
+	return PyObject_RichCompare(a->key, b->key, op);
+}
+
+static void
+sortwrapper_dealloc(sortwrapperobject *so)
+{
+	Py_XDECREF(so->key);
+	Py_XDECREF(so->value);
+	PyObject_Del(so);
+}
+
 /* Returns a new reference to a sortwrapper.
    Consumes the references to the two underlying objects. */
 
@@ -2271,16 +2274,9 @@
 list_traverse(PyListObject *o, visitproc visit, void *arg)
 {
 	Py_ssize_t i;
-	PyObject *x;
 
-	for (i = o->ob_size; --i >= 0; ) {
-		x = o->ob_item[i];
-		if (x != NULL) {
-			int err = visit(x, arg);
-			if (err)
-				return err;
-		}
-	}
+	for (i = o->ob_size; --i >= 0; )
+		Py_VISIT(o->ob_item[i]);
 	return 0;
 }
 
@@ -2698,78 +2694,11 @@
 	PyListObject *it_seq; /* Set to NULL when iterator is exhausted */
 } listiterobject;
 
-PyTypeObject PyListIter_Type;
-
-static PyObject *
-list_iter(PyObject *seq)
-{
-	listiterobject *it;
-
-	if (!PyList_Check(seq)) {
-		PyErr_BadInternalCall();
-		return NULL;
-	}
-	it = PyObject_GC_New(listiterobject, &PyListIter_Type);
-	if (it == NULL)
-		return NULL;
-	it->it_index = 0;
-	Py_INCREF(seq);
-	it->it_seq = (PyListObject *)seq;
-	_PyObject_GC_TRACK(it);
-	return (PyObject *)it;
-}
-
-static void
-listiter_dealloc(listiterobject *it)
-{
-	_PyObject_GC_UNTRACK(it);
-	Py_XDECREF(it->it_seq);
-	PyObject_GC_Del(it);
-}
-
-static int
-listiter_traverse(listiterobject *it, visitproc visit, void *arg)
-{
-	if (it->it_seq == NULL)
-		return 0;
-	return visit((PyObject *)it->it_seq, arg);
-}
-
-static PyObject *
-listiter_next(listiterobject *it)
-{
-	PyListObject *seq;
-	PyObject *item;
-
-	assert(it != NULL);
-	seq = it->it_seq;
-	if (seq == NULL)
-		return NULL;
-	assert(PyList_Check(seq));
-
-	if (it->it_index < PyList_GET_SIZE(seq)) {
-		item = PyList_GET_ITEM(seq, it->it_index);
-		++it->it_index;
-		Py_INCREF(item);
-		return item;
-	}
-
-	Py_DECREF(seq);
-	it->it_seq = NULL;
-	return NULL;
-}
-
-static PyObject *
-listiter_len(listiterobject *it)
-{
-	Py_ssize_t len;
-	if (it->it_seq) {
-		len = PyList_GET_SIZE(it->it_seq) - it->it_index;
-		if (len >= 0)
-			return PyInt_FromSsize_t(len);
-	}
-	return PyInt_FromLong(0);
-}
+static PyObject *list_iter(PyObject *);
+static void listiter_dealloc(listiterobject *);
+static int listiter_traverse(listiterobject *, visitproc, void *);
+static PyObject *listiter_next(listiterobject *);
+static PyObject *listiter_len(listiterobject *);
 
 PyDoc_STRVAR(length_hint_doc, "Private method returning an estimate of len(list(it)).");
 
@@ -2812,6 +2741,76 @@
 	0,					/* tp_members */
 };
 
+
+static PyObject *
+list_iter(PyObject *seq)
+{
+	listiterobject *it;
+
+	if (!PyList_Check(seq)) {
+		PyErr_BadInternalCall();
+		return NULL;
+	}
+	it = PyObject_GC_New(listiterobject, &PyListIter_Type);
+	if (it == NULL)
+		return NULL;
+	it->it_index = 0;
+	Py_INCREF(seq);
+	it->it_seq = (PyListObject *)seq;
+	_PyObject_GC_TRACK(it);
+	return (PyObject *)it;
+}
+
+static void
+listiter_dealloc(listiterobject *it)
+{
+	_PyObject_GC_UNTRACK(it);
+	Py_XDECREF(it->it_seq);
+	PyObject_GC_Del(it);
+}
+
+static int
+listiter_traverse(listiterobject *it, visitproc visit, void *arg)
+{
+	Py_VISIT(it->it_seq);
+	return 0;
+}
+
+static PyObject *
+listiter_next(listiterobject *it)
+{
+	PyListObject *seq;
+	PyObject *item;
+
+	assert(it != NULL);
+	seq = it->it_seq;
+	if (seq == NULL)
+		return NULL;
+	assert(PyList_Check(seq));
+
+	if (it->it_index < PyList_GET_SIZE(seq)) {
+		item = PyList_GET_ITEM(seq, it->it_index);
+		++it->it_index;
+		Py_INCREF(item);
+		return item;
+	}
+
+	Py_DECREF(seq);
+	it->it_seq = NULL;
+	return NULL;
+}
+
+static PyObject *
+listiter_len(listiterobject *it)
+{
+	Py_ssize_t len;
+	if (it->it_seq) {
+		len = PyList_GET_SIZE(it->it_seq) - it->it_index;
+		if (len >= 0)
+			return PyInt_FromSsize_t(len);
+	}
+	return PyInt_FromLong(0);
+}
 /*********************** List Reverse Iterator **************************/
 
 typedef struct {
@@ -2820,69 +2819,11 @@
 	PyListObject *it_seq; /* Set to NULL when iterator is exhausted */
 } listreviterobject;
 
-PyTypeObject PyListRevIter_Type;
-
-static PyObject *
-list_reversed(PyListObject *seq, PyObject *unused)
-{
-	listreviterobject *it;
-
-	it = PyObject_GC_New(listreviterobject, &PyListRevIter_Type);
-	if (it == NULL)
-		return NULL;
-	assert(PyList_Check(seq));
-	it->it_index = PyList_GET_SIZE(seq) - 1;
-	Py_INCREF(seq);
-	it->it_seq = seq;
-	PyObject_GC_Track(it);
-	return (PyObject *)it;
-}
-
-static void
-listreviter_dealloc(listreviterobject *it)
-{
-	PyObject_GC_UnTrack(it);
-	Py_XDECREF(it->it_seq);
-	PyObject_GC_Del(it);
-}
-
-static int
-listreviter_traverse(listreviterobject *it, visitproc visit, void *arg)
-{
-	if (it->it_seq == NULL)
-		return 0;
-	return visit((PyObject *)it->it_seq, arg);
-}
-
-static PyObject *
-listreviter_next(listreviterobject *it)
-{
-	PyObject *item;
-	Py_ssize_t index = it->it_index;
-	PyListObject *seq = it->it_seq;
-
-	if (index>=0 && index < PyList_GET_SIZE(seq)) {
-		item = PyList_GET_ITEM(seq, index);
-		it->it_index--;
-		Py_INCREF(item);
-		return item;
-	}
-	it->it_index = -1;
-	if (seq != NULL) {
-		it->it_seq = NULL;
-		Py_DECREF(seq);
-	}
-	return NULL;
-}
-
-static Py_ssize_t
-listreviter_len(listreviterobject *it)
-{
-	Py_ssize_t len = it->it_index + 1;
-	if (it->it_seq == NULL || PyList_GET_SIZE(it->it_seq) < len)
-		return 0;
-	return len;
-}
+static PyObject *list_reversed(PyListObject *, PyObject *);
+static void listreviter_dealloc(listreviterobject *);
+static int listreviter_traverse(listreviterobject *, visitproc, void *);
+static PyObject *listreviter_next(listreviterobject *);
+static Py_ssize_t listreviter_len(listreviterobject *);
 
 static PySequenceMethods listreviter_as_sequence = {
 	(lenfunc)listreviter_len,	/* sq_length */
@@ -2921,3 +2862,65 @@
 	(iternextfunc)listreviter_next,		/* tp_iternext */
 	0,
 };
+
+static PyObject *
+list_reversed(PyListObject *seq, PyObject *unused)
+{
+	listreviterobject *it;
+
+	it = PyObject_GC_New(listreviterobject, &PyListRevIter_Type);
+	if (it == NULL)
+		return NULL;
+	assert(PyList_Check(seq));
+	it->it_index = PyList_GET_SIZE(seq) - 1;
+	Py_INCREF(seq);
+	it->it_seq = seq;
+	PyObject_GC_Track(it);
+	return (PyObject *)it;
+}
+
+static void
+listreviter_dealloc(listreviterobject *it)
+{
+	PyObject_GC_UnTrack(it);
+	Py_XDECREF(it->it_seq);
+	PyObject_GC_Del(it);
+}
+
+static int
+listreviter_traverse(listreviterobject *it, visitproc visit, void *arg)
+{
+	Py_VISIT(it->it_seq);
+	return 0;
+}
+
+static PyObject *
+listreviter_next(listreviterobject *it)
+{
+	PyObject *item;
+	Py_ssize_t index = it->it_index;
+	PyListObject *seq = it->it_seq;
+
+	if (index>=0 && index < PyList_GET_SIZE(seq)) {
+		item = PyList_GET_ITEM(seq, index);
+		it->it_index--;
+		Py_INCREF(item);
+		return item;
+	}
+	it->it_index = -1;
+	if (seq != NULL) {
+		it->it_seq = NULL;
+		Py_DECREF(seq);
+	}
+	return NULL;
+}
+
+static Py_ssize_t
+listreviter_len(listreviterobject *it)
+{
+	Py_ssize_t len = it->it_index + 1;
+	if (it->it_seq == NULL || PyList_GET_SIZE(it->it_seq) < len)
+		return 0;
+	return len;
+}
+
diff --git a/Objects/longobject.c b/Objects/longobject.c
index 7c5ebc4..3073923 100644
--- a/Objects/longobject.c
+++ b/Objects/longobject.c
@@ -281,7 +281,7 @@
 	if (sign > 0) 
 		return PY_SSIZE_T_MAX;
 	else 
-		return -PY_SSIZE_T_MAX-1;
+		return PY_SSIZE_T_MIN;
 }
 
 /* Get a Py_ssize_t from a long int object.
@@ -301,7 +301,7 @@
 /* Get a Py_ssize_t from a long int object.
    Silently reduce values larger than PY_SSIZE_T_MAX to PY_SSIZE_T_MAX,
    and silently boost values less than -PY_SSIZE_T_MAX-1 to -PY_SSIZE_T_MAX-1.
-   Return 0 on error, 1 on success.
+   On error, return -1 with an exception set.
 */
 
 static Py_ssize_t
@@ -419,7 +419,7 @@
 		digit msd = v->ob_digit[ndigits - 1];
 
 		result = (ndigits - 1) * SHIFT;
-		if (result / SHIFT != ndigits - 1)
+		if (result / SHIFT != (size_t)(ndigits - 1))
 			goto Overflow;
 		do {
 			++result;
@@ -771,6 +771,8 @@
 PyLong_FromVoidPtr(void *p)
 {
 #if SIZEOF_VOID_P <= SIZEOF_LONG
+	if ((long)p < 0)
+		return PyLong_FromUnsignedLong((unsigned long)p);
 	return PyInt_FromLong((long)p);
 #else
 
@@ -783,7 +785,7 @@
 	/* optimize null pointers */
 	if (p == NULL)
 		return PyInt_FromLong(0);
-	return PyLong_FromLongLong((PY_LONG_LONG)p);
+	return PyLong_FromUnsignedLongLong((unsigned PY_LONG_LONG)p);
 
 #endif /* SIZEOF_VOID_P <= SIZEOF_LONG */
 }
@@ -802,8 +804,10 @@
 
 	if (PyInt_Check(vv))
 		x = PyInt_AS_LONG(vv);
-	else
+	else if (PyLong_Check(vv) && _PyLong_Sign(vv) < 0)
 		x = PyLong_AsLong(vv);
+	else
+		x = PyLong_AsUnsignedLong(vv);
 #else
 
 #ifndef HAVE_LONG_LONG
@@ -816,8 +820,10 @@
 
 	if (PyInt_Check(vv))
 		x = PyInt_AS_LONG(vv);
-	else
+	else if (PyLong_Check(vv) && _PyLong_Sign(vv) < 0)
 		x = PyLong_AsLongLong(vv);
+	else
+		x = PyLong_AsUnsignedLongLong(vv);
 
 #endif /* SIZEOF_VOID_P <= SIZEOF_LONG */
 
@@ -947,7 +953,7 @@
 
 	if (vv == NULL || !PyLong_Check(vv)) {
 		PyErr_BadInternalCall();
-		return -1;
+		return (unsigned PY_LONG_LONG)-1;
 	}
 
 	res = _PyLong_AsByteArray(
@@ -1394,6 +1400,8 @@
 	int sign = 1;
 	char *start, *orig_str = str;
 	PyLongObject *z;
+	PyObject *strobj, *strrepr;
+	Py_ssize_t slen;
 
 	if ((base != 0 && base < 2) || base > 36) {
 		PyErr_SetString(PyExc_ValueError,
@@ -1459,9 +1467,19 @@
 	return (PyObject *) z;
 
  onError:
-	PyErr_Format(PyExc_ValueError,
-		     "invalid literal for long(): %.200s", orig_str);
 	Py_XDECREF(z);
+	slen = strlen(orig_str) < 200 ? strlen(orig_str) : 200;
+	strobj = PyString_FromStringAndSize(orig_str, slen);
+	if (strobj == NULL)
+		return NULL;
+	strrepr = PyObject_Repr(strobj);
+	Py_DECREF(strobj);
+	if (strrepr == NULL)
+		return NULL;
+	PyErr_Format(PyExc_ValueError,
+		     "invalid literal for long() with base %d: %s",
+		     base, PyString_AS_STRING(strrepr));
+	Py_DECREF(strrepr);
 	return NULL;
 }
 
@@ -1470,7 +1488,7 @@
 PyLong_FromUnicode(Py_UNICODE *u, Py_ssize_t length, int base)
 {
 	PyObject *result;
-	char *buffer = PyMem_MALLOC(length+1);
+	char *buffer = (char *)PyMem_MALLOC(length+1);
 
 	if (buffer == NULL)
 		return NULL;
@@ -3066,7 +3084,7 @@
 static PyObject *
 long_subtype_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 {
-	PyLongObject *tmp, *new;
+	PyLongObject *tmp, *newobj;
 	Py_ssize_t i, n;
 
 	assert(PyType_IsSubtype(type, &PyLong_Type));
@@ -3077,17 +3095,17 @@
 	n = tmp->ob_size;
 	if (n < 0)
 		n = -n;
-	new = (PyLongObject *)type->tp_alloc(type, n);
-	if (new == NULL) {
+	newobj = (PyLongObject *)type->tp_alloc(type, n);
+	if (newobj == NULL) {
 		Py_DECREF(tmp);
 		return NULL;
 	}
-	assert(PyLong_Check(new));
-	new->ob_size = tmp->ob_size;
+	assert(PyLong_Check(newobj));
+	newobj->ob_size = tmp->ob_size;
 	for (i = 0; i < n; i++)
-		new->ob_digit[i] = tmp->ob_digit[i];
+		newobj->ob_digit[i] = tmp->ob_digit[i];
 	Py_DECREF(tmp);
-	return (PyObject *)new;
+	return (PyObject *)newobj;
 }
 
 static PyObject *
@@ -3114,25 +3132,25 @@
 	(binaryfunc)	long_add,	/*nb_add*/
 	(binaryfunc)	long_sub,	/*nb_subtract*/
 	(binaryfunc)	long_mul,	/*nb_multiply*/
-	(binaryfunc)	long_mod,	/*nb_remainder*/
-	(binaryfunc)	long_divmod,	/*nb_divmod*/
-	(ternaryfunc)	long_pow,	/*nb_power*/
+			long_mod,	/*nb_remainder*/
+			long_divmod,	/*nb_divmod*/
+			long_pow,	/*nb_power*/
 	(unaryfunc) 	long_neg,	/*nb_negative*/
 	(unaryfunc) 	long_pos,	/*tp_positive*/
 	(unaryfunc) 	long_abs,	/*tp_absolute*/
 	(inquiry)	long_nonzero,	/*tp_nonzero*/
 	(unaryfunc)	long_invert,	/*nb_invert*/
-	(binaryfunc)	long_lshift,	/*nb_lshift*/
+			long_lshift,	/*nb_lshift*/
 	(binaryfunc)	long_rshift,	/*nb_rshift*/
-	(binaryfunc)	long_and,	/*nb_and*/
-	(binaryfunc)	long_xor,	/*nb_xor*/
-	(binaryfunc)	long_or,	/*nb_or*/
-	(coercion)	long_coerce,	/*nb_coerce*/
-	(unaryfunc)	long_int,	/*nb_int*/
-	(unaryfunc)	long_long,	/*nb_long*/
-	(unaryfunc)	long_float,	/*nb_float*/
-	(unaryfunc)	long_oct,	/*nb_oct*/
-	(unaryfunc)	long_hex,	/*nb_hex*/
+			long_and,	/*nb_and*/
+			long_xor,	/*nb_xor*/
+			long_or,	/*nb_or*/
+			long_coerce,	/*nb_coerce*/
+			long_int,	/*nb_int*/
+			long_long,	/*nb_long*/
+			long_float,	/*nb_float*/
+			long_oct,	/*nb_oct*/
+			long_hex,	/*nb_hex*/
 	0,				/* nb_inplace_add */
 	0,				/* nb_inplace_subtract */
 	0,				/* nb_inplace_multiply */
@@ -3143,11 +3161,11 @@
 	0,				/* nb_inplace_and */
 	0,				/* nb_inplace_xor */
 	0,				/* nb_inplace_or */
-	(binaryfunc)long_div,		/* nb_floor_divide */
+	long_div,			/* nb_floor_divide */
 	long_true_divide,		/* nb_true_divide */
 	0,				/* nb_inplace_floor_divide */
 	0,				/* nb_inplace_true_divide */
-	(lenfunc)long_index,            /* nb_index */
+	long_index,			/* nb_index */
 };
 
 PyTypeObject PyLong_Type = {
@@ -3156,18 +3174,18 @@
 	"long",					/* tp_name */
 	sizeof(PyLongObject) - sizeof(digit),	/* tp_basicsize */
 	sizeof(digit),				/* tp_itemsize */
-	(destructor)long_dealloc,		/* tp_dealloc */
+	long_dealloc,				/* tp_dealloc */
 	0,					/* tp_print */
 	0,					/* tp_getattr */
 	0,					/* tp_setattr */
 	(cmpfunc)long_compare,			/* tp_compare */
-	(reprfunc)long_repr,			/* tp_repr */
+	long_repr,				/* tp_repr */
 	&long_as_number,			/* tp_as_number */
 	0,					/* tp_as_sequence */
 	0,					/* tp_as_mapping */
 	(hashfunc)long_hash,			/* tp_hash */
         0,              			/* tp_call */
-        (reprfunc)long_str,			/* tp_str */
+        long_str,				/* tp_str */
 	PyObject_GenericGetAttr,		/* tp_getattro */
 	0,					/* tp_setattro */
 	0,					/* tp_as_buffer */
diff --git a/Objects/methodobject.c b/Objects/methodobject.c
index 8e3bf86..ecc9a0a 100644
--- a/Objects/methodobject.c
+++ b/Objects/methodobject.c
@@ -149,17 +149,8 @@
 static int
 meth_traverse(PyCFunctionObject *m, visitproc visit, void *arg)
 {
-	int err;
-	if (m->m_self != NULL) {
-		err = visit(m->m_self, arg);
-		if (err)
-			return err;
-	}
-	if (m->m_module != NULL) {
-		err = visit(m->m_module, arg);
-		if (err)
-			return err;
-	}
+	Py_VISIT(m->m_self);
+	Py_VISIT(m->m_module);
 	return 0;
 }
 
diff --git a/Objects/moduleobject.c b/Objects/moduleobject.c
index 8124968..e454fcf 100644
--- a/Objects/moduleobject.c
+++ b/Objects/moduleobject.c
@@ -204,8 +204,7 @@
 static int
 module_traverse(PyModuleObject *m, visitproc visit, void *arg)
 {
-	if (m->md_dict != NULL)
-		return visit(m->md_dict, arg);
+	Py_VISIT(m->md_dict);
 	return 0;
 }
 
diff --git a/Objects/object.c b/Objects/object.c
index 9b6a30a..a75c14e 100644
--- a/Objects/object.c
+++ b/Objects/object.c
@@ -3,9 +3,30 @@
 
 #include "Python.h"
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 #ifdef Py_REF_DEBUG
 Py_ssize_t _Py_RefTotal;
-#endif
+
+Py_ssize_t
+_Py_GetRefTotal(void)
+{
+	PyObject *o;
+	Py_ssize_t total = _Py_RefTotal;
+        /* ignore the references to the dummy object of the dicts and sets
+           because they are not reliable and not useful (now that the
+           hash table code is well-tested) */
+	o = _PyDict_Dummy();
+	if (o != NULL)
+		total -= o->ob_refcnt;
+	o = _PySet_Dummy();
+	if (o != NULL)
+		total -= o->ob_refcnt;
+	return total;
+}
+#endif /* Py_REF_DEBUG */
 
 int Py_DivisionWarningFlag;
 
@@ -53,23 +74,30 @@
 
 #ifdef COUNT_ALLOCS
 static PyTypeObject *type_list;
+/* All types are added to type_list, at least when
+   they get one object created. That makes them
+   immortal, which unfortunately contributes to
+   garbage itself. If unlist_types_without_objects
+   is set, they will be removed from the type_list
+   once the last object is deallocated. */
+int unlist_types_without_objects;
 extern int tuple_zero_allocs, fast_tuple_allocs;
 extern int quick_int_allocs, quick_neg_int_allocs;
 extern int null_strings, one_strings;
 void
-dump_counts(void)
+dump_counts(FILE* f)
 {
 	PyTypeObject *tp;
 
 	for (tp = type_list; tp; tp = tp->tp_next)
-		fprintf(stderr, "%s alloc'd: %d, freed: %d, max in use: %d\n",
+		fprintf(f, "%s alloc'd: %d, freed: %d, max in use: %d\n",
 			tp->tp_name, tp->tp_allocs, tp->tp_frees,
 			tp->tp_maxalloc);
-	fprintf(stderr, "fast tuple allocs: %d, empty: %d\n",
+	fprintf(f, "fast tuple allocs: %d, empty: %d\n",
 		fast_tuple_allocs, tuple_zero_allocs);
-	fprintf(stderr, "fast int allocs: pos: %d, neg: %d\n",
+	fprintf(f, "fast int allocs: pos: %d, neg: %d\n",
 		quick_int_allocs, quick_neg_int_allocs);
-	fprintf(stderr, "null strings: %d, 1-strings: %d\n",
+	fprintf(f, "null strings: %d, 1-strings: %d\n",
 		null_strings, one_strings);
 }
 
@@ -103,10 +131,12 @@
 void
 inc_count(PyTypeObject *tp)
 {
-	if (tp->tp_allocs == 0) {
+	if (tp->tp_next == NULL && tp->tp_prev == NULL) {
 		/* first time; insert in linked list */
 		if (tp->tp_next != NULL) /* sanity check */
 			Py_FatalError("XXX inc_count sanity check");
+		if (type_list)
+			type_list->tp_prev = tp;
 		tp->tp_next = type_list;
 		/* Note that as of Python 2.2, heap-allocated type objects
 		 * can go away, but this code requires that they stay alive
@@ -129,6 +159,24 @@
 	if (tp->tp_allocs - tp->tp_frees > tp->tp_maxalloc)
 		tp->tp_maxalloc = tp->tp_allocs - tp->tp_frees;
 }
+
+void dec_count(PyTypeObject *tp)
+{
+	tp->tp_frees++;
+	if (unlist_types_without_objects &&
+	    tp->tp_allocs == tp->tp_frees) {
+		/* unlink the type from type_list */
+		if (tp->tp_prev)
+			tp->tp_prev->tp_next = tp->tp_next;
+		else
+			type_list = tp->tp_next;
+		if (tp->tp_next)
+			tp->tp_next->tp_prev = tp->tp_prev;
+		tp->tp_next = tp->tp_prev = NULL;
+		Py_DECREF(tp);
+	}
+}
+
 #endif
 
 #ifdef Py_REF_DEBUG
@@ -138,11 +186,10 @@
 {
 	char buf[300];
 
-	/* XXX(twouters) cast refcount to long until %zd is universally
-	   available */
 	PyOS_snprintf(buf, sizeof(buf),
-		      "%s:%i object at %p has negative ref count %ld",
-		      fname, lineno, op, (long)op->ob_refcnt);
+		      "%s:%i object at %p has negative ref count "
+		      "%" PY_FORMAT_SIZE_T "d",
+		      fname, lineno, op, op->ob_refcnt);
 	Py_FatalError(buf);
 }
 
@@ -317,7 +364,7 @@
 #ifdef Py_USING_UNICODE
 		if (PyUnicode_Check(res)) {
 			PyObject* str;
-			str = PyUnicode_AsUnicodeEscapeString(res);
+			str = PyUnicode_AsEncodedString(res, NULL, NULL);
 			Py_DECREF(res);
 			if (str)
 				res = str;
@@ -1775,12 +1822,12 @@
 	"NoneType",
 	0,
 	0,
-	(destructor)none_dealloc,	     /*tp_dealloc*/ /*never called*/
+	none_dealloc,	/*tp_dealloc*/ /*never called*/
 	0,		/*tp_print*/
 	0,		/*tp_getattr*/
 	0,		/*tp_setattr*/
 	0,		/*tp_compare*/
-	(reprfunc)none_repr, /*tp_repr*/
+	none_repr,	/*tp_repr*/
 	0,		/*tp_as_number*/
 	0,		/*tp_as_sequence*/
 	0,		/*tp_as_mapping*/
@@ -1806,12 +1853,12 @@
 	"NotImplementedType",
 	0,
 	0,
-	(destructor)none_dealloc,	     /*tp_dealloc*/ /*never called*/
+	none_dealloc,	/*tp_dealloc*/ /*never called*/
 	0,		/*tp_print*/
 	0,		/*tp_getattr*/
 	0,		/*tp_setattr*/
 	0,		/*tp_compare*/
-	(reprfunc)NotImplemented_repr, /*tp_repr*/
+	NotImplemented_repr, /*tp_repr*/
 	0,		/*tp_as_number*/
 	0,		/*tp_as_sequence*/
 	0,		/*tp_as_mapping*/
@@ -1901,9 +1948,7 @@
 	PyObject *op;
 	fprintf(fp, "Remaining objects:\n");
 	for (op = refchain._ob_next; op != &refchain; op = op->_ob_next) {
-		/* XXX(twouters) cast refcount to long until %zd is
-		   universally available */
-		fprintf(fp, "%p [%ld] ", op, (long)op->ob_refcnt);
+		fprintf(fp, "%p [%" PY_FORMAT_SIZE_T "d] ", op, op->ob_refcnt);
 		if (PyObject_Print(op, fp, 0) != 0)
 			PyErr_Clear();
 		putc('\n', fp);
@@ -1919,10 +1964,8 @@
 	PyObject *op;
 	fprintf(fp, "Remaining object addresses:\n");
 	for (op = refchain._ob_next; op != &refchain; op = op->_ob_next)
-		/* XXX(twouters) cast refcount to long until %zd is
-		   universally available */
-		fprintf(fp, "%p [%ld] %s\n", op, (long)op->ob_refcnt,
-					    op->ob_type->tp_name);
+		fprintf(fp, "%p [%" PY_FORMAT_SIZE_T "d] %s\n", op,
+			op->ob_refcnt, op->ob_type->tp_name);
 }
 
 PyObject *
@@ -2100,3 +2143,8 @@
 		--_PyTrash_delete_nesting;
 	}
 }
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/Objects/obmalloc.c b/Objects/obmalloc.c
index 870f93c..a393cbc 100644
--- a/Objects/obmalloc.c
+++ b/Objects/obmalloc.c
@@ -529,7 +529,7 @@
 		nbytes = numarenas * sizeof(*arenas);
 		if (nbytes / sizeof(*arenas) != numarenas)
 			return NULL;	/* overflow */
-		arenaobj = realloc(arenas, nbytes);
+		arenaobj = (struct arena_object *)realloc(arenas, nbytes);
 		if (arenaobj == NULL)
 			return NULL;
 		arenas = arenaobj;
diff --git a/Objects/rangeobject.c b/Objects/rangeobject.c
index a9c0b55..c48bee0 100644
--- a/Objects/rangeobject.c
+++ b/Objects/rangeobject.c
@@ -104,13 +104,6 @@
 static Py_ssize_t
 range_length(rangeobject *r)
 {
-#if LONG_MAX != INT_MAX /* XXX ssize_t_max */
-	if (r->len > INT_MAX) {
-		PyErr_SetString(PyExc_ValueError,
-				"xrange object size cannot be reported");
-		return -1;
-	}
-#endif
 	return (Py_ssize_t)(r->len);
 }
 
@@ -157,44 +150,44 @@
 
 PyTypeObject PyRange_Type = {
 	PyObject_HEAD_INIT(&PyType_Type)
-	0,				/* Number of items for varobject */
-	"xrange",			/* Name of this type */
-	sizeof(rangeobject),		/* Basic object size */
-	0,				/* Item size for varobject */
-	(destructor)PyObject_Del,	/* tp_dealloc */
-	0,				/* tp_print */
-	0,				/* tp_getattr */
-	0,				/* tp_setattr */
-	0,				/* tp_compare */
-	(reprfunc)range_repr,		/* tp_repr */
-	0,				/* tp_as_number */
-	&range_as_sequence,		/* tp_as_sequence */
-	0,				/* tp_as_mapping */
-	0,				/* tp_hash */
-	0,				/* tp_call */
-	0,				/* tp_str */
-	PyObject_GenericGetAttr,	/* tp_getattro */
-	0,				/* tp_setattro */
-	0,				/* tp_as_buffer */
-	Py_TPFLAGS_DEFAULT,		/* tp_flags */
-	range_doc,			/* tp_doc */
-	0,				/* tp_traverse */
-	0,				/* tp_clear */
-	0,				/* tp_richcompare */
-	0,				/* tp_weaklistoffset */
-	(getiterfunc)range_iter,	/* tp_iter */
-	0,				/* tp_iternext */
-	range_methods,			/* tp_methods */
-	0,				/* tp_members */
-	0,				/* tp_getset */
-	0,				/* tp_base */
-	0,				/* tp_dict */
-	0,				/* tp_descr_get */
-	0,				/* tp_descr_set */
-	0,				/* tp_dictoffset */
-	0,				/* tp_init */
-	0,				/* tp_alloc */
-	range_new,			/* tp_new */
+	0,			/* Number of items for varobject */
+	"xrange",		/* Name of this type */
+	sizeof(rangeobject),	/* Basic object size */
+	0,			/* Item size for varobject */
+	(destructor)PyObject_Del, /* tp_dealloc */
+	0,			/* tp_print */
+	0,			/* tp_getattr */
+	0,			/* tp_setattr */
+	0,			/* tp_compare */
+	(reprfunc)range_repr,	/* tp_repr */
+	0,			/* tp_as_number */
+	&range_as_sequence,	/* tp_as_sequence */
+	0,			/* tp_as_mapping */
+	0,			/* tp_hash */
+	0,			/* tp_call */
+	0,			/* tp_str */
+	PyObject_GenericGetAttr,  /* tp_getattro */
+	0,			/* tp_setattro */
+	0,			/* tp_as_buffer */
+	Py_TPFLAGS_DEFAULT,	/* tp_flags */
+	range_doc,		/* tp_doc */
+	0,			/* tp_traverse */
+	0,			/* tp_clear */
+	0,			/* tp_richcompare */
+	0,			/* tp_weaklistoffset */
+	range_iter,		/* tp_iter */
+	0,			/* tp_iternext */
+	range_methods,		/* tp_methods */
+	0,			/* tp_members */
+	0,			/* tp_getset */
+	0,			/* tp_base */
+	0,			/* tp_dict */
+	0,			/* tp_descr_get */
+	0,			/* tp_descr_set */
+	0,			/* tp_dictoffset */
+	0,			/* tp_init */
+	0,			/* tp_alloc */
+	range_new,		/* tp_new */
 };
 
 /*********************** Xrange Iterator **************************/
@@ -207,53 +200,6 @@
 	long	len;
 } rangeiterobject;
 
-static PyTypeObject Pyrangeiter_Type;
-
-static PyObject *
-range_iter(PyObject *seq)
-{
-	rangeiterobject *it;
-
-	if (!PyRange_Check(seq)) {
-		PyErr_BadInternalCall();
-		return NULL;
-	}
-	it = PyObject_New(rangeiterobject, &Pyrangeiter_Type);
-	if (it == NULL)
-		return NULL;
-	it->index = 0;
-	it->start = ((rangeobject *)seq)->start;
-	it->step = ((rangeobject *)seq)->step;
-	it->len = ((rangeobject *)seq)->len;
-	return (PyObject *)it;
-}
-
-static PyObject *
-range_reverse(PyObject *seq)
-{
-	rangeiterobject *it;
-	long start, step, len;
-
-	if (!PyRange_Check(seq)) {
-		PyErr_BadInternalCall();
-		return NULL;
-	}
-	it = PyObject_New(rangeiterobject, &Pyrangeiter_Type);
-	if (it == NULL)
-		return NULL;
-
-	start = ((rangeobject *)seq)->start;
-	step = ((rangeobject *)seq)->step;
-	len = ((rangeobject *)seq)->len;
-
-	it->index = 0;
-	it->start = start + (len-1) * step;
-	it->step = -step;
-	it->len = len;
-
-	return (PyObject *)it;
-}
-
 static PyObject *
 rangeiter_next(rangeiterobject *r)
 {
@@ -308,3 +254,48 @@
 	rangeiter_methods,			/* tp_methods */
 	0,
 };
+
+static PyObject *
+range_iter(PyObject *seq)
+{
+	rangeiterobject *it;
+
+	if (!PyRange_Check(seq)) {
+		PyErr_BadInternalCall();
+		return NULL;
+	}
+	it = PyObject_New(rangeiterobject, &Pyrangeiter_Type);
+	if (it == NULL)
+		return NULL;
+	it->index = 0;
+	it->start = ((rangeobject *)seq)->start;
+	it->step = ((rangeobject *)seq)->step;
+	it->len = ((rangeobject *)seq)->len;
+	return (PyObject *)it;
+}
+
+static PyObject *
+range_reverse(PyObject *seq)
+{
+	rangeiterobject *it;
+	long start, step, len;
+
+	if (!PyRange_Check(seq)) {
+		PyErr_BadInternalCall();
+		return NULL;
+	}
+	it = PyObject_New(rangeiterobject, &Pyrangeiter_Type);
+	if (it == NULL)
+		return NULL;
+
+	start = ((rangeobject *)seq)->start;
+	step = ((rangeobject *)seq)->step;
+	len = ((rangeobject *)seq)->len;
+
+	it->index = 0;
+	it->start = start + (len-1) * step;
+	it->step = -step;
+	it->len = len;
+
+	return (PyObject *)it;
+}
diff --git a/Objects/setobject.c b/Objects/setobject.c
index 89d574f..26a232b 100644
--- a/Objects/setobject.c
+++ b/Objects/setobject.c
@@ -3,7 +3,7 @@
    Written and maintained by Raymond D. Hettinger <python@rcn.com>
    Derived from Lib/sets.py and Objects/dictobject.c.
 
-   Copyright (c) 2003-5 Python Software Foundation.
+   Copyright (c) 2003-6 Python Software Foundation.
    All rights reserved.
 */
 
@@ -16,6 +16,14 @@
 /* Object used as dummy key to fill deleted entries */
 static PyObject *dummy = NULL; /* Initialized by first call to make_new_set() */
 
+#ifdef Py_REF_DEBUG
+PyObject *
+_PySet_Dummy(void)
+{
+	return dummy;
+}
+#endif
+
 #define INIT_NONZERO_SET_SLOTS(so) do {				\
 	(so)->table = (so)->smalltable;				\
 	(so)->mask = PySet_MINSIZE - 1;				\
@@ -445,7 +453,7 @@
 		}
 #ifdef Py_DEBUG
 		else
-			assert(entry->key == NULL || entry->key == dummy);
+			assert(entry->key == NULL);
 #endif
 	}
 
@@ -719,8 +727,6 @@
 
 /***** Set iterator type ***********************************************/
 
-static PyTypeObject PySetIter_Type; /* Forward */
-
 typedef struct {
 	PyObject_HEAD
 	PySetObject *si_set; /* Set to NULL when iterator is exhausted */
@@ -729,20 +735,6 @@
 	long len;
 } setiterobject;
 
-static PyObject *
-set_iter(PySetObject *so)
-{
-	setiterobject *si = PyObject_New(setiterobject, &PySetIter_Type);
-	if (si == NULL)
-		return NULL;
-	Py_INCREF(so);
-	si->si_set = so;
-	si->si_used = so->used;
-	si->si_pos = 0;
-	si->len = so->used;
-	return (PyObject *)si;
-}
-
 static void
 setiter_dealloc(setiterobject *si)
 {
@@ -838,6 +830,20 @@
 	0,
 };
 
+static PyObject *
+set_iter(PySetObject *so)
+{
+	setiterobject *si = PyObject_New(setiterobject, &PySetIter_Type);
+	if (si == NULL)
+		return NULL;
+	Py_INCREF(so);
+	si->si_set = so;
+	si->si_used = so->used;
+	si->si_pos = 0;
+	si->len = so->used;
+	return (PyObject *)si;
+}
+
 static int
 set_update_internal(PySetObject *so, PyObject *other)
 {
@@ -972,8 +978,8 @@
 		so = free_sets[num_free_sets];
 		PyObject_GC_Del(so);
 	}
-	Py_XDECREF(dummy);
-	Py_XDECREF(emptyfrozenset);
+	Py_CLEAR(dummy);
+	Py_CLEAR(emptyfrozenset);
 }
 
 static PyObject *
@@ -1531,7 +1537,7 @@
 }
 
 static int
-set_nocmp(PyObject *self)
+set_nocmp(PyObject *self, PyObject *other)
 {
 	PyErr_SetString(PyExc_TypeError, "cannot compare sets using cmp()");
 	return -1;
@@ -1688,7 +1694,7 @@
 }
 
 static PySequenceMethods set_as_sequence = {
-	(lenfunc)set_len,		/* sq_length */
+	set_len,			/* sq_length */
 	0,				/* sq_concat */
 	0,				/* sq_repeat */
 	0,				/* sq_item */
@@ -1802,7 +1808,7 @@
 	(printfunc)set_tp_print,	/* tp_print */
 	0,				/* tp_getattr */
 	0,				/* tp_setattr */
-	(cmpfunc)set_nocmp,		/* tp_compare */
+	set_nocmp,			/* tp_compare */
 	(reprfunc)set_repr,		/* tp_repr */
 	&set_as_number,			/* tp_as_number */
 	&set_as_sequence,		/* tp_as_sequence */
@@ -1896,7 +1902,7 @@
 	(printfunc)set_tp_print,	/* tp_print */
 	0,				/* tp_getattr */
 	0,				/* tp_setattr */
-	(cmpfunc)set_nocmp,		/* tp_compare */
+	set_nocmp,			/* tp_compare */
 	(reprfunc)set_repr,		/* tp_repr */
 	&frozenset_as_number,		/* tp_as_number */
 	&set_as_sequence,		/* tp_as_sequence */
@@ -1966,6 +1972,16 @@
 }
 
 int
+PySet_Clear(PyObject *set)
+{
+	if (!PyType_IsSubtype(set->ob_type, &PySet_Type)) {
+		PyErr_BadInternalCall();
+		return -1;
+	}
+	return set_clear_internal((PySetObject *)set);
+}
+
+int
 PySet_Contains(PyObject *anyset, PyObject *key)
 {
 	if (!PyAnySet_Check(anyset)) {
@@ -1995,6 +2011,21 @@
 	return set_add_key((PySetObject *)set, key);
 }
 
+int
+_PySet_Next(PyObject *set, Py_ssize_t *pos, PyObject **entry)
+{
+	setentry *entry_ptr;
+
+	if (!PyAnySet_Check(set)) {
+		PyErr_BadInternalCall();
+		return -1;
+	}
+	if (set_next((PySetObject *)set, pos, &entry_ptr) == 0)
+		return 0;
+	*entry = entry_ptr->key;
+	return 1;
+}
+
 PyObject *
 PySet_Pop(PyObject *set)
 {
@@ -2005,6 +2036,15 @@
 	return set_pop((PySetObject *)set);
 }
 
+int
+_PySet_Update(PyObject *set, PyObject *iterable)
+{
+	if (!PyType_IsSubtype(set->ob_type, &PySet_Type)) {
+		PyErr_BadInternalCall();
+		return -1;
+	}
+	return set_update_internal((PySetObject *)set, iterable);
+}
 
 #ifdef Py_DEBUG
 
@@ -2021,7 +2061,11 @@
 static PyObject *
 test_c_api(PySetObject *so)
 {
-	PyObject *elem, *dup, *t, *f, *ob = (PyObject *)so;
+	int count;
+	char *s;
+	Py_ssize_t i;
+	PyObject *elem, *dup, *t, *f, *dup2;
+	PyObject *ob = (PyObject *)so;
 
 	/* Verify preconditions and exercise type/size checks */
 	assert(PyAnySet_Check(ob));
@@ -2052,6 +2096,35 @@
 	assert(PySet_Discard(ob, elem) == 0);
 	assert(PySet_GET_SIZE(ob) == 2);
 
+	/* Exercise clear */
+	dup2 = PySet_New(dup);
+	assert(PySet_Clear(dup2) == 0);
+	assert(PySet_Size(dup2) == 0);
+	Py_DECREF(dup2);
+
+	/* Raise SystemError on clear or update of frozen set */
+	f = PyFrozenSet_New(dup);
+	assertRaises(PySet_Clear(f) == -1, PyExc_SystemError);
+	assertRaises(_PySet_Update(f, dup) == -1, PyExc_SystemError);
+	Py_DECREF(f);
+
+	/* Exercise direct iteration */
+	i = 0, count = 0;
+	while (_PySet_Next((PyObject *)dup, &i, &elem)) {
+		s = PyString_AsString(elem);
+		assert(s && (s[0] == 'a' || s[0] == 'b' || s[0] == 'c'));
+		count++;
+	}
+	assert(count == 3);
+
+	/* Exercise updates */
+	dup2 = PySet_New(NULL);
+	assert(_PySet_Update(dup2, dup) == 0);
+	assert(PySet_Size(dup2) == 3);
+	assert(_PySet_Update(dup2, dup) == 0);
+	assert(PySet_Size(dup2) == 3);
+	Py_DECREF(dup2);
+
 	/* Raise SystemError when self argument is not a set or frozenset. */
 	t = PyTuple_New(0);
 	assertRaises(PySet_Size(t) == -1, PyExc_SystemError);
diff --git a/Objects/sliceobject.c b/Objects/sliceobject.c
index 3b37dbb..271a9ad 100644
--- a/Objects/sliceobject.c
+++ b/Objects/sliceobject.c
@@ -24,26 +24,26 @@
 
 static PyTypeObject PyEllipsis_Type = {
 	PyObject_HEAD_INIT(&PyType_Type)
-	0,					/* ob_size */
-	"ellipsis",				/* tp_name */
-	0,					/* tp_basicsize */
-	0,					/* tp_itemsize */
-	0, /*never called*/			/* tp_dealloc */
-	0,					/* tp_print */
-	0,					/* tp_getattr */
-	0,					/* tp_setattr */
-	0,					/* tp_compare */
-	(reprfunc)ellipsis_repr,		/* tp_repr */
-	0,					/* tp_as_number */
-	0,					/* tp_as_sequence */
-	0,					/* tp_as_mapping */
-	0,					/* tp_hash */
-	0,					/* tp_call */
-	0,					/* tp_str */
-	PyObject_GenericGetAttr,		/* tp_getattro */
-	0,					/* tp_setattro */
-	0,					/* tp_as_buffer */
-	Py_TPFLAGS_DEFAULT,			/* tp_flags */
+	0,				/* ob_size */
+	"ellipsis",			/* tp_name */
+	0,				/* tp_basicsize */
+	0,				/* tp_itemsize */
+	0, /*never called*/		/* tp_dealloc */
+	0,				/* tp_print */
+	0,				/* tp_getattr */
+	0,				/* tp_setattr */
+	0,				/* tp_compare */
+	ellipsis_repr,			/* tp_repr */
+	0,				/* tp_as_number */
+	0,				/* tp_as_sequence */
+	0,				/* tp_as_mapping */
+	0,				/* tp_hash */
+	0,				/* tp_call */
+	0,				/* tp_str */
+	PyObject_GenericGetAttr,	/* tp_getattro */
+	0,				/* tp_setattro */
+	0,				/* tp_as_buffer */
+	Py_TPFLAGS_DEFAULT,		/* tp_flags */
 };
 
 PyObject _Py_EllipsisObject = {
@@ -79,6 +79,25 @@
 	return (PyObject *) obj;
 }
 
+PyObject *
+_PySlice_FromIndices(Py_ssize_t istart, Py_ssize_t istop)
+{
+	PyObject *start, *end, *slice;
+	start = PyInt_FromSsize_t(istart);
+	if (!start)
+		return NULL;
+	end = PyInt_FromSsize_t(istop);
+	if (!end) {
+		Py_DECREF(start);
+		return NULL;
+	}
+
+	slice = PySlice_New(start, end, NULL);
+	Py_DECREF(start);
+	Py_DECREF(end);
+	return slice;
+}
+
 int
 PySlice_GetIndices(PySliceObject *r, Py_ssize_t length,
                    Py_ssize_t *start, Py_ssize_t *stop, Py_ssize_t *step)
@@ -87,21 +106,21 @@
 	if (r->step == Py_None) {
 		*step = 1;
 	} else {
-		if (!PyInt_Check(r->step)) return -1;
-		*step = PyInt_AsLong(r->step);
+		if (!PyInt_Check(r->step) && !PyLong_Check(r->step)) return -1;
+		*step = PyInt_AsSsize_t(r->step);
 	}
 	if (r->start == Py_None) {
 		*start = *step < 0 ? length-1 : 0;
 	} else {
-		if (!PyInt_Check(r->start)) return -1;
-		*start = PyInt_AsLong(r->start);
+		if (!PyInt_Check(r->start) && !PyLong_Check(r->step)) return -1;
+		*start = PyInt_AsSsize_t(r->start);
 		if (*start < 0) *start += length;
 	}
 	if (r->stop == Py_None) {
 		*stop = *step < 0 ? -1 : length;
 	} else {
-		if (!PyInt_Check(r->stop)) return -1;
-		*stop = PyInt_AsLong(r->stop);
+		if (!PyInt_Check(r->stop) && !PyLong_Check(r->step)) return -1;
+		*stop = PyInt_AsSsize_t(r->stop);
 		if (*stop < 0) *stop += length;
 	}
 	if (*stop > length) return -1;
@@ -233,7 +252,7 @@
 {
 	Py_ssize_t ilen, start, stop, step, slicelength;
 
-	ilen = PyInt_AsLong(len);
+	ilen = PyInt_AsSsize_t(len);
 
 	if (ilen == -1 && PyErr_Occurred()) {
 		return NULL;
@@ -244,7 +263,7 @@
 		return NULL;
 	}
 
-	return Py_BuildValue("(iii)", start, stop, step);
+	return Py_BuildValue("(nnn)", start, stop, step);
 }
 
 PyDoc_STRVAR(slice_indices_doc,
diff --git a/Objects/stringobject.c b/Objects/stringobject.c
index 32aacf5..32e825e 100644
--- a/Objects/stringobject.c
+++ b/Objects/stringobject.c
@@ -1,5 +1,6 @@
 /* String object implementation */
 
+#define PY_SSIZE_T_CLEAN
 #include "Python.h"
 
 #include <ctype.h>
@@ -16,7 +17,7 @@
    When the interned string reaches a refcnt of 0 the string deallocation
    function will delete the reference from this dictionary.
 
-   Another way to look at this is that to say that the actual reference 
+   Another way to look at this is that to say that the actual reference
    count of a string is:  s->ob_refcnt + (s->ob_sstate?2:0)
 */
 static PyObject *interned;
@@ -105,7 +106,7 @@
 
 	assert(str != NULL);
 	size = strlen(str);
-	if (size > INT_MAX) {
+	if (size > PY_SSIZE_T_MAX) {
 		PyErr_SetString(PyExc_OverflowError,
 			"string is too long for a Python string");
 		return NULL;
@@ -183,7 +184,7 @@
 				++f;
 			/* likewise for %zd */
 			if (*f == 'z' && *(f+1) == 'd')
-				++f;			
+				++f;
 
 			switch (*f) {
 			case 'c':
@@ -273,18 +274,9 @@
 			case 'd':
 				if (longflag)
 					sprintf(s, "%ld", va_arg(vargs, long));
-				else if (size_tflag) {
-					/* Instead of checking whether the C
-					   library supports %zd, handle the
-					   common cases. */
-				        #if SIZEOF_SIZE_T == SIZEOF_LONG
-					sprintf(s, "%ld", va_arg(vargs, long));
-					#elif defined(MS_WINDOWS)
-					sprintf(s, "%Id", va_arg(vargs, size_t));
-					#else
-					#error Cannot print size_t values
-					#endif
-				}
+				else if (size_tflag)
+					sprintf(s, "%" PY_FORMAT_SIZE_T "u",
+					        va_arg(vargs, size_t));
 				else
 					sprintf(s, "%d", va_arg(vargs, int));
 				s += strlen(s);
@@ -578,8 +570,9 @@
 				if (!w)	goto failed;
 
 				/* Append bytes to output buffer. */
-				r = PyString_AsString(w);
-				rn = PyString_Size(w);
+				assert(PyString_Check(w));
+				r = PyString_AS_STRING(w);
+				rn = PyString_GET_SIZE(w);
 				memcpy(p, r, rn);
 				p += rn;
 				Py_DECREF(w);
@@ -622,7 +615,7 @@
 			*p++ = c;
 			break;
 		case 'x':
-			if (isxdigit(Py_CHARMASK(s[0])) 
+			if (isxdigit(Py_CHARMASK(s[0]))
 			    && isxdigit(Py_CHARMASK(s[1]))) {
 				unsigned int x = 0;
 				c = Py_CHARMASK(*s);
@@ -646,7 +639,7 @@
 				break;
 			}
 			if (!errors || strcmp(errors, "strict") == 0) {
-				PyErr_SetString(PyExc_ValueError, 
+				PyErr_SetString(PyExc_ValueError,
 						"invalid \\x escape");
 				goto failed;
 			}
@@ -753,7 +746,7 @@
 	*s = PyString_AS_STRING(obj);
 	if (len != NULL)
 		*len = PyString_GET_SIZE(obj);
-	else if (strlen(*s) != PyString_GET_SIZE(obj)) {
+	else if (strlen(*s) != (size_t)PyString_GET_SIZE(obj)) {
 		PyErr_SetString(PyExc_TypeError,
 				"expected string without null bytes");
 		return -1;
@@ -822,7 +815,7 @@
 	register PyStringObject* op = (PyStringObject*) obj;
 	size_t newsize = 2 + 4 * op->ob_size;
 	PyObject *v;
-	if (newsize > INT_MAX) {
+	if (newsize > PY_SSIZE_T_MAX) {
 		PyErr_SetString(PyExc_OverflowError,
 			"string is too large to make repr");
 	}
@@ -838,7 +831,7 @@
 
 		/* figure out which quote to use; single is preferred */
 		quote = '\'';
-		if (smartquotes && 
+		if (smartquotes &&
 		    memchr(op->ob_sval, '\'', op->ob_size) &&
 		    !memchr(op->ob_sval, '"', op->ob_size))
 			quote = '"';
@@ -1003,7 +996,7 @@
 /* String slice a[i:j] consists of characters a[i] ... a[j-1] */
 
 static PyObject *
-string_slice(register PyStringObject *a, register Py_ssize_t i, 
+string_slice(register PyStringObject *a, register Py_ssize_t i,
 	     register Py_ssize_t j)
      /* j -- may be negative! */
 {
@@ -1047,7 +1040,7 @@
 
 	if (len_sub == 0)
 		return 1;
-	/* last points to one char beyond the start of the rightmost 
+	/* last points to one char beyond the start of the rightmost
 	   substring.  When s<last, there is still room for a possible match
 	   and s[0] through s[len_sub-1] will be in bounds.
 	   shortsub is len_sub minus the last character which is checked
@@ -1059,7 +1052,7 @@
 	lastchar = sub[shortsub];
 	last = s + PyString_GET_SIZE(a) - len_sub + 1;
 	while (s < last) {
-		s = memchr(s, firstchar, last-s);
+		s = (char *)memchr(s, firstchar, last-s);
 		if (s == NULL)
 			return 0;
 		assert(s < last);
@@ -1207,7 +1200,7 @@
 		char* result_buf;
 		PyObject* result;
 
-		if (PySlice_GetIndicesEx((PySliceObject*)item, 
+		if (PySlice_GetIndicesEx((PySliceObject*)item,
 				 PyString_GET_SIZE(self),
 				 &start, &stop, &step, &slicelength) < 0) {
 			return NULL;
@@ -1218,23 +1211,23 @@
 		}
 		else {
 			source_buf = PyString_AsString((PyObject*)self);
-			result_buf = PyMem_Malloc(slicelength);
+			result_buf = (char *)PyMem_Malloc(slicelength);
 			if (result_buf == NULL)
 				return PyErr_NoMemory();
 
-			for (cur = start, i = 0; i < slicelength; 
+			for (cur = start, i = 0; i < slicelength;
 			     cur += step, i++) {
 				result_buf[i] = source_buf[cur];
 			}
-			
-			result = PyString_FromStringAndSize(result_buf, 
+
+			result = PyString_FromStringAndSize(result_buf,
 							    slicelength);
 			PyMem_Free(result_buf);
 			return result;
 		}
-	} 
+	}
 	else {
-		PyErr_SetString(PyExc_TypeError, 
+		PyErr_SetString(PyExc_TypeError,
 				"string indices must be integers");
 		return NULL;
 	}
@@ -1340,7 +1333,7 @@
 		Py_DECREF(str);
 
 static PyObject *
-split_whitespace(const char *s, Py_ssize_t len, int maxsplit)
+split_whitespace(const char *s, Py_ssize_t len, Py_ssize_t maxsplit)
 {
 	Py_ssize_t i, j;
 	PyObject *str;
@@ -1374,7 +1367,7 @@
 }
 
 static PyObject *
-split_char(const char *s, Py_ssize_t len, char ch, int maxcount)
+split_char(const char *s, Py_ssize_t len, char ch, Py_ssize_t maxcount)
 {
 	register Py_ssize_t i, j;
 	PyObject *str;
@@ -1415,14 +1408,14 @@
 {
 	Py_ssize_t len = PyString_GET_SIZE(self), n, i, j;
 	int err;
-	int maxsplit = -1;
+	Py_ssize_t maxsplit = -1;
 	const char *s = PyString_AS_STRING(self), *sub;
 	PyObject *list, *item, *subobj = Py_None;
 
-	if (!PyArg_ParseTuple(args, "|Oi:split", &subobj, &maxsplit))
+	if (!PyArg_ParseTuple(args, "|On:split", &subobj, &maxsplit))
 		return NULL;
 	if (maxsplit < 0)
-		maxsplit = INT_MAX;
+		maxsplit = PY_SSIZE_T_MAX;
 	if (subobj == Py_None)
 		return split_whitespace(s, len, maxsplit);
 	if (PyString_Check(subobj)) {
@@ -1480,7 +1473,7 @@
 }
 
 static PyObject *
-rsplit_whitespace(const char *s, Py_ssize_t len, int maxsplit)
+rsplit_whitespace(const char *s, Py_ssize_t len, Py_ssize_t maxsplit)
 {
 	Py_ssize_t i, j;
 	PyObject *str;
@@ -1514,7 +1507,7 @@
 }
 
 static PyObject *
-rsplit_char(const char *s, Py_ssize_t len, char ch, int maxcount)
+rsplit_char(const char *s, Py_ssize_t len, char ch, Py_ssize_t maxcount)
 {
 	register Py_ssize_t i, j;
 	PyObject *str;
@@ -1556,14 +1549,14 @@
 {
 	Py_ssize_t len = PyString_GET_SIZE(self), n, i, j;
 	int err;
-	int maxsplit = -1;
+	Py_ssize_t maxsplit = -1;
 	const char *s = PyString_AS_STRING(self), *sub;
 	PyObject *list, *item, *subobj = Py_None;
 
-	if (!PyArg_ParseTuple(args, "|Oi:rsplit", &subobj, &maxsplit))
+	if (!PyArg_ParseTuple(args, "|On:rsplit", &subobj, &maxsplit))
 		return NULL;
 	if (maxsplit < 0)
-		maxsplit = INT_MAX;
+		maxsplit = PY_SSIZE_T_MAX;
 	if (subobj == Py_None)
 		return rsplit_whitespace(s, len, maxsplit);
 	if (PyString_Check(subobj)) {
@@ -1661,7 +1654,7 @@
 	}
 
 	/* There are at least two things to join, or else we have a subclass
-	 * of the builtin types in the sequence.  
+	 * of the builtin types in the sequence.
 	 * Do a pre-pass to figure out the total amount of space we'll
 	 * need (sz), see whether any argument is absurd, and defer to
 	 * the Unicode join if appropriate.
@@ -1684,16 +1677,16 @@
 			}
 #endif
 			PyErr_Format(PyExc_TypeError,
-				     "sequence item %i: expected string,"
+				     "sequence item %zd: expected string,"
 				     " %.80s found",
-				     /*XXX*/(int)i, item->ob_type->tp_name);
+				     i, item->ob_type->tp_name);
 			Py_DECREF(seq);
 			return NULL;
 		}
 		sz += PyString_GET_SIZE(item);
 		if (i != 0)
 			sz += seplen;
-		if (sz < old_sz || sz > INT_MAX) {
+		if (sz < old_sz || sz > PY_SSIZE_T_MAX) {
 			PyErr_SetString(PyExc_OverflowError,
 				"join() is too long for a Python string");
 			Py_DECREF(seq);
@@ -1754,7 +1747,7 @@
 {
 	const char *s = PyString_AS_STRING(self), *sub;
 	Py_ssize_t len = PyString_GET_SIZE(self);
-	Py_ssize_t n, i = 0, last = INT_MAX;
+	Py_ssize_t n, i = 0, last = PY_SSIZE_T_MAX;
 	PyObject *subobj;
 
 	/* XXX ssize_t i */
@@ -1960,17 +1953,14 @@
 			return res;
 		}
 #endif
-		else {
-			PyErr_Format(PyExc_TypeError,
+		PyErr_Format(PyExc_TypeError,
 #ifdef Py_USING_UNICODE
-				     "%s arg must be None, str or unicode",
+			     "%s arg must be None, str or unicode",
 #else
-				     "%s arg must be None or str",
+			     "%s arg must be None or str",
 #endif
-				     STRIPNAME(striptype));
-			return NULL;
-		}
-		return do_xstrip(self, striptype, sep);
+			     STRIPNAME(striptype));
+		return NULL;
 	}
 
 	return do_strip(self, striptype);
@@ -2039,12 +2029,12 @@
 {
 	char *s = PyString_AS_STRING(self), *s_new;
 	Py_ssize_t i, n = PyString_GET_SIZE(self);
-	PyObject *new;
+	PyObject *newobj;
 
-	new = PyString_FromStringAndSize(NULL, n);
-	if (new == NULL)
+	newobj = PyString_FromStringAndSize(NULL, n);
+	if (newobj == NULL)
 		return NULL;
-	s_new = PyString_AsString(new);
+	s_new = PyString_AsString(newobj);
 	for (i = 0; i < n; i++) {
 		int c = Py_CHARMASK(*s++);
 		if (isupper(c)) {
@@ -2053,7 +2043,7 @@
 			*s_new = c;
 		s_new++;
 	}
-	return new;
+	return newobj;
 }
 
 
@@ -2067,12 +2057,12 @@
 {
 	char *s = PyString_AS_STRING(self), *s_new;
 	Py_ssize_t i, n = PyString_GET_SIZE(self);
-	PyObject *new;
+	PyObject *newobj;
 
-	new = PyString_FromStringAndSize(NULL, n);
-	if (new == NULL)
+	newobj = PyString_FromStringAndSize(NULL, n);
+	if (newobj == NULL)
 		return NULL;
-	s_new = PyString_AsString(new);
+	s_new = PyString_AsString(newobj);
 	for (i = 0; i < n; i++) {
 		int c = Py_CHARMASK(*s++);
 		if (islower(c)) {
@@ -2081,7 +2071,7 @@
 			*s_new = c;
 		s_new++;
 	}
-	return new;
+	return newobj;
 }
 
 
@@ -2097,12 +2087,12 @@
 	char *s = PyString_AS_STRING(self), *s_new;
 	Py_ssize_t i, n = PyString_GET_SIZE(self);
 	int previous_is_cased = 0;
-	PyObject *new;
+	PyObject *newobj;
 
-	new = PyString_FromStringAndSize(NULL, n);
-	if (new == NULL)
+	newobj = PyString_FromStringAndSize(NULL, n);
+	if (newobj == NULL)
 		return NULL;
-	s_new = PyString_AsString(new);
+	s_new = PyString_AsString(newobj);
 	for (i = 0; i < n; i++) {
 		int c = Py_CHARMASK(*s++);
 		if (islower(c)) {
@@ -2117,7 +2107,7 @@
 			previous_is_cased = 0;
 		*s_new++ = c;
 	}
-	return new;
+	return newobj;
 }
 
 PyDoc_STRVAR(capitalize__doc__,
@@ -2131,12 +2121,12 @@
 {
 	char *s = PyString_AS_STRING(self), *s_new;
 	Py_ssize_t i, n = PyString_GET_SIZE(self);
-	PyObject *new;
+	PyObject *newobj;
 
-	new = PyString_FromStringAndSize(NULL, n);
-	if (new == NULL)
+	newobj = PyString_FromStringAndSize(NULL, n);
+	if (newobj == NULL)
 		return NULL;
-	s_new = PyString_AsString(new);
+	s_new = PyString_AsString(newobj);
 	if (0 < n) {
 		int c = Py_CHARMASK(*s++);
 		if (islower(c))
@@ -2153,7 +2143,7 @@
 			*s_new = c;
 		s_new++;
 	}
-	return new;
+	return newobj;
 }
 
 
@@ -2169,7 +2159,7 @@
 {
 	const char *s = PyString_AS_STRING(self), *sub, *t;
 	Py_ssize_t len = PyString_GET_SIZE(self), n;
-	Py_ssize_t i = 0, last = INT_MAX;
+	Py_ssize_t i = 0, last = PY_SSIZE_T_MAX;
 	Py_ssize_t m, r;
 	PyObject *subobj;
 
@@ -2210,7 +2200,7 @@
 		}
 		if (i >= m)
 			break;
-		t = memchr(s+i, sub[0], m-i);
+		t = (const char *)memchr(s+i, sub[0], m-i);
 		if (t == NULL)
 			break;
 		i = t - s;
@@ -2229,12 +2219,12 @@
 {
 	char *s = PyString_AS_STRING(self), *s_new;
 	Py_ssize_t i, n = PyString_GET_SIZE(self);
-	PyObject *new;
+	PyObject *newobj;
 
-	new = PyString_FromStringAndSize(NULL, n);
-	if (new == NULL)
+	newobj = PyString_FromStringAndSize(NULL, n);
+	if (newobj == NULL)
 		return NULL;
-	s_new = PyString_AsString(new);
+	s_new = PyString_AsString(newobj);
 	for (i = 0; i < n; i++) {
 		int c = Py_CHARMASK(*s++);
 		if (islower(c)) {
@@ -2247,7 +2237,7 @@
 			*s_new = c;
 		s_new++;
 	}
-	return new;
+	return newobj;
 }
 
 
@@ -2323,12 +2313,12 @@
 	}
 
 	table = table1;
-	inlen = PyString_Size(input_obj);
+	inlen = PyString_GET_SIZE(input_obj);
 	result = PyString_FromStringAndSize((char *)NULL, inlen);
 	if (result == NULL)
 		return NULL;
 	output_start = output = PyString_AsString(result);
-	input = PyString_AsString(input_obj);
+	input = PyString_AS_STRING(input_obj);
 
 	if (dellen == 0) {
 		/* If no deletions are required, use faster code */
@@ -2457,7 +2447,7 @@
 	/* find length of output string */
 	nfound = (pat_len > 0) ? mymemcnt(str, len, pat, pat_len) : len + 1;
 	if (count < 0)
-		count = INT_MAX;
+		count = PY_SSIZE_T_MAX;
 	else if (nfound > count)
 		nfound = count;
 	if (nfound == 0)
@@ -2534,11 +2524,11 @@
 	char *new_s;
 	const Py_ssize_t len = PyString_GET_SIZE(self);
 	Py_ssize_t sub_len, repl_len, out_len;
-	int count = -1;
-	PyObject *new;
+	Py_ssize_t count = -1;
+	PyObject *newobj;
 	PyObject *subobj, *replobj;
 
-	if (!PyArg_ParseTuple(args, "OO|i:replace",
+	if (!PyArg_ParseTuple(args, "OO|n:replace",
 			      &subobj, &replobj, &count))
 		return NULL;
 
@@ -2574,20 +2564,20 @@
 	if (out_len == -1) {
 		if (PyString_CheckExact(self)) {
 			/* we're returning another reference to self */
-			new = (PyObject*)self;
-			Py_INCREF(new);
+			newobj = (PyObject*)self;
+			Py_INCREF(newobj);
 		}
 		else {
-			new = PyString_FromStringAndSize(str, len);
-			if (new == NULL)
+			newobj = PyString_FromStringAndSize(str, len);
+			if (newobj == NULL)
 				return NULL;
 		}
 	}
 	else {
-		new = PyString_FromStringAndSize(new_s, out_len);
+		newobj = PyString_FromStringAndSize(new_s, out_len);
 		PyMem_FREE(new_s);
 	}
-	return new;
+	return newobj;
 }
 
 
@@ -2606,7 +2596,7 @@
 	const char* prefix;
 	Py_ssize_t plen;
 	Py_ssize_t start = 0;
-	Py_ssize_t end = INT_MAX;
+	Py_ssize_t end = PY_SSIZE_T_MAX;
 	PyObject *subobj;
 
 	if (!PyArg_ParseTuple(args, "O|O&O&:startswith", &subobj,
@@ -2657,7 +2647,7 @@
 	const char* suffix;
 	Py_ssize_t slen;
 	Py_ssize_t start = 0;
-	Py_ssize_t end = INT_MAX;
+	Py_ssize_t end = PY_SSIZE_T_MAX;
 	PyObject *subobj;
 
 	if (!PyArg_ParseTuple(args, "O|O&O&:endswith", &subobj,
@@ -2711,7 +2701,7 @@
     char *encoding = NULL;
     char *errors = NULL;
     PyObject *v;
-    
+
     if (!PyArg_ParseTuple(args, "|ss:encode", &encoding, &errors))
         return NULL;
     v = PyString_AsEncodedObject((PyObject *)self, encoding, errors);
@@ -2748,7 +2738,7 @@
     char *encoding = NULL;
     char *errors = NULL;
     PyObject *v;
-    
+
     if (!PyArg_ParseTuple(args, "|ss:decode", &encoding, &errors))
         return NULL;
     v = PyString_AsDecodedObject((PyObject *)self, encoding, errors);
@@ -2870,10 +2860,10 @@
 static PyObject *
 string_ljust(PyStringObject *self, PyObject *args)
 {
-    int width;
+    Py_ssize_t width;
     char fillchar = ' ';
 
-    if (!PyArg_ParseTuple(args, "i|c:ljust", &width, &fillchar))
+    if (!PyArg_ParseTuple(args, "n|c:ljust", &width, &fillchar))
         return NULL;
 
     if (PyString_GET_SIZE(self) >= width && PyString_CheckExact(self)) {
@@ -2894,10 +2884,10 @@
 static PyObject *
 string_rjust(PyStringObject *self, PyObject *args)
 {
-    int width;
+    Py_ssize_t width;
     char fillchar = ' ';
 
-    if (!PyArg_ParseTuple(args, "i|c:rjust", &width, &fillchar))
+    if (!PyArg_ParseTuple(args, "n|c:rjust", &width, &fillchar))
         return NULL;
 
     if (PyString_GET_SIZE(self) >= width && PyString_CheckExact(self)) {
@@ -2919,10 +2909,10 @@
 string_center(PyStringObject *self, PyObject *args)
 {
     Py_ssize_t marg, left;
-    long width;
+    Py_ssize_t width;
     char fillchar = ' ';
 
-    if (!PyArg_ParseTuple(args, "l|c:center", &width, &fillchar))
+    if (!PyArg_ParseTuple(args, "n|c:center", &width, &fillchar))
         return NULL;
 
     if (PyString_GET_SIZE(self) >= width && PyString_CheckExact(self)) {
@@ -2948,9 +2938,9 @@
     Py_ssize_t fill;
     PyObject *s;
     char *p;
+    Py_ssize_t width;
 
-    long width;
-    if (!PyArg_ParseTuple(args, "l:zfill", &width))
+    if (!PyArg_ParseTuple(args, "n:zfill", &width))
         return NULL;
 
     if (PyString_GET_SIZE(self) >= width) {
@@ -3467,22 +3457,22 @@
 	"str",
 	sizeof(PyStringObject),
 	sizeof(char),
- 	(destructor)string_dealloc, 		/* tp_dealloc */
+ 	string_dealloc, 			/* tp_dealloc */
 	(printfunc)string_print, 		/* tp_print */
 	0,					/* tp_getattr */
 	0,					/* tp_setattr */
 	0,					/* tp_compare */
-	(reprfunc)string_repr, 			/* tp_repr */
+	string_repr, 				/* tp_repr */
 	&string_as_number,			/* tp_as_number */
 	&string_as_sequence,			/* tp_as_sequence */
 	&string_as_mapping,			/* tp_as_mapping */
 	(hashfunc)string_hash, 			/* tp_hash */
 	0,					/* tp_call */
-	(reprfunc)string_str,			/* tp_str */
+	string_str,				/* tp_str */
 	PyObject_GenericGetAttr,		/* tp_getattro */
 	0,					/* tp_setattro */
 	&string_as_buffer,			/* tp_as_buffer */
-	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_CHECKTYPES | 
+	Py_TPFLAGS_DEFAULT | Py_TPFLAGS_CHECKTYPES |
 		Py_TPFLAGS_BASETYPE,		/* tp_flags */
 	string_doc,				/* tp_doc */
 	0,					/* tp_traverse */
@@ -3635,7 +3625,7 @@
 	     len = 1 + 50 + 1 + prec = 52 + prec
 
 	   If prec=0 the effective precision is 1 (the leading digit is
-	   always given), therefore increase the length by one. 
+	   always given), therefore increase the length by one.
 
 	*/
 	if ((type == 'g' && buflen <= (size_t)10 + (size_t)prec) ||
@@ -3711,7 +3701,7 @@
 	}
 	buf = PyString_AsString(result);
 	llen = PyString_Size(result);
-	if (llen > INT_MAX) {
+	if (llen > PY_SSIZE_T_MAX) {
 		PyErr_SetString(PyExc_ValueError, "string too large in _PyString_FormatLong");
 		return NULL;
 	}
@@ -4439,7 +4429,7 @@
 	   detector, interned strings are not forcibly deallocated; rather, we
 	   give them their stolen references back, and then clear and DECREF
 	   the interned dict. */
-	   
+
 	fprintf(stderr, "releasing interned strings\n");
 	n = PyList_GET_SIZE(keys);
 	for (i = 0; i < n; i++) {
diff --git a/Objects/structseq.c b/Objects/structseq.c
index 218d0b4..e074810 100644
--- a/Objects/structseq.c
+++ b/Objects/structseq.c
@@ -315,7 +315,7 @@
 	0,					/* tp_as_number */
 	&structseq_as_sequence,			/* tp_as_sequence */
 	0,					/* tp_as_mapping */
-	(hashfunc)structseq_hash,              	/* tp_hash */
+	structseq_hash,				/* tp_hash */
 	0,              			/* tp_call */
 	0,					/* tp_str */
 	0,                       		/* tp_getattro */
@@ -349,6 +349,14 @@
 	PyMemberDef* members;
 	int n_members, n_unnamed_members, i, k;
 
+#ifdef Py_TRACE_REFS
+	/* if the type object was chained, unchain it first
+	   before overwriting its storage */
+	if (type->_ob_next) {
+		_Py_ForgetReference((PyObject*)type);
+	}
+#endif
+
 	n_unnamed_members = 0;
 	for (i = 0; desc->fields[i].name != NULL; ++i)
 		if (desc->fields[i].name == PyStructSequence_UnnamedField)
diff --git a/Objects/tupleobject.c b/Objects/tupleobject.c
index 384b355..2161ab9 100644
--- a/Objects/tupleobject.c
+++ b/Objects/tupleobject.c
@@ -438,16 +438,9 @@
 tupletraverse(PyTupleObject *o, visitproc visit, void *arg)
 {
 	Py_ssize_t i;
-	PyObject *x;
 
-	for (i = o->ob_size; --i >= 0; ) {
-		x = o->ob_item[i];
-		if (x != NULL) {
-			int err = visit(x, arg);
-			if (err)
-				return err;
-		}
-	}
+	for (i = o->ob_size; --i >= 0; )
+		Py_VISIT(o->ob_item[i]);
 	return 0;
 }
 
@@ -547,7 +540,7 @@
 static PyObject *
 tuple_subtype_new(PyTypeObject *type, PyObject *args, PyObject *kwds)
 {
-	PyObject *tmp, *new, *item;
+	PyObject *tmp, *newobj, *item;
 	Py_ssize_t i, n;
 
 	assert(PyType_IsSubtype(type, &PyTuple_Type));
@@ -555,16 +548,16 @@
 	if (tmp == NULL)
 		return NULL;
 	assert(PyTuple_Check(tmp));
-	new = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
-	if (new == NULL)
+	newobj = type->tp_alloc(type, n = PyTuple_GET_SIZE(tmp));
+	if (newobj == NULL)
 		return NULL;
 	for (i = 0; i < n; i++) {
 		item = PyTuple_GET_ITEM(tmp, i);
 		Py_INCREF(item);
-		PyTuple_SET_ITEM(new, i, item);
+		PyTuple_SET_ITEM(newobj, i, item);
 	}
 	Py_DECREF(tmp);
-	return new;
+	return newobj;
 }
 
 PyDoc_STRVAR(tuple_doc,
@@ -615,6 +608,7 @@
 		}
 		else {
 			result = PyTuple_New(slicelength);
+			if (!result) return NULL;
 
 			src = self->ob_item;
 			dest = ((PyTupleObject *)result)->ob_item;
@@ -790,27 +784,6 @@
 	PyTupleObject *it_seq; /* Set to NULL when iterator is exhausted */
 } tupleiterobject;
 
-PyTypeObject PyTupleIter_Type;
-
-static PyObject *
-tuple_iter(PyObject *seq)
-{
-	tupleiterobject *it;
-
-	if (!PyTuple_Check(seq)) {
-		PyErr_BadInternalCall();
-		return NULL;
-	}
-	it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
-	if (it == NULL)
-		return NULL;
-	it->it_index = 0;
-	Py_INCREF(seq);
-	it->it_seq = (PyTupleObject *)seq;
-	_PyObject_GC_TRACK(it);
-	return (PyObject *)it;
-}
-
 static void
 tupleiter_dealloc(tupleiterobject *it)
 {
@@ -822,9 +795,8 @@
 static int
 tupleiter_traverse(tupleiterobject *it, visitproc visit, void *arg)
 {
-	if (it->it_seq == NULL)
-		return 0;
-	return visit((PyObject *)it->it_seq, arg);
+	Py_VISIT(it->it_seq);
+	return 0;
 }
 
 static PyObject *
@@ -900,3 +872,22 @@
 	tupleiter_methods,			/* tp_methods */
 	0,
 };
+
+static PyObject *
+tuple_iter(PyObject *seq)
+{
+	tupleiterobject *it;
+
+	if (!PyTuple_Check(seq)) {
+		PyErr_BadInternalCall();
+		return NULL;
+	}
+	it = PyObject_GC_New(tupleiterobject, &PyTupleIter_Type);
+	if (it == NULL)
+		return NULL;
+	it->it_index = 0;
+	Py_INCREF(seq);
+	it->it_seq = (PyTupleObject *)seq;
+	_PyObject_GC_TRACK(it);
+	return (PyObject *)it;
+}
diff --git a/Objects/typeobject.c b/Objects/typeobject.c
index c02f060..4caf538 100644
--- a/Objects/typeobject.c
+++ b/Objects/typeobject.c
@@ -453,7 +453,7 @@
 	if (PyType_IS_GC(type))
 		obj = _PyObject_GC_Malloc(size);
 	else
-		obj = PyObject_MALLOC(size);
+		obj = (PyObject *)PyObject_MALLOC(size);
 
 	if (obj == NULL)
 		return PyErr_NoMemory();
@@ -525,21 +525,15 @@
 
 	if (type->tp_dictoffset != base->tp_dictoffset) {
 		PyObject **dictptr = _PyObject_GetDictPtr(self);
-		if (dictptr && *dictptr) {
-			int err = visit(*dictptr, arg);
-			if (err)
-				return err;
-		}
+		if (dictptr && *dictptr)
+			Py_VISIT(*dictptr);
 	}
 
-	if (type->tp_flags & Py_TPFLAGS_HEAPTYPE) {
+	if (type->tp_flags & Py_TPFLAGS_HEAPTYPE)
 		/* For a heaptype, the instances count as references
 		   to the type.  Traverse the type so the collector
 		   can find cycles involving this link. */
-		int err = visit((PyObject *)type, arg);
-		if (err)
-			return err;
-	}
+		Py_VISIT(type);
 
 	if (basetraverse)
 		return basetraverse(self, visit, arg);
@@ -559,8 +553,8 @@
 			char *addr = (char *)self + mp->offset;
 			PyObject *obj = *(PyObject **)addr;
 			if (obj != NULL) {
-				Py_DECREF(obj);
 				*(PyObject **)addr = NULL;
+				Py_DECREF(obj);
 			}
 		}
 	}
@@ -1106,14 +1100,17 @@
 	char buf[1000];
 	PyObject *k, *v;
 	PyObject *set = PyDict_New();
+	if (!set) return;
 
 	to_merge_size = PyList_GET_SIZE(to_merge);
 	for (i = 0; i < to_merge_size; i++) {
 		PyObject *L = PyList_GET_ITEM(to_merge, i);
 		if (remain[i] < PyList_GET_SIZE(L)) {
 			PyObject *c = PyList_GET_ITEM(L, remain[i]);
-			if (PyDict_SetItem(set, c, Py_None) < 0)
+			if (PyDict_SetItem(set, c, Py_None) < 0) {
+				Py_DECREF(set);
 				return;
+			}
 		}
 	}
 	n = PyDict_Size(set);
@@ -1121,12 +1118,12 @@
 	off = PyOS_snprintf(buf, sizeof(buf), "Cannot create a \
 consistent method resolution\norder (MRO) for bases");
 	i = 0;
-	while (PyDict_Next(set, &i, &k, &v) && off < sizeof(buf)) {
+	while (PyDict_Next(set, &i, &k, &v) && (size_t)off < sizeof(buf)) {
 		PyObject *name = class_name(k);
 		off += PyOS_snprintf(buf + off, sizeof(buf) - off, " %s",
 				     name ? PyString_AS_STRING(name) : "?");
 		Py_XDECREF(name);
-		if (--n && off+1 < sizeof(buf)) {
+		if (--n && (size_t)(off+1) < sizeof(buf)) {
 			buf[off++] = ',';
 			buf[off] = '\0';
 		}
@@ -1147,7 +1144,7 @@
 	   remain[i] is the index of the next base in to_merge[i]
 	   that is not included in acc.
 	*/
-	remain = PyMem_MALLOC(SIZEOF_INT*to_merge_size);
+	remain = (int *)PyMem_MALLOC(SIZEOF_INT*to_merge_size);
 	if (remain == NULL)
 		return -1;
 	for (i = 0; i < to_merge_size; i++)
@@ -1893,7 +1890,7 @@
 		PyObject *doc = PyDict_GetItemString(dict, "__doc__");
 		if (doc != NULL && PyString_Check(doc)) {
 			const size_t n = (size_t)PyString_GET_SIZE(doc);
-                        char *tp_doc = PyObject_MALLOC(n+1);
+                        char *tp_doc = (char *)PyObject_MALLOC(n+1);
 			if (tp_doc == NULL) {
 				Py_DECREF(type);
 				return NULL;
@@ -2195,51 +2192,31 @@
 static int
 type_traverse(PyTypeObject *type, visitproc visit, void *arg)
 {
-	int err;
-
 	/* Because of type_is_gc(), the collector only calls this
 	   for heaptypes. */
 	assert(type->tp_flags & Py_TPFLAGS_HEAPTYPE);
 
-#define VISIT(SLOT) \
-	if (SLOT) { \
-		err = visit((PyObject *)(SLOT), arg); \
-		if (err) \
-			return err; \
-	}
-
-	VISIT(type->tp_dict);
-	VISIT(type->tp_cache);
-	VISIT(type->tp_mro);
-	VISIT(type->tp_bases);
-	VISIT(type->tp_base);
+	Py_VISIT(type->tp_dict);
+	Py_VISIT(type->tp_cache);
+	Py_VISIT(type->tp_mro);
+	Py_VISIT(type->tp_bases);
+	Py_VISIT(type->tp_base);
 
 	/* There's no need to visit type->tp_subclasses or
 	   ((PyHeapTypeObject *)type)->ht_slots, because they can't be involved
 	   in cycles; tp_subclasses is a list of weak references,
 	   and slots is a tuple of strings. */
 
-#undef VISIT
-
 	return 0;
 }
 
 static int
 type_clear(PyTypeObject *type)
 {
-	PyObject *tmp;
-
 	/* Because of type_is_gc(), the collector only calls this
 	   for heaptypes. */
 	assert(type->tp_flags & Py_TPFLAGS_HEAPTYPE);
 
-#define CLEAR(SLOT) \
-	if (SLOT) { \
-		tmp = (PyObject *)(SLOT); \
-		SLOT = NULL; \
-		Py_DECREF(tmp); \
-	}
-
 	/* The only field we need to clear is tp_mro, which is part of a
 	   hard cycle (its first element is the class itself) that won't
 	   be broken otherwise (it's a tuple and tuples don't have a
@@ -2265,9 +2242,7 @@
 	       A tuple of strings can't be part of a cycle.
 	*/
 
-	CLEAR(type->tp_mro);
-
-#undef CLEAR
+	Py_CLEAR(type->tp_mro);
 
 	return 0;
 }
@@ -2443,23 +2418,23 @@
 }
 
 static int
-compatible_for_assignment(PyTypeObject* old, PyTypeObject* new, char* attr)
+compatible_for_assignment(PyTypeObject* oldto, PyTypeObject* newto, char* attr)
 {
 	PyTypeObject *newbase, *oldbase;
 
-	if (new->tp_dealloc != old->tp_dealloc ||
-	    new->tp_free != old->tp_free)
+	if (newto->tp_dealloc != oldto->tp_dealloc ||
+	    newto->tp_free != oldto->tp_free)
 	{
 		PyErr_Format(PyExc_TypeError,
 			     "%s assignment: "
 			     "'%s' deallocator differs from '%s'",
 			     attr,
-			     new->tp_name,
-			     old->tp_name);
+			     newto->tp_name,
+			     oldto->tp_name);
 		return 0;
 	}
-	newbase = new;
-	oldbase = old;
+	newbase = newto;
+	oldbase = oldto;
 	while (equiv_structs(newbase, newbase->tp_base))
 		newbase = newbase->tp_base;
 	while (equiv_structs(oldbase, oldbase->tp_base))
@@ -2471,8 +2446,8 @@
 			     "%s assignment: "
 			     "'%s' object layout differs from '%s'",
 			     attr,
-			     new->tp_name,
-			     old->tp_name);
+			     newto->tp_name,
+			     oldto->tp_name);
 		return 0;
 	}
 
@@ -2482,8 +2457,8 @@
 static int
 object_set_class(PyObject *self, PyObject *value, void *closure)
 {
-	PyTypeObject *old = self->ob_type;
-	PyTypeObject *new;
+	PyTypeObject *oldto = self->ob_type;
+	PyTypeObject *newto;
 
 	if (value == NULL) {
 		PyErr_SetString(PyExc_TypeError,
@@ -2496,18 +2471,18 @@
 		  value->ob_type->tp_name);
 		return -1;
 	}
-	new = (PyTypeObject *)value;
-	if (!(new->tp_flags & Py_TPFLAGS_HEAPTYPE) ||
-	    !(old->tp_flags & Py_TPFLAGS_HEAPTYPE))
+	newto = (PyTypeObject *)value;
+	if (!(newto->tp_flags & Py_TPFLAGS_HEAPTYPE) ||
+	    !(oldto->tp_flags & Py_TPFLAGS_HEAPTYPE))
 	{
 		PyErr_Format(PyExc_TypeError,
 			     "__class__ assignment: only for heap types");
 		return -1;
 	}
-	if (compatible_for_assignment(new, old, "__class__")) {
-		Py_INCREF(new);
-		self->ob_type = new;
-		Py_DECREF(old);
+	if (compatible_for_assignment(newto, oldto, "__class__")) {
+		Py_INCREF(newto);
+		self->ob_type = newto;
+		Py_DECREF(oldto);
 		return 0;
 	}
 	else {
@@ -2785,7 +2760,7 @@
 	"object",				/* tp_name */
 	sizeof(PyObject),			/* tp_basicsize */
 	0,					/* tp_itemsize */
-	(destructor)object_dealloc,		/* tp_dealloc */
+	object_dealloc,				/* tp_dealloc */
 	0,					/* tp_print */
 	0,			 		/* tp_getattr */
 	0,					/* tp_setattr */
@@ -3326,7 +3301,7 @@
 {
 	Py_ssize_t i;
 	int result;
-	PyObject *list, *ref, *new;
+	PyObject *list, *ref, *newobj;
 
 	list = base->tp_subclasses;
 	if (list == NULL) {
@@ -3335,16 +3310,16 @@
 			return -1;
 	}
 	assert(PyList_Check(list));
-	new = PyWeakref_NewRef((PyObject *)type, NULL);
+	newobj = PyWeakref_NewRef((PyObject *)type, NULL);
 	i = PyList_GET_SIZE(list);
 	while (--i >= 0) {
 		ref = PyList_GET_ITEM(list, i);
 		assert(PyWeakref_CheckRef(ref));
 		if (PyWeakref_GET_OBJECT(ref) == Py_None)
-			return PyList_SetItem(list, i, new);
+			return PyList_SetItem(list, i, newobj);
 	}
-	result = PyList_Append(list, new);
-	Py_DECREF(new);
+	result = PyList_Append(list, newobj);
+	Py_DECREF(newobj);
 	return result;
 }
 
@@ -3536,12 +3511,16 @@
 }
 
 static PyObject *
-wrap_ssizeargfunc(PyObject *self, PyObject *args, void *wrapped)
+wrap_indexargfunc(PyObject *self, PyObject *args, void *wrapped)
 {
 	ssizeargfunc func = (ssizeargfunc)wrapped;
+	PyObject* o;
 	Py_ssize_t i;
 
-	if (!PyArg_ParseTuple(args, "n", &i))
+	if (!PyArg_UnpackTuple(args, "", 1, 1, &o))
+		return NULL;
+	i = PyNumber_Index(o);
+	if (i == -1 && PyErr_Occurred())
 		return NULL;
 	return (*func)(self, i);
 }
@@ -3551,7 +3530,7 @@
 {
 	Py_ssize_t i;
 
-	i = PyInt_AsSsize_t(arg);
+	i = PyNumber_Index(arg);
 	if (i == -1 && PyErr_Occurred())
 		return -1;
 	if (i < 0) {
@@ -4359,36 +4338,21 @@
 static Py_ssize_t 
 slot_nb_index(PyObject *self)
 {
-	PyObject *func, *args;
 	static PyObject *index_str;
-	Py_ssize_t result = -1;
+	PyObject *temp = call_method(self, "__index__", &index_str, "()");
+	Py_ssize_t result;
 
-	func = lookup_maybe(self, "__index__", &index_str);
-	if (func == NULL) {
-		if (!PyErr_Occurred()) {
-			PyErr_SetString(PyExc_TypeError, 
-				"object cannot be interpreted as an index");
-		}
+	if (temp == NULL)
 		return -1;
- 	}
-	args = PyTuple_New(0);
-	if (args != NULL) {
-		PyObject *temp = PyObject_Call(func, args, NULL);
-		Py_DECREF(args);
-		if (temp != NULL) {
-			if (PyInt_Check(temp) || PyLong_Check(temp)) {
-				result =
-                                  temp->ob_type->tp_as_number->nb_index(temp);
-			}
-			else {
- 				PyErr_SetString(PyExc_TypeError, 
-				    "__index__ must return an int or a long");
-				result = -1;
-			}
-			Py_DECREF(temp);
-		}
+	if (PyInt_CheckExact(temp) || PyLong_CheckExact(temp)) {
+		result = temp->ob_type->tp_as_number->nb_index(temp);
 	}
-	Py_DECREF(func);
+	else {
+		PyErr_SetString(PyExc_TypeError, 
+				"__index__ must return an int or a long");
+		result = -1;
+	}
+	Py_DECREF(temp);
 	return result;
 }
 
@@ -5018,9 +4982,9 @@
 	   test_descr.notimplemented() */
 	SQSLOT("__add__", sq_concat, NULL, wrap_binaryfunc,
           "x.__add__(y) <==> x+y"),
-	SQSLOT("__mul__", sq_repeat, NULL, wrap_ssizeargfunc,
+	SQSLOT("__mul__", sq_repeat, NULL, wrap_indexargfunc,
           "x.__mul__(n) <==> x*n"),
-	SQSLOT("__rmul__", sq_repeat, NULL, wrap_ssizeargfunc,
+	SQSLOT("__rmul__", sq_repeat, NULL, wrap_indexargfunc,
           "x.__rmul__(n) <==> n*x"),
 	SQSLOT("__getitem__", sq_item, slot_sq_item, wrap_sq_item,
 	       "x.__getitem__(y) <==> x[y]"),
@@ -5046,7 +5010,7 @@
 	SQSLOT("__iadd__", sq_inplace_concat, NULL,
           wrap_binaryfunc, "x.__iadd__(y) <==> x+=y"),
 	SQSLOT("__imul__", sq_inplace_repeat, NULL,
-          wrap_ssizeargfunc, "x.__imul__(y) <==> x*=y"),
+          wrap_indexargfunc, "x.__imul__(y) <==> x*=y"),
 
 	MPSLOT("__len__", mp_length, slot_mp_length, wrap_lenfunc,
 	       "x.__len__() <==> len(x)"),
@@ -5211,21 +5175,21 @@
 
 	/* Note: this depends on the order of the members of PyHeapTypeObject! */
 	assert(offset >= 0);
-	assert(offset < offsetof(PyHeapTypeObject, as_buffer));
-	if (offset >= offsetof(PyHeapTypeObject, as_sequence)) {
-		ptr = (void *)type->tp_as_sequence;
+	assert((size_t)offset < offsetof(PyHeapTypeObject, as_buffer));
+	if ((size_t)offset >= offsetof(PyHeapTypeObject, as_sequence)) {
+		ptr = (char *)type->tp_as_sequence;
 		offset -= offsetof(PyHeapTypeObject, as_sequence);
 	}
-	else if (offset >= offsetof(PyHeapTypeObject, as_mapping)) {
-		ptr = (void *)type->tp_as_mapping;
+	else if ((size_t)offset >= offsetof(PyHeapTypeObject, as_mapping)) {
+		ptr = (char *)type->tp_as_mapping;
 		offset -= offsetof(PyHeapTypeObject, as_mapping);
 	}
-	else if (offset >= offsetof(PyHeapTypeObject, as_number)) {
-		ptr = (void *)type->tp_as_number;
+	else if ((size_t)offset >= offsetof(PyHeapTypeObject, as_number)) {
+		ptr = (char *)type->tp_as_number;
 		offset -= offsetof(PyHeapTypeObject, as_number);
 	}
 	else {
-		ptr = (void *)type;
+		ptr = (char *)type;
 	}
 	if (ptr != NULL)
 		ptr += offset;
@@ -5743,7 +5707,7 @@
 super_descr_get(PyObject *self, PyObject *obj, PyObject *type)
 {
 	superobject *su = (superobject *)self;
-	superobject *new;
+	superobject *newobj;
 
 	if (obj == NULL || obj == Py_None || su->obj != NULL) {
 		/* Not binding to an object, or already bound */
@@ -5760,16 +5724,16 @@
 		PyTypeObject *obj_type = supercheck(su->type, obj);
 		if (obj_type == NULL)
 			return NULL;
-		new = (superobject *)PySuper_Type.tp_new(&PySuper_Type,
+		newobj = (superobject *)PySuper_Type.tp_new(&PySuper_Type,
 							 NULL, NULL);
-		if (new == NULL)
+		if (newobj == NULL)
 			return NULL;
 		Py_INCREF(su->type);
 		Py_INCREF(obj);
-		new->type = su->type;
-		new->obj = obj;
-		new->obj_type = obj_type;
-		return (PyObject *)new;
+		newobj->type = su->type;
+		newobj->obj = obj;
+		newobj->obj_type = obj_type;
+		return (PyObject *)newobj;
 	}
 }
 
@@ -5811,20 +5775,10 @@
 super_traverse(PyObject *self, visitproc visit, void *arg)
 {
 	superobject *su = (superobject *)self;
-	int err;
 
-#define VISIT(SLOT) \
-	if (SLOT) { \
-		err = visit((PyObject *)(SLOT), arg); \
-		if (err) \
-			return err; \
-	}
-
-	VISIT(su->obj);
-	VISIT(su->type);
-	VISIT(su->obj_type);
-
-#undef VISIT
+	Py_VISIT(su->obj);
+	Py_VISIT(su->type);
+	Py_VISIT(su->obj_type);
 
 	return 0;
 }
diff --git a/Objects/unicodeobject.c b/Objects/unicodeobject.c
index 7fbce14..e62c774 100644
--- a/Objects/unicodeobject.c
+++ b/Objects/unicodeobject.c
@@ -36,6 +36,7 @@
 
 */
 
+#define PY_SSIZE_T_CLEAN
 #include "Python.h"
 
 #include "unicodeobject.h"
@@ -83,6 +84,11 @@
 
 */
 
+
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 /* Free list for Unicode objects */
 static PyUnicodeObject *unicode_freelist;
 static int unicode_freelist_size;
@@ -130,14 +136,9 @@
     /* Resizing shared object (unicode_empty or single character
        objects) in-place is not allowed. Use PyUnicode_Resize()
        instead ! */
-    if (unicode == unicode_empty ||
-	(unicode->length == 1 &&
-         /* MvL said unicode->str[] may be signed.  Python generally assumes
-          * an int contains at least 32 bits, and we don't use more than
-          * 32 bits even in a UCS4 build, so casting to unsigned int should
-          * be correct.
-          */
-	 (unsigned int)unicode->str[0] < 256U &&
+    if (unicode == unicode_empty || 
+	(unicode->length == 1 && 
+	 unicode->str[0] < 256U &&
 	 unicode_latin1[unicode->str[0]] == unicode)) {
         PyErr_SetString(PyExc_SystemError,
                         "can't resize shared unicode objects");
@@ -149,13 +150,12 @@
     oldstr = unicode->str;
     PyMem_RESIZE(unicode->str, Py_UNICODE, length + 1);
     if (!unicode->str) {
-	unicode->str = oldstr;
+	unicode->str = (Py_UNICODE *)oldstr;
         PyErr_NoMemory();
         return -1;
     }
     unicode->str[length] = 0;
-	assert(length < INT_MAX);
-    unicode->length = (int)length;
+    unicode->length = length;
 
  reset:
     /* Reset the object caches */
@@ -226,8 +226,7 @@
      */
     unicode->str[0] = 0;
     unicode->str[length] = 0;
-	assert(length<INT_MAX);
-    unicode->length = (int)length;
+    unicode->length = length;
     unicode->hash = -1;
     unicode->defenc = NULL;
     return unicode;
@@ -368,7 +367,7 @@
 #else
     {
 	register Py_UNICODE *u;
-	register int i;
+	register Py_ssize_t i;
 	u = PyUnicode_AS_UNICODE(unicode);
 	for (i = size; i > 0; i--)
 	    *u++ = *w++;
@@ -396,7 +395,7 @@
 #else
     {
 	register Py_UNICODE *u;
-	register int i;
+	register Py_ssize_t i;
 	u = PyUnicode_AS_UNICODE(unicode);
 	for (i = size; i > 0; i--)
 	    *w++ = *u++;
@@ -1358,7 +1357,7 @@
     PyObject *v;        /* result string object */
     char *p;            /* next free byte in output buffer */
     Py_ssize_t nallocated;  /* number of result bytes allocated */
-    int nneeded;        /* number of result bytes needed */
+    Py_ssize_t nneeded;        /* number of result bytes needed */
     char stackbuf[MAX_SHORT_UNICHARS * 4];
 
     assert(s != NULL);
@@ -1427,13 +1426,13 @@
 
     if (v == NULL) {
         /* This was stack allocated. */
-        nneeded = Py_SAFE_DOWNCAST(p - stackbuf, long, int);
+        nneeded = p - stackbuf;
         assert(nneeded <= nallocated);
         v = PyString_FromStringAndSize(stackbuf, nneeded);
     }
     else {
     	/* Cut back to size actually needed. */
-        nneeded = Py_SAFE_DOWNCAST(p - PyString_AS_STRING(v), long, int);
+        nneeded = p - PyString_AS_STRING(v);
         assert(nneeded <= nallocated);
         _PyString_Resize(&v, nneeded);
     }
@@ -1884,7 +1883,7 @@
                 Py_DECREF(m);
                 if (api == NULL)
                     goto ucnhashError;
-                ucnhash_CAPI = PyCObject_AsVoidPtr(api);
+                ucnhash_CAPI = (_PyUnicode_Name_CAPI *)PyCObject_AsVoidPtr(api);
                 Py_DECREF(api);
                 if (ucnhash_CAPI == NULL)
                     goto ucnhashError;
@@ -1934,7 +1933,7 @@
         nextByte:
         ;
     }
-    if (_PyUnicode_Resize(&v, (int)(p - PyUnicode_AS_UNICODE(v))) < 0)
+    if (_PyUnicode_Resize(&v, p - PyUnicode_AS_UNICODE(v)) < 0)
         goto onError;
     Py_XDECREF(errorHandler);
     Py_XDECREF(exc);
@@ -2003,7 +2002,7 @@
 #ifdef Py_UNICODE_WIDE
         /* Map 21-bit characters to '\U00xxxxxx' */
         else if (ch >= 0x10000) {
-	    int offset = p - PyString_AS_STRING(repr);
+	    Py_ssize_t offset = p - PyString_AS_STRING(repr);
 
 	    /* Resize the string if necessary */
 	    if (offset + 12 > PyString_GET_SIZE(repr)) {
@@ -2205,7 +2204,7 @@
 	nextByte:
 	;
     }
-    if (_PyUnicode_Resize(&v, (int)(p - PyUnicode_AS_UNICODE(v))) < 0)
+    if (_PyUnicode_Resize(&v, p - PyUnicode_AS_UNICODE(v)) < 0)
 	goto onError;
     Py_XDECREF(errorHandler);
     Py_XDECREF(exc);
@@ -2348,7 +2347,7 @@
         }
     }
 
-    if (_PyUnicode_Resize(&v, (int)(p - PyUnicode_AS_UNICODE(v))) < 0)
+    if (_PyUnicode_Resize(&v, p - PyUnicode_AS_UNICODE(v)) < 0)
         goto onError;
     Py_XDECREF(errorHandler);
     Py_XDECREF(exc);
@@ -2499,8 +2498,8 @@
     /* current output position */
     Py_ssize_t respos = 0;
     Py_ssize_t ressize;
-    char *encoding = (limit == 256) ? "latin-1" : "ascii";
-    char *reason = (limit == 256) ? "ordinal not in range(256)" : "ordinal not in range(128)";
+    const char *encoding = (limit == 256) ? "latin-1" : "ascii";
+    const char *reason = (limit == 256) ? "ordinal not in range(256)" : "ordinal not in range(128)";
     PyObject *errorHandler = NULL;
     PyObject *exc = NULL;
     /* the following variable is used for caching string comparisons
@@ -2723,7 +2722,7 @@
 	}
     }
     if (p - PyUnicode_AS_UNICODE(v) < PyString_GET_SIZE(v))
-	if (_PyUnicode_Resize(&v, (int)(p - PyUnicode_AS_UNICODE(v))) < 0)
+	if (_PyUnicode_Resize(&v, p - PyUnicode_AS_UNICODE(v)) < 0)
 	    goto onError;
     Py_XDECREF(errorHandler);
     Py_XDECREF(exc);
@@ -2982,7 +2981,7 @@
 	}
     }
     if (p - PyUnicode_AS_UNICODE(v) < PyUnicode_GET_SIZE(v))
-	if (_PyUnicode_Resize(&v, (int)(p - PyUnicode_AS_UNICODE(v))) < 0)
+	if (_PyUnicode_Resize(&v, p - PyUnicode_AS_UNICODE(v)) < 0)
 	    goto onError;
     Py_XDECREF(errorHandler);
     Py_XDECREF(exc);
@@ -3336,9 +3335,9 @@
     Py_ssize_t startpos, Py_ssize_t endpos,
     Py_ssize_t *newpos)
 {
-    static char *argparse = "O!i;translating error handler must return (unicode, int) tuple";
+    static char *argparse = "O!n;translating error handler must return (unicode, int) tuple";
 
-    int i_newpos;
+    Py_ssize_t i_newpos;
     PyObject *restuple;
     PyObject *resunicode;
 
@@ -3798,7 +3797,7 @@
 		 Py_ssize_t end,
 		 PyUnicodeObject *substring)
 {
-    int count = 0;
+    Py_ssize_t count = 0;
 
     if (start < 0)
         start += self->length;
@@ -4157,7 +4156,7 @@
     PyObject *fseq;          /* PySequence_Fast(seq) */
     Py_ssize_t seqlen;              /* len(fseq) -- number of items in sequence */
     PyObject *item;
-    int i;
+    Py_ssize_t i;
 
     fseq = PySequence_Fast(seq, "");
     if (fseq == NULL) {
@@ -4206,7 +4205,7 @@
     }
 
     /* Get space. */
-    res = _PyUnicode_New((int)res_alloc);
+    res = _PyUnicode_New(res_alloc);
     if (res == NULL)
         goto onError;
     res_p = PyUnicode_AS_UNICODE(res);
@@ -4220,7 +4219,7 @@
 	/* Convert item to Unicode. */
 	if (! PyUnicode_Check(item) && ! PyString_Check(item)) {
 	    PyErr_Format(PyExc_TypeError,
-			 "sequence item %i: expected string or Unicode,"
+			 "sequence item %zd: expected string or Unicode,"
 			 " %.80s found",
 			 i, item->ob_type->tp_name);
 	    goto onError;
@@ -4236,11 +4235,11 @@
         /* Make sure we have enough space for the separator and the item. */
 	itemlen = PyUnicode_GET_SIZE(item);
 	new_res_used = res_used + itemlen;
-	if (new_res_used < res_used ||  new_res_used > INT_MAX)
+	if (new_res_used < res_used ||  new_res_used > PY_SSIZE_T_MAX)
 	    goto Overflow;
 	if (i < seqlen - 1) {
 	    new_res_used += seplen;
-	    if (new_res_used < res_used ||  new_res_used > INT_MAX)
+	    if (new_res_used < res_used ||  new_res_used > PY_SSIZE_T_MAX)
 		goto Overflow;
 	}
 	if (new_res_used > res_alloc) {
@@ -4248,10 +4247,10 @@
 	    do {
 	        size_t oldsize = res_alloc;
 	        res_alloc += res_alloc;
-	        if (res_alloc < oldsize || res_alloc > INT_MAX)
+	        if (res_alloc < oldsize || res_alloc > PY_SSIZE_T_MAX)
 	            goto Overflow;
 	    } while (new_res_used > res_alloc);
-	    if (_PyUnicode_Resize(&res, (int)res_alloc) < 0) {
+	    if (_PyUnicode_Resize(&res, res_alloc) < 0) {
 		Py_DECREF(item);
 		goto onError;
 	    }
@@ -4259,10 +4258,10 @@
 	}
 
 	/* Copy item, and maybe the separator. */
-	Py_UNICODE_COPY(res_p, PyUnicode_AS_UNICODE(item), (int)itemlen);
+	Py_UNICODE_COPY(res_p, PyUnicode_AS_UNICODE(item), itemlen);
 	res_p += itemlen;
 	if (i < seqlen - 1) {
-	    Py_UNICODE_COPY(res_p, sep, (int)seplen);
+	    Py_UNICODE_COPY(res_p, sep, seplen);
 	    res_p += seplen;
 	}
 	Py_DECREF(item);
@@ -4272,7 +4271,7 @@
     /* Shrink res to match the used area; this probably can't fail,
      * but it's cheap to check.
      */
-    if (_PyUnicode_Resize(&res, (int)res_used) < 0)
+    if (_PyUnicode_Resize(&res, res_used) < 0)
 	goto onError;
 
  Done:
@@ -4605,7 +4604,7 @@
     PyObject *list;
 
     if (maxcount < 0)
-        maxcount = INT_MAX;
+        maxcount = PY_SSIZE_T_MAX;
 
     list = PyList_New(0);
     if (!list)
@@ -4634,7 +4633,7 @@
     PyObject *list;
 
     if (maxcount < 0)
-        maxcount = INT_MAX;
+        maxcount = PY_SSIZE_T_MAX;
 
     list = PyList_New(0);
     if (!list)
@@ -4664,10 +4663,10 @@
     PyUnicodeObject *u;
 
     if (maxcount < 0)
-	maxcount = INT_MAX;
+	maxcount = PY_SSIZE_T_MAX;
 
     if (str1->length == 1 && str2->length == 1) {
-        int i;
+        Py_ssize_t i;
 
         /* replace characters */
         if (!findchar(self->str, self->length, str1->str[0]) &&
@@ -5088,7 +5087,7 @@
 {
     PyUnicodeObject *substring;
     Py_ssize_t start = 0;
-    Py_ssize_t end = INT_MAX;
+    Py_ssize_t end = PY_SSIZE_T_MAX;
     PyObject *result;
 
     if (!PyArg_ParseTuple(args, "O|O&O&:count", &substring,
@@ -5265,7 +5264,7 @@
 {
     PyUnicodeObject *substring;
     Py_ssize_t start = 0;
-    Py_ssize_t end = INT_MAX;
+    Py_ssize_t end = PY_SSIZE_T_MAX;
     PyObject *result;
 
     if (!PyArg_ParseTuple(args, "O|O&O&:find", &substring,
@@ -5331,7 +5330,7 @@
     Py_ssize_t result;
     PyUnicodeObject *substring;
     Py_ssize_t start = 0;
-    Py_ssize_t end = INT_MAX;
+    Py_ssize_t end = PY_SSIZE_T_MAX;
 
     if (!PyArg_ParseTuple(args, "O|O&O&:index", &substring,
 		_PyEval_SliceIndex, &start, _PyEval_SliceIndex, &end))
@@ -5669,10 +5668,10 @@
 static PyObject *
 unicode_ljust(PyUnicodeObject *self, PyObject *args)
 {
-    int width;
+    Py_ssize_t width;
     Py_UNICODE fillchar = ' ';
 
-    if (!PyArg_ParseTuple(args, "i|O&:ljust", &width, convert_uc, &fillchar))
+    if (!PyArg_ParseTuple(args, "n|O&:ljust", &width, convert_uc, &fillchar))
         return NULL;
 
     if (self->length >= width && PyUnicode_CheckExact(self)) {
@@ -5996,7 +5995,7 @@
 {
     PyUnicodeObject *substring;
     Py_ssize_t start = 0;
-    Py_ssize_t end = INT_MAX;
+    Py_ssize_t end = PY_SSIZE_T_MAX;
     PyObject *result;
 
     if (!PyArg_ParseTuple(args, "O|O&O&:rfind", &substring,
@@ -6024,7 +6023,7 @@
     Py_ssize_t result;
     PyUnicodeObject *substring;
     Py_ssize_t start = 0;
-    Py_ssize_t end = INT_MAX;
+    Py_ssize_t end = PY_SSIZE_T_MAX;
 
     if (!PyArg_ParseTuple(args, "O|O&O&:rindex", &substring,
 		_PyEval_SliceIndex, &start, _PyEval_SliceIndex, &end))
@@ -6053,10 +6052,10 @@
 static PyObject *
 unicode_rjust(PyUnicodeObject *self, PyObject *args)
 {
-    int width;
+    Py_ssize_t width;
     Py_UNICODE fillchar = ' ';
 
-    if (!PyArg_ParseTuple(args, "i|O&:rjust", &width, convert_uc, &fillchar))
+    if (!PyArg_ParseTuple(args, "n|O&:rjust", &width, convert_uc, &fillchar))
         return NULL;
 
     if (self->length >= width && PyUnicode_CheckExact(self)) {
@@ -6318,7 +6317,7 @@
 {
     PyUnicodeObject *substring;
     Py_ssize_t start = 0;
-    Py_ssize_t end = INT_MAX;
+    Py_ssize_t end = PY_SSIZE_T_MAX;
     PyObject *result;
 
     if (!PyArg_ParseTuple(args, "O|O&O&:startswith", &substring,
@@ -6349,7 +6348,7 @@
 {
     PyUnicodeObject *substring;
     Py_ssize_t start = 0;
-    Py_ssize_t end = INT_MAX;
+    Py_ssize_t end = PY_SSIZE_T_MAX;
     PyObject *result;
 
     if (!PyArg_ParseTuple(args, "O|O&O&:endswith", &substring,
@@ -6450,13 +6449,13 @@
 
 static PySequenceMethods unicode_as_sequence = {
     (lenfunc) unicode_length, 		/* sq_length */
-    (binaryfunc) PyUnicode_Concat, 	/* sq_concat */
+    PyUnicode_Concat,		 	/* sq_concat */
     (ssizeargfunc) unicode_repeat, 	/* sq_repeat */
     (ssizeargfunc) unicode_getitem, 	/* sq_item */
     (ssizessizeargfunc) unicode_slice, 	/* sq_slice */
     0, 					/* sq_ass_item */
     0, 					/* sq_ass_slice */
-    (objobjproc)PyUnicode_Contains, 	/*sq_contains*/
+    PyUnicode_Contains, 		/* sq_contains */
 };
 
 #define HASINDEX(o) PyType_HasFeature((o)->ob_type, Py_TPFLAGS_HAVE_INDEX)
@@ -6487,7 +6486,8 @@
             return PyUnicode_FromUnicode(NULL, 0);
         } else {
             source_buf = PyUnicode_AS_UNICODE((PyObject*)self);
-            result_buf = PyMem_MALLOC(slicelength*sizeof(Py_UNICODE));
+            result_buf = (Py_UNICODE *)PyMem_MALLOC(slicelength*
+                                                    sizeof(Py_UNICODE));
 	    
 	    if (result_buf == NULL)
 		    return PyErr_NoMemory();
@@ -7336,7 +7336,7 @@
     0,				 	/* tp_getattr */
     0, 					/* tp_setattr */
     (cmpfunc) unicode_compare, 		/* tp_compare */
-    (reprfunc) unicode_repr, 		/* tp_repr */
+    unicode_repr, 			/* tp_repr */
     &unicode_as_number, 		/* tp_as_number */
     &unicode_as_sequence, 		/* tp_as_sequence */
     &unicode_as_mapping, 		/* tp_as_mapping */
@@ -7416,6 +7416,11 @@
     unicode_freelist_size = 0;
 }
 
+#ifdef __cplusplus
+}
+#endif
+
+
 /*
 Local variables:
 c-basic-offset: 4
diff --git a/Objects/weakrefobject.c b/Objects/weakrefobject.c
index 39595ae..3f2c261 100644
--- a/Objects/weakrefobject.c
+++ b/Objects/weakrefobject.c
@@ -109,8 +109,7 @@
 static int
 gc_traverse(PyWeakReference *self, visitproc visit, void *arg)
 {
-    if (self->wr_callback != NULL)
-        return visit(self->wr_callback, arg);
+    Py_VISIT(self->wr_callback);
     return 0;
 }
 
@@ -367,7 +366,7 @@
     0,                          /*tp_descr_get*/
     0,                          /*tp_descr_set*/
     0,                          /*tp_dictoffset*/
-    (initproc)weakref___init__, /*tp_init*/
+    weakref___init__,           /*tp_init*/
     PyType_GenericAlloc,        /*tp_alloc*/
     weakref___new__,            /*tp_new*/
     PyObject_GC_Del,            /*tp_free*/
@@ -586,38 +585,38 @@
 
 
 static PyNumberMethods proxy_as_number = {
-    (binaryfunc)proxy_add,      /*nb_add*/
-    (binaryfunc)proxy_sub,      /*nb_subtract*/
-    (binaryfunc)proxy_mul,      /*nb_multiply*/
-    (binaryfunc)proxy_mod,      /*nb_remainder*/
-    (binaryfunc)proxy_divmod,   /*nb_divmod*/
-    (ternaryfunc)proxy_pow,     /*nb_power*/
-    (unaryfunc)proxy_neg,       /*nb_negative*/
-    (unaryfunc)proxy_pos,       /*nb_positive*/
-    (unaryfunc)proxy_abs,       /*nb_absolute*/
-    (inquiry)proxy_nonzero,     /*nb_nonzero*/
-    (unaryfunc)proxy_invert,    /*nb_invert*/
-    (binaryfunc)proxy_lshift,   /*nb_lshift*/
-    (binaryfunc)proxy_rshift,   /*nb_rshift*/
-    (binaryfunc)proxy_and,      /*nb_and*/
-    (binaryfunc)proxy_xor,      /*nb_xor*/
-    (binaryfunc)proxy_or,       /*nb_or*/
-    (coercion)0,                /*nb_coerce*/
-    (unaryfunc)proxy_int,       /*nb_int*/
-    (unaryfunc)proxy_long,      /*nb_long*/
-    (unaryfunc)proxy_float,     /*nb_float*/
-    (unaryfunc)0,               /*nb_oct*/
-    (unaryfunc)0,               /*nb_hex*/
-    (binaryfunc)proxy_iadd,     /*nb_inplace_add*/
-    (binaryfunc)proxy_isub,     /*nb_inplace_subtract*/
-    (binaryfunc)proxy_imul,     /*nb_inplace_multiply*/
-    (binaryfunc)proxy_imod,     /*nb_inplace_remainder*/
-    (ternaryfunc)proxy_ipow,    /*nb_inplace_power*/
-    (binaryfunc)proxy_ilshift,  /*nb_inplace_lshift*/
-    (binaryfunc)proxy_irshift,  /*nb_inplace_rshift*/
-    (binaryfunc)proxy_iand,     /*nb_inplace_and*/
-    (binaryfunc)proxy_ixor,     /*nb_inplace_xor*/
-    (binaryfunc)proxy_ior,      /*nb_inplace_or*/
+    proxy_add,              /*nb_add*/
+    proxy_sub,              /*nb_subtract*/
+    proxy_mul,              /*nb_multiply*/
+    proxy_mod,              /*nb_remainder*/
+    proxy_divmod,           /*nb_divmod*/
+    proxy_pow,              /*nb_power*/
+    proxy_neg,              /*nb_negative*/
+    proxy_pos,              /*nb_positive*/
+    proxy_abs,              /*nb_absolute*/
+    (inquiry)proxy_nonzero, /*nb_nonzero*/
+    proxy_invert,           /*nb_invert*/
+    proxy_lshift,           /*nb_lshift*/
+    proxy_rshift,           /*nb_rshift*/
+    proxy_and,              /*nb_and*/
+    proxy_xor,              /*nb_xor*/
+    proxy_or,               /*nb_or*/
+    0,                      /*nb_coerce*/
+    proxy_int,              /*nb_int*/
+    proxy_long,             /*nb_long*/
+    proxy_float,            /*nb_float*/
+    0,                      /*nb_oct*/
+    0,                      /*nb_hex*/
+    proxy_iadd,             /*nb_inplace_add*/
+    proxy_isub,             /*nb_inplace_subtract*/
+    proxy_imul,             /*nb_inplace_multiply*/
+    proxy_imod,             /*nb_inplace_remainder*/
+    proxy_ipow,             /*nb_inplace_power*/
+    proxy_ilshift,          /*nb_inplace_lshift*/
+    proxy_irshift,          /*nb_inplace_rshift*/
+    proxy_iand,             /*nb_inplace_and*/
+    proxy_ixor,             /*nb_inplace_xor*/
+    proxy_ior,              /*nb_inplace_or*/
 };
 
 static PySequenceMethods proxy_as_sequence = {
@@ -632,8 +631,8 @@
 };
 
 static PyMappingMethods proxy_as_mapping = {
-    (lenfunc)proxy_length,      /*mp_length*/
-    (binaryfunc)proxy_getitem,  /*mp_subscript*/
+    (lenfunc)proxy_length,        /*mp_length*/
+    proxy_getitem,                /*mp_subscript*/
     (objobjargproc)proxy_setitem, /*mp_ass_subscript*/
 };
 
@@ -651,14 +650,14 @@
     0,				        /* tp_getattr */
     0, 				        /* tp_setattr */
     proxy_compare,		        /* tp_compare */
-    (unaryfunc)proxy_repr,	        /* tp_repr */
+    (reprfunc)proxy_repr,	        /* tp_repr */
     &proxy_as_number,		        /* tp_as_number */
     &proxy_as_sequence,		        /* tp_as_sequence */
     &proxy_as_mapping,		        /* tp_as_mapping */
     0,	                                /* tp_hash */
-    (ternaryfunc)0,	                /* tp_call */
-    (unaryfunc)proxy_str,	        /* tp_str */
-    (getattrofunc)proxy_getattr,        /* tp_getattro */
+    0,	                                /* tp_call */
+    proxy_str,                          /* tp_str */
+    proxy_getattr,                      /* tp_getattro */
     (setattrofunc)proxy_setattr,        /* tp_setattro */
     0,				        /* tp_as_buffer */
     Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC
@@ -691,9 +690,9 @@
     &proxy_as_sequence,		        /* tp_as_sequence */
     &proxy_as_mapping,		        /* tp_as_mapping */
     0,	                                /* tp_hash */
-    (ternaryfunc)proxy_call,	        /* tp_call */
-    (unaryfunc)proxy_str,	        /* tp_str */
-    (getattrofunc)proxy_getattr,        /* tp_getattro */
+    proxy_call,	                        /* tp_call */
+    proxy_str,	                        /* tp_str */
+    proxy_getattr,                      /* tp_getattro */
     (setattrofunc)proxy_setattr,        /* tp_setattro */
     0,				        /* tp_as_buffer */
     Py_TPFLAGS_DEFAULT | Py_TPFLAGS_HAVE_GC
diff --git a/PC/VC6/pythoncore.dsp b/PC/VC6/pythoncore.dsp
index 04a1224..cf3200c 100644
--- a/PC/VC6/pythoncore.dsp
+++ b/PC/VC6/pythoncore.dsp
@@ -535,14 +535,6 @@
 # End Source File

 # Begin Source File

 

-SOURCE=..\..\Modules\regexmodule.c

-# End Source File

-# Begin Source File

-

-SOURCE=..\..\Modules\regexpr.c

-# End Source File

-# Begin Source File

-

 SOURCE=..\..\Modules\rgbimgmodule.c

 # End Source File

 # Begin Source File

diff --git a/PC/_subprocess.c b/PC/_subprocess.c
index 1ca4ece..78ceb7d 100644
--- a/PC/_subprocess.c
+++ b/PC/_subprocess.c
@@ -104,7 +104,7 @@
 {
 	if (self->handle != INVALID_HANDLE_VALUE)
 		CloseHandle(self->handle);
-	PyMem_DEL(self);
+	PyObject_FREE(self);
 }
 
 static PyMethodDef sp_handle_methods[] = {
diff --git a/PC/config.c b/PC/config.c
index ee62dc5..75e1a3c 100644
--- a/PC/config.c
+++ b/PC/config.c
@@ -20,7 +20,6 @@
 extern void init_md5(void);
 extern void initnt(void);
 extern void initoperator(void);
-extern void initregex(void);
 #ifndef MS_WIN64
 extern void initrgbimg(void);
 #endif
@@ -95,7 +94,6 @@
         {"_md5", init_md5},
         {"nt", initnt}, /* Use the NT os functions, not posix */
         {"operator", initoperator},
-        {"regex", initregex},
 #ifndef MS_WIN64
         {"rgbimg", initrgbimg},
 #endif
diff --git a/PC/dllbase_nt.txt b/PC/dllbase_nt.txt
index 944ef95..c06e497 100644
--- a/PC/dllbase_nt.txt
+++ b/PC/dllbase_nt.txt
@@ -33,6 +33,7 @@
  - bZ2                       1D170000 - 1D180000
  - datetime                  1D180000 - 1D190000    pyd removed in 2.4
  - _csv                      1D190000 - 1D1A0000    pyd removed in 2.4
+ - _ctypes                   1D1A0000 - 1D1B0000
 
 Other extension modules
  - win32api                  1e200000 - 1e220000
diff --git a/PC/os2emx/Makefile b/PC/os2emx/Makefile
index 847fa67..762bfdb 100644
--- a/PC/os2emx/Makefile
+++ b/PC/os2emx/Makefile
@@ -304,8 +304,6 @@
 		Modules/md5module.c \
 		Modules/operator.c \
 		Modules/_randommodule.c \
-		Modules/regexmodule.c \
-		Modules/regexpr.c \
 		Modules/rgbimgmodule.c \
 		Modules/shamodule.c \
 		Modules/_sre.c \
diff --git a/PC/os2emx/config.c b/PC/os2emx/config.c
index 5ee4343..40c2cdc 100644
--- a/PC/os2emx/config.c
+++ b/PC/os2emx/config.c
@@ -64,7 +64,6 @@
 extern void initmath();
 extern void initmd5();
 extern void initoperator();
-extern void initregex();
 extern void initrgbimg();
 extern void initsha();
 extern void initstrop();
@@ -128,7 +127,6 @@
 	{"math", initmath},
 	{"md5", initmd5},
 	{"operator", initoperator},
-	{"regex", initregex},
 	{"rgbimg", initrgbimg},
 	{"sha", initsha},
 	{"strop", initstrop},
diff --git a/PC/os2emx/python24.def b/PC/os2emx/python24.def
index 4f78914..534dff8 100644
--- a/PC/os2emx/python24.def
+++ b/PC/os2emx/python24.def
@@ -1134,19 +1134,6 @@
 ; From python24_s.lib(_randommodule)
 ;  "init_random"
 
-; From python24_s.lib(regexmodule)
-;  "initregex"
-
-; From python24_s.lib(regexpr)
-;  "_Py_re_syntax_table"
-;  "_Py_re_compile_initialize"
-;  "_Py_re_compile_pattern"
-;  "_Py_re_match"
-;  "_Py_re_search"
-;  "_Py_re_set_syntax"
-;  "_Py_re_compile_fastmap"
-;  "_Py_re_syntax"
-
 ; From python24_s.lib(rgbimgmodule)
 ;  "initrgbimg"
 
diff --git a/PC/os2vacpp/config.c b/PC/os2vacpp/config.c
index 7512de5..9bb5752 100644
--- a/PC/os2vacpp/config.c
+++ b/PC/os2vacpp/config.c
@@ -27,7 +27,6 @@
 extern void initos2(void);
 extern void initoperator(void);
 extern void initposix(void);
-extern void initregex(void);
 extern void initrgbimg(void);
 extern void initsignal(void);
 extern void initselect(void);
@@ -70,7 +69,6 @@
 #endif
 #endif
         {"operator", initoperator},
-        {"regex", initregex},
 //        {"rgbimg", initrgbimg},
         {"signal", initsignal},
 #ifdef USE_SOCKET
diff --git a/PC/os2vacpp/makefile b/PC/os2vacpp/makefile
index 994ac49..f34047f 100644
--- a/PC/os2vacpp/makefile
+++ b/PC/os2vacpp/makefile
@@ -948,34 +948,6 @@
 	 $(PY_INCLUDE)\sliceobject.h $(PY_INCLUDE)\stringobject.h \
 	 $(PY_INCLUDE)\sysmodule.h $(PY_INCLUDE)\traceback.h $(PY_INCLUDE)\tupleobject.h
 
-regexmodule.obj: $(PY_INCLUDE)\abstract.h $(PY_INCLUDE)\ceval.h \
-	 $(PY_INCLUDE)\classobject.h $(PY_INCLUDE)\cobject.h $(PY_INCLUDE)\complexobject.h \
-	 pyconfig.h $(PY_INCLUDE)\dictobject.h $(PY_INCLUDE)\fileobject.h \
-	 $(PY_INCLUDE)\floatobject.h $(PY_INCLUDE)\funcobject.h $(PY_INCLUDE)\import.h \
-	 $(PY_INCLUDE)\intobject.h $(PY_INCLUDE)\intrcheck.h $(PY_INCLUDE)\listobject.h \
-	 $(PY_INCLUDE)\longobject.h $(PY_INCLUDE)\methodobject.h \
-	 $(PY_INCLUDE)\modsupport.h $(PY_INCLUDE)\moduleobject.h $(PY_INCLUDE)\mymalloc.h \
-	 $(PY_INCLUDE)\myproto.h $(PY_INCLUDE)\object.h $(PY_INCLUDE)\objimpl.h \
-	 $(PY_INCLUDE)\pydebug.h $(PY_INCLUDE)\pyerrors.h $(PY_INCLUDE)\pyfpe.h \
-	 $(PY_INCLUDE)\pystate.h $(PY_INCLUDE)\python.h $(PY_INCLUDE)\pythonrun.h \
-	 $(PY_INCLUDE)\rangeobject.h $(PY_MODULES)\regexpr.h $(PY_INCLUDE)\sliceobject.h \
-	 $(PY_INCLUDE)\stringobject.h $(PY_INCLUDE)\sysmodule.h $(PY_INCLUDE)\traceback.h \
-	 $(PY_INCLUDE)\tupleobject.h
-
-regexpr.obj: $(PY_INCLUDE)\abstract.h $(PY_INCLUDE)\ceval.h \
-	 $(PY_INCLUDE)\classobject.h $(PY_INCLUDE)\cobject.h $(PY_INCLUDE)\complexobject.h \
-	 pyconfig.h $(PY_INCLUDE)\dictobject.h $(PY_INCLUDE)\fileobject.h \
-	 $(PY_INCLUDE)\floatobject.h $(PY_INCLUDE)\funcobject.h $(PY_INCLUDE)\import.h \
-	 $(PY_INCLUDE)\intobject.h $(PY_INCLUDE)\intrcheck.h $(PY_INCLUDE)\listobject.h \
-	 $(PY_INCLUDE)\longobject.h $(PY_INCLUDE)\methodobject.h \
-	 $(PY_INCLUDE)\modsupport.h $(PY_INCLUDE)\moduleobject.h $(PY_INCLUDE)\mymalloc.h \
-	 $(PY_INCLUDE)\myproto.h $(PY_INCLUDE)\object.h $(PY_INCLUDE)\objimpl.h \
-	 $(PY_INCLUDE)\pydebug.h $(PY_INCLUDE)\pyerrors.h $(PY_INCLUDE)\pyfpe.h \
-	 $(PY_INCLUDE)\pystate.h $(PY_INCLUDE)\python.h $(PY_INCLUDE)\pythonrun.h \
-	 $(PY_INCLUDE)\rangeobject.h $(PY_MODULES)\regexpr.h $(PY_INCLUDE)\sliceobject.h \
-	 $(PY_INCLUDE)\stringobject.h $(PY_INCLUDE)\sysmodule.h $(PY_INCLUDE)\traceback.h \
-	 $(PY_INCLUDE)\tupleobject.h
-
 resource.obj: $(PY_INCLUDE)\abstract.h $(OS2TCPIP)\Include\sys\time.h $(PY_INCLUDE)\ceval.h \
 	 $(PY_INCLUDE)\classobject.h $(PY_INCLUDE)\cobject.h $(PY_INCLUDE)\complexobject.h \
 	 pyconfig.h $(PY_INCLUDE)\dictobject.h $(PY_INCLUDE)\fileobject.h \
diff --git a/PC/os2vacpp/makefile.omk b/PC/os2vacpp/makefile.omk
index 0d11b6a..9582338 100644
--- a/PC/os2vacpp/makefile.omk
+++ b/PC/os2vacpp/makefile.omk
@@ -699,30 +699,6 @@
 	 pythonrun.h rangeobject.h sliceobject.h stringobject.h sysmodule.h \
 	 traceback.h tupleobject.h
 
-regexmodule.obj: abstract.h ceval.h classobject.h cobject.h complexobject.h \
-	 pyconfig.h dictobject.h fileobject.h floatobject.h funcobject.h \
-	 import.h intobject.h intrcheck.h listobject.h longobject.h \
-	 methodobject.h modsupport.h moduleobject.h mymalloc.h myproto.h \
-	 object.h objimpl.h pydebug.h pyerrors.h pyfpe.h pystate.h python.h \
-	 pythonrun.h rangeobject.h regexpr.h sliceobject.h stringobject.h \
-	 sysmodule.h traceback.h tupleobject.h
-
-regexpr.obj: abstract.h ceval.h classobject.h cobject.h \
-	 complexobject.h pyconfig.h dictobject.h fileobject.h floatobject.h \
-	 funcobject.h import.h intobject.h intrcheck.h listobject.h \
-	 longobject.h methodobject.h modsupport.h moduleobject.h mymalloc.h \
-	 myproto.h object.h objimpl.h pydebug.h pyerrors.h pyfpe.h \
-	 pystate.h python.h pythonrun.h rangeobject.h regexpr.h \
-	 sliceobject.h stringobject.h sysmodule.h traceback.h tupleobject.h
-
-reopmodule.obj: abstract.h ceval.h classobject.h cobject.h complexobject.h \
-	 pyconfig.h dictobject.h fileobject.h floatobject.h funcobject.h \
-	 import.h intobject.h intrcheck.h listobject.h longobject.h \
-	 methodobject.h modsupport.h moduleobject.h mymalloc.h myproto.h \
-	 object.h objimpl.h pydebug.h pyerrors.h pyfpe.h pystate.h python.h \
-	 pythonrun.h rangeobject.h regexpr.h sliceobject.h stringobject.h \
-	 sysmodule.h traceback.h tupleobject.h
-
 resource.obj: abstract.h c:\mptn\include\sys\time.h ceval.h classobject.h \
 	 cobject.h complexobject.h pyconfig.h dictobject.h fileobject.h \
 	 floatobject.h funcobject.h import.h intobject.h intrcheck.h \
diff --git a/PC/os2vacpp/python.def b/PC/os2vacpp/python.def
index bc73fac..79d05b0 100644
--- a/PC/os2vacpp/python.def
+++ b/PC/os2vacpp/python.def
@@ -464,12 +464,6 @@
 ;               _Py_mergebitset
 ;               _Py_meta_grammar
 ;               _Py_newbitset
-               _Py_re_compile_fastmap
-               _Py_re_compile_initialize
-               _Py_re_compile_pattern
-               _Py_re_match
-               _Py_re_search
-               _Py_re_set_syntax
 ;               _Py_samebitset
                PyBuffer_Type
                PyBuffer_FromObject
diff --git a/PC/testpy.py b/PC/testpy.py
index f8746a3..78ad63c 100644
--- a/PC/testpy.py
+++ b/PC/testpy.py
@@ -5,23 +5,23 @@
 # change this module too.
 
 try:
-    import string
+    import os
 except:
-    print """Could not import the standard "string" module.
+    print """Could not import the standard "os" module.
   Please check your PYTHONPATH environment variable."""
     sys.exit(1)
 
 try:
-    import regex_syntax
+    import symbol
 except:
-    print """Could not import the standard "regex_syntax" module.  If this is
+    print """Could not import the standard "symbol" module.  If this is
   a PC, you should add the dos_8x3 directory to your PYTHONPATH."""
     sys.exit(1)
 
 import os
 
 for dir in sys.path:
-    file = os.path.join(dir, "string.py")
+    file = os.path.join(dir, "os.py")
     if os.path.isfile(file):
         test = os.path.join(dir, "test")
         if os.path.isdir(test):
diff --git a/PC/tix.diff b/PC/tix.diff
deleted file mode 100644
index 93a271f..0000000
--- a/PC/tix.diff
+++ /dev/null
@@ -1,108 +0,0 @@
-diff -ur tix-8.1.4/win/common.mak tix-8.1.4.new/win/common.mak
---- tix-8.1.4/win/common.mak	2002-12-11 07:19:42.000000000 +0100
-+++ tix-8.1.4.new/win/common.mak	2004-08-03 21:45:09.859375000 +0200
-@@ -18,10 +18,10 @@
- #	    support files
- #
- #----------------------------------------------------------------------
--TCL_VER          = 8.3
-+TCL_VER          = 8.4
- ITCL_VER          = 
- 
--INSTALLDIR      = C:\progra~1\tcl
-+INSTALLDIR      = ..\..\tcltk
- 
- !IFNDEF TIX_DEBUG
- NODEBUG = 1
-@@ -61,7 +61,7 @@
- !IF "$(TCL_VER)" == "8.4"
- TCLMAJOR=8
- TCLMINOR=4
--TCLPATCH=1
-+TCLPATCH=7
- TMPDIR          = tk$(TCL_VER)
- !ENDIF
- 
-@@ -176,14 +176,14 @@
- 	$(TMPDIR)\tixWinWm.obj
- 
- RMDIR		= $(TCLDIR)\win\rmd.bat
--MKDIR		= $(TCLDIR)\win\mkd.bat
-+MKDIR		= mkdir
- RM		= del
- 
- install:    install-binaries install-libraries
- 
- install-binaries: $(TCLSH)
--	$(MKDIR) "$(BIN_INSTALL_DIR)"
--	$(MKDIR) "$(LIB_INSTALL_DIR)"
-+	-$(MKDIR) "$(BIN_INSTALL_DIR)"
-+	-$(MKDIR) "$(LIB_INSTALL_DIR)"
- 	@echo installing $(TIXDLL)
- 	@copy "$(TIXDLL)" "$(BIN_INSTALL_DIR)"
- 	@copy "$(TIXLIB)" "$(LIB_INSTALL_DIR)"
-diff -ur tix-8.1.4/win/makefile.vc tix-8.1.4.new/win/makefile.vc
---- tix-8.1.4/win/makefile.vc	2002-12-02 04:02:54.000000000 +0100
-+++ tix-8.1.4.new/win/makefile.vc	2004-08-03 21:42:07.953125000 +0200
-@@ -54,12 +54,11 @@
- DBGX = d
- !ENDIF
- 
--cc32   = "$(TOOLS32)\bin\cl.exe"
--rc32   = "$(TOOLS32_rc)\bin\rc.exe"
--link32 = "$(TOOLS32)\bin\link.exe"
--include32 = -I"$(TOOLS32)\include"
-+cc32   = "cl.exe"
-+rc32   = "rc.exe"
-+link32 = "link.exe"
- 
--TIX_INCLUDES = $(include32) \
-+TIX_INCLUDES = \
- 	-I$(ROOT)\win -I$(ROOT)\generic \
- 	-I$(TKDIR)\generic -I$(TKDIR)\win -I$(TKDIR)\xlib \
- 	-I$(TCLDIR)\generic $(ITCL_CFLAGS)
-@@ -171,7 +170,7 @@
- #
- cvarsdll  = -D_X86_=1 -DWIN32 -D_WIN32 -D_MT -D_DLL
- cflagsdll = $(cvarsdll) -c -W3 -nologo -Fp$(TMPDIR)\ -YX -MD \
--	    -Oti -Gs -GD
-+	    -Oti -Gs -Gd
- 
- ######################################################################
- # Project specific targets
-@@ -181,7 +180,6 @@
- 
- $(DUMPEXTS): $(WINDIR)\winDumpExts.c
- 	$(cc32) $(CON_CFLAGS) -Fo$(TMPDIR)\ /c $?
--	set LIB="$(TOOLS32)\lib"
- 	$(link32) $(ldebug) $(conlflags) $(guilibs) -out:$@ \
- 		$(TMPDIR)\winDumpExts.obj 
- 
-@@ -193,7 +191,6 @@
- # (ToDo) $(TIXDLL) doesn't have resources to define its icon, etc.
- #
- $(TIXDLL): $(TIXOBJS) $(TMPDIR)\tixvc.def
--	set LIB="$(TOOLS32)\lib"
- 	$(link32) $(ldebug) $(dlllflags) -def:$(TMPDIR)\tixvc.def \
- 		$(TKLIBDIR)\$(TKLIB) $(TCLLIBDIR)\$(TCLLIB) $(guilibsdll) \
- 		$(ITCL_LIBS) -out:$@ @<<
-@@ -202,7 +199,6 @@
- 
- 
- $(TIXWISH): $(WISHOBJS) $(TIXOBJS) $(TIXLIB) $(TMPDIR)\tixwish.res
--	set LIB="$(TOOLS32)\lib"
- 	$(link32) $(ldebug) $(guilflags) \
- 		$(WISHOBJS) $(TMPDIR)\tixwish.res $(TIXLIB) \
- 		$(TKLIBDIR)\$(TKLIB) $(TCLLIBDIR)\$(TCLLIB) $(guilibsdll) \
-diff -ur tix-8.1.4/win/tk8.4/pkgIndex.tcl tix-8.1.4.new/win/tk8.4/pkgIndex.tcl
---- tix-8.1.4/win/tk8.4/pkgIndex.tcl	2002-12-15 04:21:54.000000000 +0100
-+++ tix-8.1.4.new/win/tk8.4/pkgIndex.tcl	2004-08-31 08:38:43.921875000 +0200
-@@ -15,7 +15,7 @@
- # We look in the ../../bin directory (an installed Tcl)
- lappend dirs ../../bin
- # We look in the ../../DLLs directory (an installed Python)
--lappend dirs ../../Dlls
-+lappend dirs [file join [file dirname [info nameofexe]] DLLs]
- # If not, this pkgIndex.tcl will probably fail.
- 
- 
diff --git a/PCbuild/_ctypes.vcproj b/PCbuild/_ctypes.vcproj
index 76518e6..4990c9e 100644
--- a/PCbuild/_ctypes.vcproj
+++ b/PCbuild/_ctypes.vcproj
@@ -33,12 +33,14 @@
 				Name="VCCustomBuildTool"/>
 			<Tool
 				Name="VCLinkerTool"
+				AdditionalOptions="/EXPORT:DllGetClassObject,PRIVATE /EXPORT:DllCanUnloadNow,PRIVATE"
 				OutputFile="./_ctypes_d.pyd"
 				LinkIncremental="1"
 				SuppressStartupBanner="TRUE"
 				GenerateDebugInformation="TRUE"
 				ProgramDatabaseFile=".\./_ctypes_d.pdb"
 				SubSystem="0"
+				BaseAddress="0x1D1A0000"
 				ImportLibrary=".\./_ctypes_d.lib"
 				TargetMachine="1"/>
 			<Tool
@@ -85,6 +87,7 @@
 				Name="VCCustomBuildTool"/>
 			<Tool
 				Name="VCLinkerTool"
+				AdditionalOptions="/EXPORT:DllGetClassObject,PRIVATE /EXPORT:DllCanUnloadNow,PRIVATE"
 				OutputFile="./_ctypes.pyd"
 				LinkIncremental="1"
 				SuppressStartupBanner="TRUE"
@@ -93,6 +96,7 @@
 				SubSystem="0"
 				OptimizeReferences="0"
 				EnableCOMDATFolding="0"
+				BaseAddress="0x1D1A0000"
 				ImportLibrary=".\./_ctypes.lib"
 				TargetMachine="1"/>
 			<Tool
@@ -142,7 +146,7 @@
 				Name="VCCustomBuildTool"/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalOptions=" /MACHINE:AMD64 /USELINK:MS_SDK"
+				AdditionalOptions=" /MACHINE:AMD64 /USELINK:MS_SDK /EXPORT:DllGetClassObject,PRIVATE /EXPORT:DllCanUnloadNow,PRIVATE"
 				OutputFile="./_ctypes.pyd"
 				LinkIncremental="1"
 				SuppressStartupBanner="TRUE"
@@ -151,6 +155,7 @@
 				SubSystem="0"
 				OptimizeReferences="0"
 				EnableCOMDATFolding="0"
+				BaseAddress="0x1D1A0000"
 				ImportLibrary=".\./_ctypes.lib"
 				TargetMachine="0"/>
 			<Tool
@@ -200,7 +205,7 @@
 				Name="VCCustomBuildTool"/>
 			<Tool
 				Name="VCLinkerTool"
-				AdditionalOptions=" /MACHINE:IA64 /USELINK:MS_SDK"
+				AdditionalOptions=" /MACHINE:IA64 /USELINK:MS_SDK /EXPORT:DllGetClassObject,PRIVATE /EXPORT:DllCanUnloadNow,PRIVATE"
 				OutputFile="./_ctypes.pyd"
 				LinkIncremental="1"
 				SuppressStartupBanner="TRUE"
@@ -209,6 +214,7 @@
 				SubSystem="0"
 				OptimizeReferences="0"
 				EnableCOMDATFolding="0"
+				BaseAddress="0x1D1A0000"
 				ImportLibrary=".\./_ctypes.lib"
 				TargetMachine="0"/>
 			<Tool
diff --git a/PCbuild/_sqlite3.vcproj b/PCbuild/_sqlite3.vcproj
new file mode 100644
index 0000000..bdb1a9b
--- /dev/null
+++ b/PCbuild/_sqlite3.vcproj
@@ -0,0 +1,289 @@
+<?xml version="1.0" encoding="Windows-1252"?>
+<VisualStudioProject
+	ProjectType="Visual C++"
+	Version="7.10"
+	Name="_sqlite3"
+	ProjectGUID="{2FF0A312-22F9-4C34-B070-842916DE27A9}"
+	SccProjectName="_sqlite3"
+	SccLocalPath="..">
+	<Platforms>
+		<Platform
+			Name="Win32"/>
+	</Platforms>
+	<Configurations>
+		<Configuration
+			Name="Debug|Win32"
+			OutputDirectory=".\."
+			IntermediateDirectory=".\x86-temp-debug\_sqlite3"
+			ConfigurationType="2"
+			UseOfMFC="0"
+			ATLMinimizesCRunTimeLibraryUsage="FALSE">
+			<Tool
+				Name="VCCLCompilerTool"
+				Optimization="0"
+				AdditionalIncludeDirectories="..\Include;..\PC;..\..\sqlite-source-3.3.4"
+				PreprocessorDefinitions="_DEBUG;WIN32;_WINDOWS;MODULE_NAME=\&quot;sqlite3\&quot;"
+				RuntimeLibrary="3"
+				UsePrecompiledHeader="2"
+				WarningLevel="3"
+				SuppressStartupBanner="TRUE"
+				DebugInformationFormat="3"
+				CompileAs="0"/>
+			<Tool
+				Name="VCCustomBuildTool"/>
+			<Tool
+				Name="VCLinkerTool"
+				AdditionalDependencies="..\..\sqlite-source-3.3.4\sqlite3.lib"
+				OutputFile="./_sqlite3_d.pyd"
+				LinkIncremental="1"
+				SuppressStartupBanner="TRUE"
+				IgnoreDefaultLibraryNames=""
+				GenerateDebugInformation="TRUE"
+				ProgramDatabaseFile=".\./_sqlite3_d.pdb"
+				SubSystem="2"
+				BaseAddress="0x1e180000"
+				ImportLibrary=".\./_sqlite3_d.lib"
+				TargetMachine="1"/>
+			<Tool
+				Name="VCMIDLTool"/>
+			<Tool
+				Name="VCPostBuildEventTool"/>
+			<Tool
+				Name="VCPreBuildEventTool"/>
+			<Tool
+				Name="VCPreLinkEventTool"/>
+			<Tool
+				Name="VCResourceCompilerTool"/>
+			<Tool
+				Name="VCWebServiceProxyGeneratorTool"/>
+			<Tool
+				Name="VCXMLDataGeneratorTool"/>
+			<Tool
+				Name="VCWebDeploymentTool"/>
+			<Tool
+				Name="VCManagedWrapperGeneratorTool"/>
+			<Tool
+				Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
+		</Configuration>
+		<Configuration
+			Name="Release|Win32"
+			OutputDirectory=".\."
+			IntermediateDirectory=".\x86-temp-release\_sqlite3"
+			ConfigurationType="2"
+			UseOfMFC="0"
+			ATLMinimizesCRunTimeLibraryUsage="FALSE">
+			<Tool
+				Name="VCCLCompilerTool"
+				Optimization="2"
+				InlineFunctionExpansion="1"
+				AdditionalIncludeDirectories="..\Include;..\PC;..\..\sqlite-source-3.3.4"
+				PreprocessorDefinitions="NDEBUG;WIN32;_WINDOWS;MODULE_NAME=\&quot;sqlite3\&quot;"
+				StringPooling="TRUE"
+				RuntimeLibrary="2"
+				EnableFunctionLevelLinking="TRUE"
+				UsePrecompiledHeader="2"
+				WarningLevel="3"
+				SuppressStartupBanner="TRUE"
+				DebugInformationFormat="3"
+				CompileAs="0"/>
+			<Tool
+				Name="VCCustomBuildTool"/>
+			<Tool
+				Name="VCLinkerTool"
+				AdditionalDependencies="..\..\sqlite-source-3.3.4\sqlite3.lib"
+				OutputFile="./_sqlite3.pyd"
+				LinkIncremental="1"
+				SuppressStartupBanner="TRUE"
+				IgnoreDefaultLibraryNames=""
+				GenerateDebugInformation="TRUE"
+				ProgramDatabaseFile=".\./_sqlite3.pdb"
+				SubSystem="2"
+				BaseAddress="0x1e180000"
+				ImportLibrary=".\./_sqlite3.lib"
+				TargetMachine="1"/>
+			<Tool
+				Name="VCMIDLTool"/>
+			<Tool
+				Name="VCPostBuildEventTool"/>
+			<Tool
+				Name="VCPreBuildEventTool"/>
+			<Tool
+				Name="VCPreLinkEventTool"/>
+			<Tool
+				Name="VCResourceCompilerTool"/>
+			<Tool
+				Name="VCWebServiceProxyGeneratorTool"/>
+			<Tool
+				Name="VCXMLDataGeneratorTool"/>
+			<Tool
+				Name="VCWebDeploymentTool"/>
+			<Tool
+				Name="VCManagedWrapperGeneratorTool"/>
+			<Tool
+				Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
+		</Configuration>
+		<Configuration
+			Name="ReleaseItanium|Win32"
+			OutputDirectory="./."
+			IntermediateDirectory=".\ia64-temp-release\_sqlite3"
+			ConfigurationType="2"
+			UseOfMFC="0"
+			ATLMinimizesCRunTimeLibraryUsage="FALSE">
+			<Tool
+				Name="VCCLCompilerTool"
+				AdditionalOptions=" /USECL:MS_ITANIUM"
+				Optimization="2"
+				InlineFunctionExpansion="1"
+				AdditionalIncludeDirectories="{MSSDKPATH}\include\Win64\atl;{MSSDKPATH}\include\Win64\crt;{MSSDKPATH}\include\Win64\crt\sys;{MSSDKPATH}\include\Win64\mfc;..\Include;..\PC;..\..\sqlite-source-3.3.4"
+				PreprocessorDefinitions="NDEBUG;WIN32;_WINDOWS;MODULE_NAME=\&quot;sqlite3\&quot;"
+				StringPooling="TRUE"
+				BasicRuntimeChecks="0"
+				RuntimeLibrary="2"
+				BufferSecurityCheck="FALSE"
+				EnableFunctionLevelLinking="TRUE"
+				UsePrecompiledHeader="2"
+				WarningLevel="3"
+				SuppressStartupBanner="TRUE"
+				Detect64BitPortabilityProblems="TRUE"
+				DebugInformationFormat="3"
+				CompileAs="0"/>
+			<Tool
+				Name="VCCustomBuildTool"/>
+			<Tool
+				Name="VCLinkerTool"
+				AdditionalOptions=" /MACHINE:IA64 /USELINK:MS_SDK"
+				AdditionalDependencies="..\..\sqlite-source-3.3.4\ia64\sqlite3.lib"
+				OutputFile="./_sqlite3.pyd"
+				LinkIncremental="1"
+				SuppressStartupBanner="TRUE"
+				IgnoreDefaultLibraryNames=""
+				GenerateDebugInformation="TRUE"
+				ProgramDatabaseFile=".\./_sqlite3.pdb"
+				SubSystem="2"
+				BaseAddress="0x1e180000"
+				ImportLibrary=".\./_sqlite3.lib"
+				TargetMachine="0"/>
+			<Tool
+				Name="VCMIDLTool"/>
+			<Tool
+				Name="VCPostBuildEventTool"/>
+			<Tool
+				Name="VCPreBuildEventTool"/>
+			<Tool
+				Name="VCPreLinkEventTool"/>
+			<Tool
+				Name="VCResourceCompilerTool"/>
+			<Tool
+				Name="VCWebServiceProxyGeneratorTool"/>
+			<Tool
+				Name="VCXMLDataGeneratorTool"/>
+			<Tool
+				Name="VCWebDeploymentTool"/>
+			<Tool
+				Name="VCManagedWrapperGeneratorTool"/>
+			<Tool
+				Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
+		</Configuration>
+		<Configuration
+			Name="ReleaseAMD64|Win32"
+			OutputDirectory="."
+			IntermediateDirectory="amd64-temp-release\_sqlite3"
+			ConfigurationType="2"
+			UseOfMFC="0"
+			ATLMinimizesCRunTimeLibraryUsage="FALSE">
+			<Tool
+				Name="VCCLCompilerTool"
+				AdditionalOptions=" /USECL:MS_OPTERON"
+				Optimization="2"
+				InlineFunctionExpansion="1"
+				AdditionalIncludeDirectories="{MSSDKPATH}\include\Win64\atl\amd64;{MSSDKPATH}\include\Win64\crt\amd64;{MSSDKPATH}\include\Win64\crt\amd64\sys;{MSSDKPATH}\include\Win64\mfc\amd64;..\Include;..\PC;..\..\sqlite-source-3.3.4"
+				PreprocessorDefinitions="NDEBUG;WIN32;_WINDOWS;MODULE_NAME=\&quot;sqlite3\&quot;"
+				StringPooling="TRUE"
+				BasicRuntimeChecks="0"
+				RuntimeLibrary="2"
+				BufferSecurityCheck="FALSE"
+				EnableFunctionLevelLinking="TRUE"
+				UsePrecompiledHeader="2"
+				WarningLevel="3"
+				SuppressStartupBanner="TRUE"
+				Detect64BitPortabilityProblems="TRUE"
+				DebugInformationFormat="3"
+				CompileAs="0"/>
+			<Tool
+				Name="VCCustomBuildTool"/>
+			<Tool
+				Name="VCLinkerTool"
+				AdditionalOptions=" /MACHINE:AMD64 /USELINK:MS_SDK"
+				AdditionalDependencies="..\..\sqlite-source-3.3.4\amd64\sqlite3.lib"
+				OutputFile="./_sqlite3.pyd"
+				LinkIncremental="1"
+				SuppressStartupBanner="TRUE"
+				IgnoreDefaultLibraryNames=""
+				GenerateDebugInformation="TRUE"
+				ProgramDatabaseFile=".\./_sqlite3.pdb"
+				SubSystem="2"
+				BaseAddress="0x1e180000"
+				ImportLibrary=".\./_sqlite3.lib"
+				TargetMachine="0"/>
+			<Tool
+				Name="VCMIDLTool"/>
+			<Tool
+				Name="VCPostBuildEventTool"/>
+			<Tool
+				Name="VCPreBuildEventTool"/>
+			<Tool
+				Name="VCPreLinkEventTool"/>
+			<Tool
+				Name="VCResourceCompilerTool"/>
+			<Tool
+				Name="VCWebServiceProxyGeneratorTool"/>
+			<Tool
+				Name="VCXMLDataGeneratorTool"/>
+			<Tool
+				Name="VCWebDeploymentTool"/>
+			<Tool
+				Name="VCManagedWrapperGeneratorTool"/>
+			<Tool
+				Name="VCAuxiliaryManagedWrapperGeneratorTool"/>
+		</Configuration>
+	</Configurations>
+	<References>
+	</References>
+	<Files>
+		<File
+			RelativePath="..\Modules\_sqlite\adapters.c">
+		</File>
+		<File
+			RelativePath="..\Modules\_sqlite\cache.c">
+		</File>
+		<File
+			RelativePath="..\Modules\_sqlite\connection.c">
+		</File>
+		<File
+			RelativePath="..\Modules\_sqlite\converters.c">
+		</File>
+		<File
+			RelativePath="..\Modules\_sqlite\cursor.c">
+		</File>
+		<File
+			RelativePath="..\Modules\_sqlite\microprotocols.c">
+		</File>
+		<File
+			RelativePath="..\Modules\_sqlite\module.c">
+		</File>
+		<File
+			RelativePath="..\Modules\_sqlite\prepare_protocol.c">
+		</File>
+		<File
+			RelativePath="..\Modules\_sqlite\row.c">
+		</File>
+		<File
+			RelativePath="..\Modules\_sqlite\statement.c">
+		</File>
+		<File
+			RelativePath="..\Modules\_sqlite\util.c">
+		</File>
+	</Files>
+	<Globals>
+	</Globals>
+</VisualStudioProject>
diff --git a/PCbuild/_ssl.mak b/PCbuild/_ssl.mak
index c150d65..2c47c6b 100644
--- a/PCbuild/_ssl.mak
+++ b/PCbuild/_ssl.mak
@@ -2,17 +2,17 @@
 !IFDEF DEBUG
 MODULE=_ssl_d.pyd
 TEMP_DIR=x86-temp-debug/_ssl
-CFLAGS=/Od /Zi /MDd /LDd /DDEBUG /D_DEBUG
+CFLAGS=/Od /Zi /MDd /LDd /DDEBUG /D_DEBUG /DWIN32
 SSL_LIB_DIR=$(SSL_DIR)/out32.dbg
 !ELSE
 MODULE=_ssl.pyd
 TEMP_DIR=x86-temp-release/_ssl
-CFLAGS=/Ox /MD /LD
+CFLAGS=/Ox /MD /LD /DWIN32
 SSL_LIB_DIR=$(SSL_DIR)/out32
 !ENDIF
 
 INCLUDES=-I ../Include -I ../PC -I $(SSL_DIR)/inc32
-LIBS=gdi32.lib wsock32.lib /libpath:$(SSL_LIB_DIR) libeay32.lib ssleay32.lib
+LIBS=gdi32.lib wsock32.lib user32.lib advapi32.lib /libpath:$(SSL_LIB_DIR) libeay32.lib ssleay32.lib
 
 SOURCE=../Modules/_ssl.c $(SSL_LIB_DIR)/libeay32.lib $(SSL_LIB_DIR)/ssleay32.lib
 
diff --git a/PCbuild/db.build b/PCbuild/db.build
new file mode 100644
index 0000000..6a87f74
--- /dev/null
+++ b/PCbuild/db.build
@@ -0,0 +1,10 @@
+<?xml version="1.0"?>
+<project>
+ <target name="all" description="Build all targets.">
+   <solution configuration="release">
+     <projects>
+       <include name="db_static.vcproj" />
+     </projects>
+   </solution>
+ </target>
+</project>
diff --git a/PCbuild/make_buildinfo.c b/PCbuild/make_buildinfo.c
index 9d2f9f0..4cebf45 100644
--- a/PCbuild/make_buildinfo.c
+++ b/PCbuild/make_buildinfo.c
@@ -27,6 +27,9 @@
 	DWORD type, size;
 	if (_stat(".svn", &st) < 0)
 		return 0;
+	/* Allow suppression of subwcrev.exe invocation if a no_subwcrev file is present. */
+	if (_stat("no_subwcrev", &st) == 0)
+		return 0;
 	if (RegOpenKey(HKEY_LOCAL_MACHINE, "Software\\TortoiseSVN", &hTortoise) != ERROR_SUCCESS &&
 	    RegOpenKey(HKEY_CURRENT_USER, "Software\\TortoiseSVN", &hTortoise) != ERROR_SUCCESS)
 		/* Tortoise not installed */
diff --git a/PCbuild/pcbuild.sln b/PCbuild/pcbuild.sln
index 88d8c45..20d3ecf 100644
--- a/PCbuild/pcbuild.sln
+++ b/PCbuild/pcbuild.sln
@@ -96,6 +96,12 @@
 EndProject
 Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_ctypes_test", "_ctypes_test.vcproj", "{8CF334D9-4F82-42EB-97AF-83592C5AFD2F}"
 	ProjectSection(ProjectDependencies) = postProject
+		{F22F40F4-D318-40DC-96B3-88DC81CE0894} = {F22F40F4-D318-40DC-96B3-88DC81CE0894}
+	EndProjectSection
+EndProject
+Project("{8BC9CEB8-8B4A-11D0-8D11-00A0C91BC942}") = "_sqlite3", "_sqlite3.vcproj", "{2FF0A312-22F9-4C34-B070-842916DE27A9}"
+	ProjectSection(ProjectDependencies) = postProject
+		{CF7AC3D1-E2DF-41D2-BEA6-1E2556CDEA26} = {CF7AC3D1-E2DF-41D2-BEA6-1E2556CDEA26}
 	EndProjectSection
 EndProject
 Global
@@ -260,6 +266,14 @@
 		{8CF334D9-4F82-42EB-97AF-83592C5AFD2F}.Release.Build.0 = Release|Win32
 		{8CF334D9-4F82-42EB-97AF-83592C5AFD2F}.ReleaseAMD64.ActiveCfg = ReleaseAMD64|Win32
 		{8CF334D9-4F82-42EB-97AF-83592C5AFD2F}.ReleaseItanium.ActiveCfg = ReleaseItanium|Win32
+		{2FF0A312-22F9-4C34-B070-842916DE27A9}.Debug.ActiveCfg = Debug|Win32
+		{2FF0A312-22F9-4C34-B070-842916DE27A9}.Debug.Build.0 = Debug|Win32
+		{2FF0A312-22F9-4C34-B070-842916DE27A9}.Release.ActiveCfg = Release|Win32
+		{2FF0A312-22F9-4C34-B070-842916DE27A9}.Release.Build.0 = Release|Win32
+		{2FF0A312-22F9-4C34-B070-842916DE27A9}.ReleaseAMD64.ActiveCfg = ReleaseAMD64|Win32
+		{2FF0A312-22F9-4C34-B070-842916DE27A9}.ReleaseAMD64.Build.0 = ReleaseAMD64|Win32
+		{2FF0A312-22F9-4C34-B070-842916DE27A9}.ReleaseItanium.ActiveCfg = ReleaseItanium|Win32
+		{2FF0A312-22F9-4C34-B070-842916DE27A9}.ReleaseItanium.Build.0 = ReleaseItanium|Win32
 	EndGlobalSection
 	GlobalSection(SolutionItems) = postSolution
 		..\Modules\getbuildinfo.c = ..\Modules\getbuildinfo.c
diff --git a/PCbuild/python.build b/PCbuild/python.build
new file mode 100644
index 0000000..61bbe89
--- /dev/null
+++ b/PCbuild/python.build
@@ -0,0 +1,21 @@
+<?xml version="1.0"?>
+<project>
+ <target name="all" description="Build all targets.">
+   <solution configuration="release">
+     <projects>
+       <include name="make_versioninfo.vcproj" />
+     </projects>
+   </solution>
+   <exec program="make_versioninfo" output="pythonnt_rc.h" />
+
+   <solution configuration="release" solutionfile="pcbuild.sln">
+     <excludeprojects>
+       <include name="_tkinter.vcproj" />
+       <include name="bz2.vcproj" />
+       <include name="_bsddb.vcproj" />
+       <include name="_sqlite3.vcproj" />
+       <include name="_ssl.vcproj" />
+     </excludeprojects>
+   </solution>
+ </target>
+</project>
diff --git a/PCbuild/pythoncore.vcproj b/PCbuild/pythoncore.vcproj
index a1bb0ed..3bd740f 100644
--- a/PCbuild/pythoncore.vcproj
+++ b/PCbuild/pythoncore.vcproj
@@ -3,7 +3,7 @@
 	ProjectType="Visual C++"
 	Version="7.10"
 	Name="pythoncore"
-	ProjectGUID="{7AFA1F0B-A8A1-455A-A832-BF263404BBEF}"
+	ProjectGUID="{CF7AC3D1-E2DF-41D2-BEA6-1E2556CDEA26}"
 	RootNamespace="pythoncore"
 	SccProjectName="pythoncore"
 	SccLocalPath="..">
@@ -623,7 +623,7 @@
 			RelativePath="..\Modules\mathmodule.c">
 		</File>
 		<File
-			RelativePath="..\Modules\md5c.c">
+			RelativePath="..\Modules\md5.c">
 		</File>
 		<File
 			RelativePath="..\Modules\md5module.c">
@@ -707,12 +707,6 @@
 			RelativePath="..\Objects\rangeobject.c">
 		</File>
 		<File
-			RelativePath="..\Modules\regexmodule.c">
-		</File>
-		<File
-			RelativePath="..\Modules\regexpr.c">
-		</File>
-		<File
 			RelativePath="..\Modules\rgbimgmodule.c">
 		</File>
 		<File
diff --git a/PCbuild/readme.txt b/PCbuild/readme.txt
index 76c314d..e303313 100644
--- a/PCbuild/readme.txt
+++ b/PCbuild/readme.txt
@@ -64,27 +64,21 @@
 
 _tkinter
     Python wrapper for the Tk windowing system.  Requires building
-    Tcl/Tk first.  Following are instructions for Tcl/Tk 8.4.7; these
-    should work for version 8.4.6 too, with suitable substitutions:
+    Tcl/Tk first.  Following are instructions for Tcl/Tk 8.4.12.
 
     Get source
     ----------
-    Go to
-        http://prdownloads.sourceforge.net/tcl/
-    and download
-        tcl847-src.zip
-        tk847-src.zip
-    Unzip into
-        dist\tcl8.4.7\
-        dist\tk8.4.7\
-    respectively.
+    In the dist directory, run
+    svn export http://svn.python.org/projects/external/tcl8.4.12
+    svn export http://svn.python.org/projects/external/tk8.4.12
+    svn export http://svn.python.org/projects/external/tix-8.4.0
 
     Build Tcl first (done here w/ MSVC 7.1 on Windows XP)
     ---------------
     Use "Start -> All Programs -> Microsoft Visual Studio .NET 2003
          -> Visual Studio .NET Tools -> Visual Studio .NET 2003 Command Prompt"
     to get a shell window with the correct environment settings
-    cd dist\tcl8.4.7\win
+    cd dist\tcl8.4.12\win
     nmake -f makefile.vc
     nmake -f makefile.vc INSTALLDIR=..\..\tcltk install
 
@@ -99,9 +93,9 @@
 
     Build Tk
     --------
-    cd dist\tk8.4.7\win
-    nmake -f makefile.vc TCLDIR=..\..\tcl8.4.7
-    nmake -f makefile.vc TCLDIR=..\..\tcl8.4.7 INSTALLDIR=..\..\tcltk install
+    cd dist\tk8.4.12\win
+    nmake -f makefile.vc TCLDIR=..\..\tcl8.4.12
+    nmake -f makefile.vc TCLDIR=..\..\tcl8.4.12 INSTALLDIR=..\..\tcltk install
 
     XXX Should we compile with OPTS=threads?
 
@@ -109,7 +103,7 @@
     XXX directory.  Is all of that really needed for Python use of Tcl/Tk?
 
     Optional:  run tests, via
-        nmake -f makefile.vc TCLDIR=..\..\tcl8.4.7 test
+        nmake -f makefile.vc TCLDIR=..\..\tcl8.4.12 test
 
         On WinXP Pro, wholly up to date as of 30-Aug-2004:
         all.tcl:        Total   8420    Passed  6826    Skipped 1581    Failed  13
@@ -118,12 +112,9 @@
 
    Built Tix
    ---------
-   Download from http://prdownloads.sourceforge.net/tix/tix-8.1.4.tar.gz
-   cd dist\tix-8.1.4
-   [cygwin]patch -p1 < ..\..\python\PC\tix.diff
-   cd win
-   nmake -f makefile.vc
-   nmake -f makefile.vc install
+   cd dist\tix-8.4.0\win
+   nmake -f python.mak
+   nmake -f python.mak install
 
 bz2
     Python wrapper for the libbz2 compression library.  Homepage
@@ -223,23 +214,24 @@
       target ("Release IA64" for Itanium, "Release AMD64" for AMD64), e.g.
     devenv db-4.4.20\build_win32\Berkeley_DB.sln /build "Release AMD64" /project db_static /useenv
 
+_sqlite3
+    Python wrapper for SQLite library.
+    
+    Get the source code through
+    
+    svn export http://svn.python.org/projects/external/sqlite-source-3.3.4
+    
+    To use the extension module in a Python build tree, copy sqlite3.dll into
+    the PCbuild folder.
 
 _ssl
     Python wrapper for the secure sockets library.
 
-    Get the latest source code for OpenSSL from
-        http://www.openssl.org
+    Get the source code through
 
-    You (probably) don't want the "engine" code.  For example, get
-        openssl-0.9.7d.tar.gz
-    not
-        openssl-engine-0.9.7d.tar.gz
+    svn export http://svn.python.org/projects/external/openssl-0.9.8a
 
-    (see #1233049 for using 0.9.8).
-    Unpack into the "dist" directory, retaining the folder name from
-    the archive - for example, the latest stable OpenSSL will install as
-        dist/openssl-0.9.7d
-
+    Alternatively, get the latest version from http://www.openssl.org.
     You can (theoretically) use any version of OpenSSL you like - the
     build process will automatically select the latest version.
 
@@ -281,6 +273,143 @@
 to the Itanium configuration; make sure you use the latest version of
 vsextcomp.
 
+Building Python Using the free MS Toolkit Compiler
+--------------------------------------------------
+
+The build process for Visual C++ can be used almost unchanged with the free MS
+Toolkit Compiler. This provides a way of building Python using freely
+available software.
+
+Requirements
+
+    To build Python, the following tools are required:
+
+    * The Visual C++ Toolkit Compiler
+        from http://msdn.microsoft.com/visualc/vctoolkit2003/
+    * A recent Platform SDK
+        from http://www.microsoft.com/downloads/details.aspx?FamilyID=484269e2-3b89-47e3-8eb7-1f2be6d7123a
+    * The .NET 1.1 SDK
+        from http://www.microsoft.com/downloads/details.aspx?FamilyID=9b3a2ca6-3647-4070-9f41-a333c6b9181d
+
+    [Does anyone have better URLs for the last 2 of these?]
+
+    The toolkit compiler is needed as it is an optimising compiler (the
+    compiler supplied with the .NET SDK is a non-optimising version). The
+    platform SDK is needed to provide the Windows header files and libraries
+    (the Windows 2003 Server SP1 edition, typical install, is known to work -
+    other configurations or versions are probably fine as well). The .NET 1.1
+    SDK is needed because it contains a version of msvcrt.dll which links to
+    the msvcr71.dll CRT. Note that the .NET 2.0 SDK is NOT acceptable, as it
+    references msvcr80.dll.
+
+    All of the above items should be installed as normal.
+
+    If you intend to build the openssl (needed for the _ssl extension) you
+    will need the C runtime sources installed as part of the platform SDK.
+
+    In addition, you will need Nant, available from
+    http://nant.sourceforge.net. The 0.85 release candidate 3 version is known
+    to work. This is the latest released version at the time of writing. Later
+    "nightly build" versions are known NOT to work - it is not clear at
+    present whether future released versions will work.
+
+Setting up the environment
+
+    Start a platform SDK "build environment window" from the start menu. The
+    "Windows XP 32-bit retail" version is known to work.
+
+    Add the following directories to your PATH:
+        * The toolkit compiler directory
+        * The SDK "Win64" binaries directory
+	* The Nant directory
+    Add to your INCLUDE environment variable:
+        * The toolkit compiler INCLUDE directory
+    Add to your LIB environment variable:
+        * The toolkit compiler LIB directory
+	* The .NET SDK Visual Studio 2003 VC7\lib directory
+
+    The following commands should set things up as you need them:
+
+        rem Set these values according to where you installed the software
+        set TOOLKIT=C:\Program Files\Microsoft Visual C++ Toolkit 2003
+        set SDK=C:\Program Files\Microsoft Platform SDK
+        set NET=C:\Program Files\Microsoft Visual Studio .NET 2003
+        set NANT=C:\Utils\Nant
+
+        set PATH=%TOOLKIT%\bin;%PATH%;%SDK%\Bin\win64;%NANT%\bin
+        set INCLUDE=%TOOLKIT%\include;%INCLUDE%
+        set LIB=%TOOLKIT%\lib;%NET%\VC7\lib;%LIB%
+
+    The "win64" directory from the SDK is added to supply executables such as
+    "cvtres" and "lib", which are not available elsewhere. The versions in the
+    "win64" directory are 32-bit programs, so they are fine to use here.
+
+    That's it. To build Python (the core only, no binary extensions which
+    depend on external libraries) you just need to issue the command
+
+        nant -buildfile:python.build all
+
+    from within the PCBuild directory.
+
+Extension modules
+
+    To build those extension modules which require external libraries
+    (_tkinter, bz2, _bsddb, _sqlite3, _ssl) you can follow the instructions
+    for the Visual Studio build above, with a few minor modifications. These
+    instructions have only been tested using the sources in the Python
+    subversion repository - building from original sources should work, but
+    has not been tested.
+
+    For each extension module you wish to build, you should remove the
+    associated include line from the excludeprojects section of pc.build.
+
+    The changes required are:
+
+    _tkinter
+        The tix makefile (tix-8.4.0\win\makefile.vc) must be modified to
+	remove references to TOOLS32. The relevant lines should be changed to
+	read:
+            cc32 = cl.exe
+            link32 = link.exe
+            include32 = 
+	The remainder of the build instructions will work as given.
+
+    bz2
+        No changes are needed
+
+    _bsddb
+        The file db.build should be copied from the Python PCBuild directory
+	to the directory db-4.4.20\build_win32.
+
+	The file db_static.vcproj in db-4.4.20\build_win32 should be edited to
+	remove the string "$(SolutionDir)" - this occurs in 2 places, only
+	relevant for 64-bit builds. (The edit is required as otherwise, nant
+	wants to read the solution file, which is not in a suitable form).
+
+	The bsddb library can then be build with the command
+	    nant -buildfile:db.build all
+	run from the db-4.4.20\build_win32 directory.
+
+    _sqlite3
+        No changes are needed. However, in order for the tests to succeed, a
+	copy of sqlite3.dll must be downloaded, and placed alongside
+	python.exe.
+
+    _ssl
+        The documented build process works as written. However, it needs a
+	copy of the file setargv.obj, which is not supplied in the platform
+	SDK. However, the sources are available (in the crt source code). To
+	build setargv.obj, proceed as follows:
+
+        Copy setargv.c, cruntime.h and internal.h from %SDK%\src\crt to a
+	temporary directory.
+	Compile using "cl /c /I. /MD /D_CRTBLD setargv.c"
+	Copy the resulting setargv.obj to somewhere on your LIB environment
+	(%SDK%\lib is a reasonable place).
+
+	With setargv.obj in place, the standard build process should work
+	fine.
+
 YOUR OWN EXTENSION DLLs
 -----------------------
 If you want to create your own extension module DLL, there's an example
diff --git a/Parser/Python.asdl b/Parser/Python.asdl
index 4397d89..00de381 100644
--- a/Parser/Python.asdl
+++ b/Parser/Python.asdl
@@ -98,8 +98,11 @@
 	comprehension = (expr target, expr iter, expr* ifs)
 
 	-- not sure what to call the first argument for raise and except
-
-	excepthandler = (expr? type, expr? name, stmt* body)
+	-- TODO(jhylton): Figure out if there is a better way to handle
+	--                lineno and col_offset fields, particularly when
+        --                ast is exposed to Python.
+	excepthandler = (expr? type, expr? name, stmt* body, int lineno,
+	                 int col_offset)
 
 	arguments = (expr* args, identifier? vararg, 
 		     identifier? kwarg, expr* defaults)
diff --git a/Parser/asdl_c.py b/Parser/asdl_c.py
index ad2209d..b6d9830 100755
--- a/Parser/asdl_c.py
+++ b/Parser/asdl_c.py
@@ -155,8 +155,10 @@
             type = sum.types[i]
             enum.append("%s_kind=%d" % (type.name, i + 1))
 
+        emit("enum _%(name)s_kind {" + ", ".join(enum) + "};")
+
         emit("struct _%(name)s {")
-        emit("enum { " + ", ".join(enum) + " } kind;", depth + 1)
+        emit("enum _%(name)s_kind kind;", depth + 1)
         emit("union {", depth + 1)
         for t in sum.types:
             self.visit(t, depth + 2)
@@ -186,7 +188,10 @@
         ctype = get_c_type(field.type)
         name = field.name
         if field.seq:
-            self.emit("asdl_seq *%(name)s;" % locals(), depth)
+            if field.type.value in ('cmpop',):
+                self.emit("asdl_int_seq *%(name)s;" % locals(), depth)
+            else:
+                self.emit("asdl_seq *%(name)s;" % locals(), depth)
         else:
             self.emit("%(ctype)s %(name)s;" % locals(), depth)
 
@@ -232,7 +237,10 @@
                 name = f.name
             # XXX should extend get_c_type() to handle this
             if f.seq:
-                ctype = "asdl_seq *"
+                if f.type.value in ('cmpop',):
+                    ctype = "asdl_int_seq *"
+                else:
+                    ctype = "asdl_seq *"
             else:
                 ctype = get_c_type(f.type)
             args.append((ctype, name, f.opt or f.seq))
@@ -276,7 +284,7 @@
         emit("%s p;" % ctype, 1)
         for argtype, argname, opt in args:
             # XXX hack alert: false is allowed for a bool
-            if not opt and not argtype == "bool":
+            if not opt and not (argtype == "bool" or argtype == "int"):
                 emit("if (!%s) {" % argname, 1)
                 emit("PyErr_SetString(PyExc_ValueError,", 2)
                 msg = "field %s is required for %s" % (argname, name)
@@ -413,10 +421,10 @@
 
 static int add_attributes(PyTypeObject* type, char**attrs, int num_fields)
 {
-    int i;
+    int i, result;
     PyObject *s, *l = PyList_New(num_fields);
     if (!l) return 0;
-    for(i=0; i < num_fields; i++) {
+    for(i = 0; i < num_fields; i++) {
         s = PyString_FromString(attrs[i]);
         if (!s) {
             Py_DECREF(l);
@@ -424,7 +432,9 @@
         }
         PyList_SET_ITEM(l, i, s);
     }
-    return PyObject_SetAttrString((PyObject*)type, "_attributes", l) >=0;
+    result = PyObject_SetAttrString((PyObject*)type, "_attributes", l) >= 0;
+    Py_DECREF(l);
+    return result;
 }
 
 static PyObject* ast2obj_list(asdl_seq *seq, PyObject* (*func)(void*))
@@ -465,9 +475,9 @@
 }
 """, 0, reflow=False)
 
-        self.emit("static int initialized;", 0)
         self.emit("static int init_types(void)",0)
         self.emit("{", 0)
+        self.emit("static int initialized;", 1)
         self.emit("if (initialized) return 1;", 1)
         self.emit('AST_type = make_type("AST", &PyBaseObject_Type, NULL, 0);', 1)
         for dfn in mod.dfns:
@@ -543,7 +553,7 @@
         self.addObj(cons.name)
 
     def addObj(self, name):
-        self.emit('if(PyDict_SetItemString(d, "%s", (PyObject*)%s_type) < 0) return;' % (name, name), 1)
+        self.emit('if (PyDict_SetItemString(d, "%s", (PyObject*)%s_type) < 0) return;' % (name, name), 1)
 
 _SPECIALIZED_SEQUENCES = ('stmt', 'expr')
 
@@ -677,8 +687,8 @@
                 self.emit("if (!value) goto failed;", depth+1)
                 self.emit("for(i = 0; i < n; i++)", depth+1)
                 # This cannot fail, so no need for error handling
-                self.emit("PyList_SET_ITEM(value, i, ast2obj_%s((%s_ty)asdl_seq_GET(%s, i)));" %
-                                (field.type, field.type, value), depth+2, reflow=False)
+                self.emit("PyList_SET_ITEM(value, i, ast2obj_cmpop((cmpop_ty)asdl_seq_GET(%s, i)));" % value,
+                          depth+2, reflow=False)
                 self.emit("}", depth)
             else:
                 self.emit("value = ast2obj_list(%s, ast2obj_%s);" % (value, field.type), depth)
@@ -716,39 +726,35 @@
         sys.exit(1)
     if INC_DIR:
         p = "%s/%s-ast.h" % (INC_DIR, mod.name)
-    else:
-        p = "%s-ast.h" % mod.name
-    f = open(p, "wb")
-    print >> f, auto_gen_msg
-    print >> f, '#include "asdl.h"\n'
-    c = ChainOfVisitors(TypeDefVisitor(f),
-                        StructVisitor(f),
-                        PrototypeVisitor(f),
-                        )
-    c.visit(mod)
-    print >>f, "PyObject* PyAST_mod2obj(mod_ty t);"
-    f.close()
+        f = open(p, "wb")
+        print >> f, auto_gen_msg
+        print >> f, '#include "asdl.h"\n'
+        c = ChainOfVisitors(TypeDefVisitor(f),
+                            StructVisitor(f),
+                            PrototypeVisitor(f),
+                            )
+        c.visit(mod)
+        print >>f, "PyObject* PyAST_mod2obj(mod_ty t);"
+        f.close()
 
     if SRC_DIR:
-        p = "%s/%s-ast.c" % (SRC_DIR, mod.name)
-    else:
-        p = "%s-ast.c" % mod.name
-    f = open(p, "wb")
-    print >> f, auto_gen_msg
-    print >> f, '#include "Python.h"'
-    print >> f, '#include "%s-ast.h"' % mod.name
-    print >> f
-    print >>f, "static PyTypeObject* AST_type;"
-    v = ChainOfVisitors(
-                        PyTypesDeclareVisitor(f),
-                        PyTypesVisitor(f),
-                        FunctionVisitor(f),
-                        ObjVisitor(f),
-                        ASTModuleVisitor(f),
-                        PartingShots(f),
-                        )
-    v.visit(mod)
-    f.close()
+        p = os.path.join(SRC_DIR, str(mod.name) + "-ast.c")
+        f = open(p, "wb")
+        print >> f, auto_gen_msg
+        print >> f, '#include "Python.h"'
+        print >> f, '#include "%s-ast.h"' % mod.name
+        print >> f
+        print >>f, "static PyTypeObject* AST_type;"
+        v = ChainOfVisitors(
+            PyTypesDeclareVisitor(f),
+            PyTypesVisitor(f),
+            FunctionVisitor(f),
+            ObjVisitor(f),
+            ASTModuleVisitor(f),
+            PartingShots(f),
+            )
+        v.visit(mod)
+        f.close()
 
 if __name__ == "__main__":
     import sys
@@ -757,6 +763,9 @@
     INC_DIR = ''
     SRC_DIR = ''
     opts, args = getopt.getopt(sys.argv[1:], "h:c:")
+    if len(opts) != 1:
+        print "Must specify exactly one output file"
+        sys.exit(1)
     for o, v in opts:
         if o == '-h':
             INC_DIR = v
@@ -764,4 +773,5 @@
             SRC_DIR = v
     if len(args) != 1:
         print "Must specify single input file"
+        sys.exit(1)
     main(args[0])
diff --git a/Parser/bitset.c b/Parser/bitset.c
index 3834e19..b5543b8 100644
--- a/Parser/bitset.c
+++ b/Parser/bitset.c
@@ -8,7 +8,7 @@
 newbitset(int nbits)
 {
 	int nbytes = NBYTES(nbits);
-	bitset ss = PyMem_NEW(BYTE, nbytes);
+	bitset ss = (char *)PyObject_MALLOC(sizeof(BYTE) *  nbytes);
 	
 	if (ss == NULL)
 		Py_FatalError("no mem for bitset");
@@ -22,7 +22,7 @@
 void
 delbitset(bitset ss)
 {
-	PyMem_DEL(ss);
+	PyObject_FREE(ss);
 }
 
 int
diff --git a/Parser/firstsets.c b/Parser/firstsets.c
index 0f4e09d..00467b3 100644
--- a/Parser/firstsets.c
+++ b/Parser/firstsets.c
@@ -59,7 +59,7 @@
 	nbits = g->g_ll.ll_nlabels;
 	result = newbitset(nbits);
 	
-	sym = PyMem_NEW(int, 1);
+	sym = (int *)PyObject_MALLOC(sizeof(int));
 	if (sym == NULL)
 		Py_FatalError("no mem for new sym in calcfirstset");
 	nsyms = 1;
@@ -73,7 +73,8 @@
 				break;
 		}
 		if (j >= nsyms) { /* New label */
-			PyMem_RESIZE(sym, int, nsyms + 1);
+			sym = (int *)PyObject_REALLOC(sym, 
+                                                sizeof(int) * (nsyms + 1));
 			if (sym == NULL)
 				Py_FatalError(
 				    "no mem to resize sym in calcfirstset");
@@ -108,5 +109,5 @@
 		printf(" }\n");
 	}
 
-	PyMem_FREE(sym);
+	PyObject_FREE(sym);
 }
diff --git a/Parser/grammar.c b/Parser/grammar.c
index d8e3897..b0dafe7 100644
--- a/Parser/grammar.c
+++ b/Parser/grammar.c
@@ -20,7 +20,7 @@
 {
 	grammar *g;
 	
-	g = PyMem_NEW(grammar, 1);
+	g = (grammar *)PyObject_MALLOC(sizeof(grammar));
 	if (g == NULL)
 		Py_FatalError("no mem for new grammar");
 	g->g_ndfas = 0;
@@ -37,7 +37,8 @@
 {
 	dfa *d;
 	
-	PyMem_RESIZE(g->g_dfa, dfa, g->g_ndfas + 1);
+	g->g_dfa = (dfa *)PyObject_REALLOC(g->g_dfa, 
+                                            sizeof(dfa) * (g->g_ndfas + 1));
 	if (g->g_dfa == NULL)
 		Py_FatalError("no mem to resize dfa in adddfa");
 	d = &g->g_dfa[g->g_ndfas++];
@@ -55,7 +56,8 @@
 {
 	state *s;
 	
-	PyMem_RESIZE(d->d_state, state, d->d_nstates + 1);
+	d->d_state = (state *)PyObject_REALLOC(d->d_state,
+				      sizeof(state) * (d->d_nstates + 1));
 	if (d->d_state == NULL)
 		Py_FatalError("no mem to resize state in addstate");
 	s = &d->d_state[d->d_nstates++];
@@ -78,7 +80,7 @@
 	assert(0 <= to && to < d->d_nstates);
 	
 	s = &d->d_state[from];
-	PyMem_RESIZE(s->s_arc, arc, s->s_narcs + 1);
+	s->s_arc = (arc *)PyObject_REALLOC(s->s_arc, sizeof(arc) * (s->s_narcs + 1));
 	if (s->s_arc == NULL)
 		Py_FatalError("no mem to resize arc list in addarc");
 	a = &s->s_arc[s->s_narcs++];
@@ -97,7 +99,8 @@
 			strcmp(ll->ll_label[i].lb_str, str) == 0)
 			return i;
 	}
-	PyMem_RESIZE(ll->ll_label, label, ll->ll_nlabels + 1);
+	ll->ll_label = (label *)PyObject_REALLOC(ll->ll_label,
+					sizeof(label) * (ll->ll_nlabels + 1));
 	if (ll->ll_label == NULL)
 		Py_FatalError("no mem to resize labellist in addlabel");
 	lb = &ll->ll_label[ll->ll_nlabels++];
@@ -195,7 +198,7 @@
 				name_len = p - src;
 			else
 				name_len = strlen(src);
-			dest = malloc(name_len + 1);
+			dest = (char *)malloc(name_len + 1);
 			strncpy(dest, src, name_len);
 			dest[name_len] = '\0';
 			free(lb->lb_str);
diff --git a/Parser/myreadline.c b/Parser/myreadline.c
index a932a87..32a1088 100644
--- a/Parser/myreadline.c
+++ b/Parser/myreadline.c
@@ -111,7 +111,7 @@
 	size_t n;
 	char *p;
 	n = 100;
-	if ((p = PyMem_MALLOC(n)) == NULL)
+	if ((p = (char *)PyMem_MALLOC(n)) == NULL)
 		return NULL;
 	fflush(sys_stdout);
 #ifndef RISCOS
@@ -141,7 +141,7 @@
 	n = strlen(p);
 	while (n > 0 && p[n-1] != '\n') {
 		size_t incr = n+2;
-		p = PyMem_REALLOC(p, n + incr);
+		p = (char *)PyMem_REALLOC(p, n + incr);
 		if (p == NULL)
 			return NULL;
 		if (incr > INT_MAX) {
@@ -151,7 +151,7 @@
 			break;
 		n += strlen(p+n);
 	}
-	return PyMem_REALLOC(p, n+1);
+	return (char *)PyMem_REALLOC(p, n+1);
 }
 
 
diff --git a/Parser/node.c b/Parser/node.c
index 7ed6c0e..97f887a 100644
--- a/Parser/node.c
+++ b/Parser/node.c
@@ -62,7 +62,7 @@
  * Win98).
  *
  * In a run of compileall across the 2.3a0 Lib directory, Andrew MacIntyre
- * reported that, with this scheme, 89% of PyMem_RESIZE calls in
+ * reported that, with this scheme, 89% of PyObject_REALLOC calls in
  * PyNode_AddChild passed 1 for the size, and 9% passed 4.  So this usually
  * wastes very little memory, but is very effective at sidestepping
  * platform-realloc disasters on vulnernable platforms.
diff --git a/Parser/parser.c b/Parser/parser.c
index 213410c..45302ed 100644
--- a/Parser/parser.c
+++ b/Parser/parser.c
@@ -75,7 +75,7 @@
 	
 	if (!g->g_accel)
 		PyGrammar_AddAccelerators(g);
-	ps = PyMem_NEW(parser_state, 1);
+	ps = (parser_state *)PyMem_MALLOC(sizeof(parser_state));
 	if (ps == NULL)
 		return NULL;
 	ps->p_grammar = g;
@@ -84,7 +84,7 @@
 #endif
 	ps->p_tree = PyNode_New(start);
 	if (ps->p_tree == NULL) {
-		PyMem_DEL(ps);
+		PyMem_FREE(ps);
 		return NULL;
 	}
 	s_reset(&ps->p_stack);
@@ -98,7 +98,7 @@
 	/* NB If you want to save the parse tree,
 	   you must set p_tree to NULL before calling delparser! */
 	PyNode_Free(ps->p_tree);
-	PyMem_DEL(ps);
+	PyMem_FREE(ps);
 }
 
 
diff --git a/Parser/pgen.c b/Parser/pgen.c
index e643d33..dfe7cac 100644
--- a/Parser/pgen.c
+++ b/Parser/pgen.c
@@ -49,7 +49,8 @@
 {
 	nfastate *st;
 	
-	PyMem_RESIZE(nf->nf_state, nfastate, nf->nf_nstates + 1);
+	nf->nf_state = (nfastate *)PyObject_REALLOC(nf->nf_state, 
+                                    sizeof(nfastate) * (nf->nf_nstates + 1));
 	if (nf->nf_state == NULL)
 		Py_FatalError("out of mem");
 	st = &nf->nf_state[nf->nf_nstates++];
@@ -65,7 +66,8 @@
 	nfaarc *ar;
 	
 	st = &nf->nf_state[from];
-	PyMem_RESIZE(st->st_arc, nfaarc, st->st_narcs + 1);
+	st->st_arc = (nfaarc *)PyObject_REALLOC(st->st_arc,
+				      sizeof(nfaarc) * (st->st_narcs + 1));
 	if (st->st_arc == NULL)
 		Py_FatalError("out of mem");
 	ar = &st->st_arc[st->st_narcs++];
@@ -79,7 +81,7 @@
 	nfa *nf;
 	static int type = NT_OFFSET; /* All types will be disjunct */
 	
-	nf = PyMem_NEW(nfa, 1);
+	nf = (nfa *)PyObject_MALLOC(sizeof(nfa));
 	if (nf == NULL)
 		Py_FatalError("no mem for new nfa");
 	nf->nf_type = type++;
@@ -104,7 +106,7 @@
 {
 	nfagrammar *gr;
 	
-	gr = PyMem_NEW(nfagrammar, 1);
+	gr = (nfagrammar *)PyObject_MALLOC(sizeof(nfagrammar));
 	if (gr == NULL)
 		Py_FatalError("no mem for new nfa grammar");
 	gr->gr_nnfas = 0;
@@ -121,7 +123,8 @@
 	nfa *nf;
 	
 	nf = newnfa(name);
-	PyMem_RESIZE(gr->gr_nfa, nfa *, gr->gr_nnfas + 1);
+	gr->gr_nfa = (nfa **)PyObject_REALLOC(gr->gr_nfa,
+				      sizeof(nfa) * (gr->gr_nnfas + 1));
 	if (gr->gr_nfa == NULL)
 		Py_FatalError("out of mem");
 	gr->gr_nfa[gr->gr_nnfas++] = nf;
@@ -361,7 +364,7 @@
 typedef struct _ss_state {
 	bitset	ss_ss;
 	int	ss_narcs;
-	ss_arc	*ss_arc;
+	struct _ss_arc	*ss_arc;
 	int	ss_deleted;
 	int	ss_finish;
 	int	ss_rename;
@@ -392,7 +395,7 @@
 	
 	ss = newbitset(nbits);
 	addclosure(ss, nf, nf->nf_start);
-	xx_state = PyMem_NEW(ss_state, 1);
+	xx_state = (ss_state *)PyObject_MALLOC(sizeof(ss_state));
 	if (xx_state == NULL)
 		Py_FatalError("no mem for xx_state in makedfa");
 	xx_nstates = 1;
@@ -411,6 +414,7 @@
 
 	/* For each unmarked state... */
 	for (istate = 0; istate < xx_nstates; ++istate) {
+		size_t size;
 		yy = &xx_state[istate];
 		ss = yy->ss_ss;
 		/* For all its states... */
@@ -430,8 +434,9 @@
 						goto found;
 				}
 				/* Add new arc for this state */
-				PyMem_RESIZE(yy->ss_arc, ss_arc,
-					     yy->ss_narcs + 1);
+				size = sizeof(ss_arc) * (yy->ss_narcs + 1);
+				yy->ss_arc = (ss_arc *)PyObject_REALLOC(
+                                                            yy->ss_arc, size);
 				if (yy->ss_arc == NULL)
 					Py_FatalError("out of mem");
 				zz = &yy->ss_arc[yy->ss_narcs++];
@@ -453,7 +458,9 @@
 					goto done;
 				}
 			}
-			PyMem_RESIZE(xx_state, ss_state, xx_nstates + 1);
+			size = sizeof(ss_state) * (xx_nstates + 1);
+			xx_state = (ss_state *)PyObject_REALLOC(xx_state, 
+                                                                    size);
 			if (xx_state == NULL)
 				Py_FatalError("out of mem");
 			zz->sa_arrow = xx_nstates;
diff --git a/Parser/pgenmain.c b/Parser/pgenmain.c
index 695e2b7..fc27a2c 100644
--- a/Parser/pgenmain.c
+++ b/Parser/pgenmain.c
@@ -104,7 +104,7 @@
 					putc(' ', stderr);
 			}
 			fprintf(stderr, "^\n");
-			PyMem_DEL(err.text);
+			PyObject_FREE(err.text);
 		}
 		Py_Exit(1);
 	}
@@ -136,7 +136,7 @@
 PyOS_Readline(FILE *sys_stdin, FILE *sys_stdout, char *prompt)
 {
 	size_t n = 1000;
-	char *p = PyMem_MALLOC(n);
+	char *p = (char *)PyMem_MALLOC(n);
 	char *q;
 	if (p == NULL)
 		return NULL;
@@ -149,7 +149,7 @@
 	n = strlen(p);
 	if (n > 0 && p[n-1] != '\n')
 		p[n-1] = '\n';
-	return PyMem_REALLOC(p, n+1);
+	return (char *)PyMem_REALLOC(p, n+1);
 }
 
 /* No-nonsense fgets */
diff --git a/Parser/tokenizer.c b/Parser/tokenizer.c
index 0631ca3..d9dcc41 100644
--- a/Parser/tokenizer.c
+++ b/Parser/tokenizer.c
@@ -105,7 +105,8 @@
 static struct tok_state *
 tok_new(void)
 {
-	struct tok_state *tok = PyMem_NEW(struct tok_state, 1);
+	struct tok_state *tok = (struct tok_state *)PyMem_MALLOC(
+                                                sizeof(struct tok_state));
 	if (tok == NULL)
 		return NULL;
 	tok->buf = tok->cur = tok->end = tok->inp = tok->start = NULL;
@@ -163,7 +164,7 @@
 {
 	tok->decoding_erred = 1;
 	if (tok->fp != NULL && tok->buf != NULL) /* see PyTokenizer_Free */
-		PyMem_DEL(tok->buf);
+		PyMem_FREE(tok->buf);
 	tok->buf = NULL;
 	return NULL;		/* as if it were EOF */
 }
@@ -171,7 +172,7 @@
 static char *
 new_string(const char *s, Py_ssize_t len)
 {
-	char* result = PyMem_NEW(char, len + 1);
+	char* result = (char *)PyMem_MALLOC(len + 1);
 	if (result != NULL) {
 		memcpy(result, s, len);
 		result[len] = '\0';
@@ -236,7 +237,7 @@
 				char* r = new_string(begin, t - begin);
 				char* q = get_normal_name(r);
 				if (r != q) {
-					PyMem_DEL(r);
+					PyMem_FREE(r);
 					r = new_string(q, strlen(q));
 				}
 				return r;
@@ -277,18 +278,18 @@
 					tok->decoding_state = -1;
 				}
 				else
-					PyMem_DEL(cs);
+					PyMem_FREE(cs);
 #else
                                 /* Without Unicode support, we cannot
                                    process the coding spec. Since there
                                    won't be any Unicode literals, that
                                    won't matter. */
-				PyMem_DEL(cs);
+				PyMem_FREE(cs);
 #endif
 			}
 		} else {	/* then, compare cs with BOM */
 			r = (strcmp(tok->encoding, cs) == 0);
-			PyMem_DEL(cs);
+			PyMem_FREE(cs);
 		}
 	}
 	if (!r) {
@@ -334,7 +335,7 @@
 		return 1;
 	}
 	if (tok->encoding != NULL)
-		PyMem_DEL(tok->encoding);
+		PyMem_FREE(tok->encoding);
 	tok->encoding = new_string("utf-8", 5);	/* resulting is in utf-8 */
 	return 1;
   NON_BOM:
@@ -345,7 +346,7 @@
 
 /* Read a line of text from TOK into S, using the stream in TOK.
    Return NULL on failure, else S.
-   
+
    On entry, tok->decoding_buffer will be one of:
      1) NULL: need to call tok->decoding_readline to get a new line
      2) PyUnicodeObject *: decoding_feof has called tok->decoding_readline and
@@ -354,7 +355,7 @@
            (in the s buffer) to copy entire contents of the line read
            by tok->decoding_readline.  tok->decoding_buffer has the overflow.
            In this case, fp_readl is called in a loop (with an expanded buffer)
-           until the buffer ends with a '\n' (or until the end of the file is 
+           until the buffer ends with a '\n' (or until the end of the file is
            reached): see tok_nextc and its calls to decoding_fgets.
 */
 
@@ -470,7 +471,7 @@
 			break;
 		} else if (tok->decoding_state > 0) {
 			/* We want a 'raw' read. */
-			line = Py_UniversalNewlineFgets(s, size, 
+			line = Py_UniversalNewlineFgets(s, size,
 							tok->fp, NULL);
 			break;
 		} else {
@@ -502,11 +503,11 @@
 		char buf[500];
 		/* Need to add 1 to the line number, since this line
 		   has not been counted, yet.  */
-		sprintf(buf, 
+		sprintf(buf,
 			"Non-ASCII character '\\x%.2x' "
 			"in file %.200s on line %i, "
 			"but no encoding declared; "
-			"see http://www.python.org/peps/pep-0263.html for details", 
+			"see http://www.python.org/peps/pep-0263.html for details",
 			badchar, tok->filename, tok->lineno + 1);
 		PyErr_SetString(PyExc_SyntaxError, buf);
 		return error_ret(tok);
@@ -537,13 +538,15 @@
 
 /* Fetch a byte from TOK, using the string buffer. */
 
-static int buf_getc(struct tok_state *tok) {
+static int
+buf_getc(struct tok_state *tok) {
 	return Py_CHARMASK(*tok->str++);
 }
 
 /* Unfetch a byte from TOK, using the string buffer. */
 
-static void buf_ungetc(int c, struct tok_state *tok) {
+static void
+buf_ungetc(int c, struct tok_state *tok) {
 	tok->str--;
 	assert(Py_CHARMASK(*tok->str) == c);	/* tok->cur may point to read-only segment */
 }
@@ -551,7 +554,8 @@
 /* Set the readline function for TOK to ENC. For the string-based
    tokenizer, this means to just record the encoding. */
 
-static int buf_setreadl(struct tok_state *tok, const char* enc) {
+static int
+buf_setreadl(struct tok_state *tok, const char* enc) {
 	tok->enc = enc;
 	return 1;
 }
@@ -653,7 +657,7 @@
 	struct tok_state *tok = tok_new();
 	if (tok == NULL)
 		return NULL;
-	if ((tok->buf = PyMem_NEW(char, BUFSIZ)) == NULL) {
+	if ((tok->buf = (char *)PyMem_MALLOC(BUFSIZ)) == NULL) {
 		PyTokenizer_Free(tok);
 		return NULL;
 	}
@@ -672,14 +676,14 @@
 PyTokenizer_Free(struct tok_state *tok)
 {
 	if (tok->encoding != NULL)
-		PyMem_DEL(tok->encoding);
+		PyMem_FREE(tok->encoding);
 #ifndef PGEN
 	Py_XDECREF(tok->decoding_readline);
 	Py_XDECREF(tok->decoding_buffer);
 #endif
 	if (tok->fp != NULL && tok->buf != NULL)
-		PyMem_DEL(tok->buf);
-	PyMem_DEL(tok);
+		PyMem_FREE(tok->buf);
+	PyMem_FREE(tok);
 }
 
 #if !defined(PGEN) && defined(Py_USING_UNICODE)
@@ -711,7 +715,9 @@
 	if (utf8 == NULL)
 		goto error_clear;
 
-	converted = new_string(PyString_AsString(utf8), PyString_Size(utf8));
+	assert(PyString_Check(utf8));
+	converted = new_string(PyString_AS_STRING(utf8),
+			       PyString_GET_SIZE(utf8));
 	Py_DECREF(utf8);
 	if (converted == NULL)
 		goto error_nomem;
@@ -719,7 +725,7 @@
 	PyMem_FREE(*inp);
 	*inp = converted;
 	if (tok->encoding != NULL)
-		PyMem_DEL(tok->encoding);
+		PyMem_FREE(tok->encoding);
 	tok->encoding = new_string(encoding, strlen(encoding));
 	if (tok->encoding == NULL)
 		goto error_nomem;
@@ -770,38 +776,38 @@
 			return Py_CHARMASK(*tok->cur++);
 		}
 		if (tok->prompt != NULL) {
-			char *new = PyOS_Readline(stdin, stdout, tok->prompt);
+			char *newtok = PyOS_Readline(stdin, stdout, tok->prompt);
 			if (tok->nextprompt != NULL)
 				tok->prompt = tok->nextprompt;
-			if (new == NULL)
+			if (newtok == NULL)
 				tok->done = E_INTR;
-			else if (*new == '\0') {
-				PyMem_FREE(new);
+			else if (*newtok == '\0') {
+				PyMem_FREE(newtok);
 				tok->done = E_EOF;
 			}
 #if !defined(PGEN) && defined(Py_USING_UNICODE)
-			else if (tok_stdin_decode(tok, &new) != 0)
-				PyMem_FREE(new);
+			else if (tok_stdin_decode(tok, &newtok) != 0)
+				PyMem_FREE(newtok);
 #endif
 			else if (tok->start != NULL) {
 				size_t start = tok->start - tok->buf;
 				size_t oldlen = tok->cur - tok->buf;
-				size_t newlen = oldlen + strlen(new);
+				size_t newlen = oldlen + strlen(newtok);
 				char *buf = tok->buf;
-				PyMem_RESIZE(buf, char, newlen+1);
+				buf = (char *)PyMem_REALLOC(buf, newlen+1);
 				tok->lineno++;
 				if (buf == NULL) {
-					PyMem_DEL(tok->buf);
+					PyMem_FREE(tok->buf);
 					tok->buf = NULL;
-					PyMem_FREE(new);
+					PyMem_FREE(newtok);
 					tok->done = E_NOMEM;
 					return EOF;
 				}
 				tok->buf = buf;
 				tok->cur = tok->buf + oldlen;
 				tok->line_start = tok->cur;
-				strcpy(tok->buf + oldlen, new);
-				PyMem_FREE(new);
+				strcpy(tok->buf + oldlen, newtok);
+				PyMem_FREE(newtok);
 				tok->inp = tok->buf + newlen;
 				tok->end = tok->inp + 1;
 				tok->start = tok->buf + start;
@@ -809,8 +815,8 @@
 			else {
 				tok->lineno++;
 				if (tok->buf != NULL)
-					PyMem_DEL(tok->buf);
-				tok->buf = new;
+					PyMem_FREE(tok->buf);
+				tok->buf = newtok;
 				tok->line_start = tok->buf;
 				tok->cur = tok->buf;
 				tok->line_start = tok->buf;
@@ -824,7 +830,8 @@
 			char *pt;
 			if (tok->start == NULL) {
 				if (tok->buf == NULL) {
-					tok->buf = PyMem_NEW(char, BUFSIZ);
+					tok->buf = (char *)
+						PyMem_MALLOC(BUFSIZ);
 					if (tok->buf == NULL) {
 						tok->done = E_NOMEM;
 						return EOF;
@@ -859,7 +866,8 @@
 				Py_ssize_t curvalid = tok->inp - tok->buf;
 				Py_ssize_t newsize = curvalid + BUFSIZ;
 				char *newbuf = tok->buf;
-				PyMem_RESIZE(newbuf, char, newsize);
+				newbuf = (char *)PyMem_REALLOC(newbuf,
+							       newsize);
 				if (newbuf == NULL) {
 					tok->done = E_NOMEM;
 					tok->cur = tok->inp;
@@ -1182,9 +1190,9 @@
 			}
 		}
 	}
-	
+
 	tok->start = tok->cur;
-	
+
 	/* Return pending indents/dedents */
 	if (tok->pendin != 0) {
 		if (tok->pendin < 0) {
@@ -1196,27 +1204,27 @@
 			return INDENT;
 		}
 	}
-	
+
  again:
 	tok->start = NULL;
 	/* Skip spaces */
 	do {
 		c = tok_nextc(tok);
 	} while (c == ' ' || c == '\t' || c == '\014');
-	
+
 	/* Set start of current token */
 	tok->start = tok->cur - 1;
-	
+
 	/* Skip comment */
 	if (c == '#')
 		while (c != EOF && c != '\n')
 			c = tok_nextc(tok);
-	
+
 	/* Check for EOF and errors now */
 	if (c == EOF) {
 		return tok->done == E_EOF ? ENDMARKER : ERRORTOKEN;
 	}
-	
+
 	/* Identifier (most frequent token!) */
 	if (isalpha(c) || c == '_') {
 		/* Process r"", u"" and ur"" */
@@ -1244,7 +1252,7 @@
 		*p_end = tok->cur;
 		return NAME;
 	}
-	
+
 	/* Newline */
 	if (c == '\n') {
 		tok->atbol = 1;
@@ -1255,7 +1263,7 @@
                 tok->cont_line = 0;
 		return NEWLINE;
 	}
-	
+
 	/* Period or number starting with period? */
 	if (c == '.') {
 		c = tok_nextc(tok);
@@ -1418,7 +1426,7 @@
 		*p_end = tok->cur;
 		return STRING;
 	}
-	
+
 	/* Line continuation */
 	if (c == '\\') {
 		c = tok_nextc(tok);
@@ -1430,7 +1438,7 @@
                 tok->cont_line = 1;
 		goto again; /* Read next line */
 	}
-	
+
 	/* Check for two-character token */
 	{
 		int c2 = tok_nextc(tok);
@@ -1449,7 +1457,7 @@
 		}
 		tok_backup(tok, c2);
 	}
-	
+
 	/* Keep track of parentheses nesting level */
 	switch (c) {
 	case '(':
@@ -1463,7 +1471,7 @@
 		tok->level--;
 		break;
 	}
-	
+
 	/* Punctuation character */
 	*p_start = tok->start;
 	*p_end = tok->cur;
diff --git a/Python/Python-ast.c b/Python/Python-ast.c
index 3f8345e..249f70e 100644
--- a/Python/Python-ast.c
+++ b/Python/Python-ast.c
@@ -331,6 +331,8 @@
         "type",
         "name",
         "body",
+        "lineno",
+        "col_offset",
 };
 static PyTypeObject *arguments_type;
 static PyObject* ast2obj_arguments(void*);
@@ -381,10 +383,10 @@
 
 static int add_attributes(PyTypeObject* type, char**attrs, int num_fields)
 {
-    int i;
+    int i, result;
     PyObject *s, *l = PyList_New(num_fields);
     if (!l) return 0;
-    for(i=0; i < num_fields; i++) {
+    for(i = 0; i < num_fields; i++) {
         s = PyString_FromString(attrs[i]);
         if (!s) {
             Py_DECREF(l);
@@ -392,7 +394,9 @@
         }
         PyList_SET_ITEM(l, i, s);
     }
-    return PyObject_SetAttrString((PyObject*)type, "_attributes", l) >=0;
+    result = PyObject_SetAttrString((PyObject*)type, "_attributes", l) >= 0;
+    Py_DECREF(l);
+    return result;
 }
 
 static PyObject* ast2obj_list(asdl_seq *seq, PyObject* (*func)(void*))
@@ -432,9 +436,9 @@
     return PyInt_FromLong(b);
 }
 
-static int initialized;
 static int init_types(void)
 {
+        static int initialized;
         if (initialized) return 1;
         AST_type = make_type("AST", &PyBaseObject_Type, NULL, 0);
         mod_type = make_type("mod", AST_type, NULL, 0);
@@ -710,7 +714,7 @@
                                        comprehension_fields, 3);
         if (!comprehension_type) return 0;
         excepthandler_type = make_type("excepthandler", AST_type,
-                                       excepthandler_fields, 3);
+                                       excepthandler_fields, 5);
         if (!excepthandler_type) return 0;
         arguments_type = make_type("arguments", AST_type, arguments_fields, 4);
         if (!arguments_type) return 0;
@@ -1499,8 +1503,8 @@
 }
 
 expr_ty
-Compare(expr_ty left, asdl_seq * ops, asdl_seq * comparators, int lineno, int
-        col_offset, PyArena *arena)
+Compare(expr_ty left, asdl_int_seq * ops, asdl_seq * comparators, int lineno,
+        int col_offset, PyArena *arena)
 {
         expr_ty p;
         if (!left) {
@@ -1841,7 +1845,8 @@
 }
 
 excepthandler_ty
-excepthandler(expr_ty type, expr_ty name, asdl_seq * body, PyArena *arena)
+excepthandler(expr_ty type, expr_ty name, asdl_seq * body, int lineno, int
+              col_offset, PyArena *arena)
 {
         excepthandler_ty p;
         p = (excepthandler_ty)PyArena_Malloc(arena, sizeof(*p));
@@ -1852,6 +1857,8 @@
         p->type = type;
         p->name = name;
         p->body = body;
+        p->lineno = lineno;
+        p->col_offset = col_offset;
         return p;
 }
 
@@ -2915,6 +2922,16 @@
         if (PyObject_SetAttrString(result, "body", value) == -1)
                 goto failed;
         Py_DECREF(value);
+        value = ast2obj_int(o->lineno);
+        if (!value) goto failed;
+        if (PyObject_SetAttrString(result, "lineno", value) == -1)
+                goto failed;
+        Py_DECREF(value);
+        value = ast2obj_int(o->col_offset);
+        if (!value) goto failed;
+        if (PyObject_SetAttrString(result, "col_offset", value) == -1)
+                goto failed;
+        Py_DECREF(value);
         return result;
 failed:
         Py_XDECREF(value);
@@ -3033,146 +3050,146 @@
                 return;
         if (PyModule_AddStringConstant(m, "__version__", "42753") < 0)
                 return;
-        if(PyDict_SetItemString(d, "mod", (PyObject*)mod_type) < 0) return;
-        if(PyDict_SetItemString(d, "Module", (PyObject*)Module_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Interactive", (PyObject*)Interactive_type)
-           < 0) return;
-        if(PyDict_SetItemString(d, "Expression", (PyObject*)Expression_type) <
-           0) return;
-        if(PyDict_SetItemString(d, "Suite", (PyObject*)Suite_type) < 0) return;
-        if(PyDict_SetItemString(d, "stmt", (PyObject*)stmt_type) < 0) return;
-        if(PyDict_SetItemString(d, "FunctionDef", (PyObject*)FunctionDef_type)
-           < 0) return;
-        if(PyDict_SetItemString(d, "ClassDef", (PyObject*)ClassDef_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Return", (PyObject*)Return_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Delete", (PyObject*)Delete_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Assign", (PyObject*)Assign_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "AugAssign", (PyObject*)AugAssign_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Print", (PyObject*)Print_type) < 0) return;
-        if(PyDict_SetItemString(d, "For", (PyObject*)For_type) < 0) return;
-        if(PyDict_SetItemString(d, "While", (PyObject*)While_type) < 0) return;
-        if(PyDict_SetItemString(d, "If", (PyObject*)If_type) < 0) return;
-        if(PyDict_SetItemString(d, "With", (PyObject*)With_type) < 0) return;
-        if(PyDict_SetItemString(d, "Raise", (PyObject*)Raise_type) < 0) return;
-        if(PyDict_SetItemString(d, "TryExcept", (PyObject*)TryExcept_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "TryFinally", (PyObject*)TryFinally_type) <
-           0) return;
-        if(PyDict_SetItemString(d, "Assert", (PyObject*)Assert_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Import", (PyObject*)Import_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "ImportFrom", (PyObject*)ImportFrom_type) <
-           0) return;
-        if(PyDict_SetItemString(d, "Exec", (PyObject*)Exec_type) < 0) return;
-        if(PyDict_SetItemString(d, "Global", (PyObject*)Global_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Expr", (PyObject*)Expr_type) < 0) return;
-        if(PyDict_SetItemString(d, "Pass", (PyObject*)Pass_type) < 0) return;
-        if(PyDict_SetItemString(d, "Break", (PyObject*)Break_type) < 0) return;
-        if(PyDict_SetItemString(d, "Continue", (PyObject*)Continue_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "expr", (PyObject*)expr_type) < 0) return;
-        if(PyDict_SetItemString(d, "BoolOp", (PyObject*)BoolOp_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "BinOp", (PyObject*)BinOp_type) < 0) return;
-        if(PyDict_SetItemString(d, "UnaryOp", (PyObject*)UnaryOp_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Lambda", (PyObject*)Lambda_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "IfExp", (PyObject*)IfExp_type) < 0) return;
-        if(PyDict_SetItemString(d, "Dict", (PyObject*)Dict_type) < 0) return;
-        if(PyDict_SetItemString(d, "ListComp", (PyObject*)ListComp_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "GeneratorExp",
-           (PyObject*)GeneratorExp_type) < 0) return;
-        if(PyDict_SetItemString(d, "Yield", (PyObject*)Yield_type) < 0) return;
-        if(PyDict_SetItemString(d, "Compare", (PyObject*)Compare_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Call", (PyObject*)Call_type) < 0) return;
-        if(PyDict_SetItemString(d, "Repr", (PyObject*)Repr_type) < 0) return;
-        if(PyDict_SetItemString(d, "Num", (PyObject*)Num_type) < 0) return;
-        if(PyDict_SetItemString(d, "Str", (PyObject*)Str_type) < 0) return;
-        if(PyDict_SetItemString(d, "Attribute", (PyObject*)Attribute_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Subscript", (PyObject*)Subscript_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Name", (PyObject*)Name_type) < 0) return;
-        if(PyDict_SetItemString(d, "List", (PyObject*)List_type) < 0) return;
-        if(PyDict_SetItemString(d, "Tuple", (PyObject*)Tuple_type) < 0) return;
-        if(PyDict_SetItemString(d, "expr_context",
-           (PyObject*)expr_context_type) < 0) return;
-        if(PyDict_SetItemString(d, "Load", (PyObject*)Load_type) < 0) return;
-        if(PyDict_SetItemString(d, "Store", (PyObject*)Store_type) < 0) return;
-        if(PyDict_SetItemString(d, "Del", (PyObject*)Del_type) < 0) return;
-        if(PyDict_SetItemString(d, "AugLoad", (PyObject*)AugLoad_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "AugStore", (PyObject*)AugStore_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Param", (PyObject*)Param_type) < 0) return;
-        if(PyDict_SetItemString(d, "slice", (PyObject*)slice_type) < 0) return;
-        if(PyDict_SetItemString(d, "Ellipsis", (PyObject*)Ellipsis_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Slice", (PyObject*)Slice_type) < 0) return;
-        if(PyDict_SetItemString(d, "ExtSlice", (PyObject*)ExtSlice_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Index", (PyObject*)Index_type) < 0) return;
-        if(PyDict_SetItemString(d, "boolop", (PyObject*)boolop_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "And", (PyObject*)And_type) < 0) return;
-        if(PyDict_SetItemString(d, "Or", (PyObject*)Or_type) < 0) return;
-        if(PyDict_SetItemString(d, "operator", (PyObject*)operator_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Add", (PyObject*)Add_type) < 0) return;
-        if(PyDict_SetItemString(d, "Sub", (PyObject*)Sub_type) < 0) return;
-        if(PyDict_SetItemString(d, "Mult", (PyObject*)Mult_type) < 0) return;
-        if(PyDict_SetItemString(d, "Div", (PyObject*)Div_type) < 0) return;
-        if(PyDict_SetItemString(d, "Mod", (PyObject*)Mod_type) < 0) return;
-        if(PyDict_SetItemString(d, "Pow", (PyObject*)Pow_type) < 0) return;
-        if(PyDict_SetItemString(d, "LShift", (PyObject*)LShift_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "RShift", (PyObject*)RShift_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "BitOr", (PyObject*)BitOr_type) < 0) return;
-        if(PyDict_SetItemString(d, "BitXor", (PyObject*)BitXor_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "BitAnd", (PyObject*)BitAnd_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "FloorDiv", (PyObject*)FloorDiv_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "unaryop", (PyObject*)unaryop_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Invert", (PyObject*)Invert_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "Not", (PyObject*)Not_type) < 0) return;
-        if(PyDict_SetItemString(d, "UAdd", (PyObject*)UAdd_type) < 0) return;
-        if(PyDict_SetItemString(d, "USub", (PyObject*)USub_type) < 0) return;
-        if(PyDict_SetItemString(d, "cmpop", (PyObject*)cmpop_type) < 0) return;
-        if(PyDict_SetItemString(d, "Eq", (PyObject*)Eq_type) < 0) return;
-        if(PyDict_SetItemString(d, "NotEq", (PyObject*)NotEq_type) < 0) return;
-        if(PyDict_SetItemString(d, "Lt", (PyObject*)Lt_type) < 0) return;
-        if(PyDict_SetItemString(d, "LtE", (PyObject*)LtE_type) < 0) return;
-        if(PyDict_SetItemString(d, "Gt", (PyObject*)Gt_type) < 0) return;
-        if(PyDict_SetItemString(d, "GtE", (PyObject*)GtE_type) < 0) return;
-        if(PyDict_SetItemString(d, "Is", (PyObject*)Is_type) < 0) return;
-        if(PyDict_SetItemString(d, "IsNot", (PyObject*)IsNot_type) < 0) return;
-        if(PyDict_SetItemString(d, "In", (PyObject*)In_type) < 0) return;
-        if(PyDict_SetItemString(d, "NotIn", (PyObject*)NotIn_type) < 0) return;
-        if(PyDict_SetItemString(d, "comprehension",
-           (PyObject*)comprehension_type) < 0) return;
-        if(PyDict_SetItemString(d, "excepthandler",
-           (PyObject*)excepthandler_type) < 0) return;
-        if(PyDict_SetItemString(d, "arguments", (PyObject*)arguments_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "keyword", (PyObject*)keyword_type) < 0)
-           return;
-        if(PyDict_SetItemString(d, "alias", (PyObject*)alias_type) < 0) return;
+        if (PyDict_SetItemString(d, "mod", (PyObject*)mod_type) < 0) return;
+        if (PyDict_SetItemString(d, "Module", (PyObject*)Module_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Interactive", (PyObject*)Interactive_type)
+            < 0) return;
+        if (PyDict_SetItemString(d, "Expression", (PyObject*)Expression_type) <
+            0) return;
+        if (PyDict_SetItemString(d, "Suite", (PyObject*)Suite_type) < 0) return;
+        if (PyDict_SetItemString(d, "stmt", (PyObject*)stmt_type) < 0) return;
+        if (PyDict_SetItemString(d, "FunctionDef", (PyObject*)FunctionDef_type)
+            < 0) return;
+        if (PyDict_SetItemString(d, "ClassDef", (PyObject*)ClassDef_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Return", (PyObject*)Return_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Delete", (PyObject*)Delete_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Assign", (PyObject*)Assign_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "AugAssign", (PyObject*)AugAssign_type) <
+            0) return;
+        if (PyDict_SetItemString(d, "Print", (PyObject*)Print_type) < 0) return;
+        if (PyDict_SetItemString(d, "For", (PyObject*)For_type) < 0) return;
+        if (PyDict_SetItemString(d, "While", (PyObject*)While_type) < 0) return;
+        if (PyDict_SetItemString(d, "If", (PyObject*)If_type) < 0) return;
+        if (PyDict_SetItemString(d, "With", (PyObject*)With_type) < 0) return;
+        if (PyDict_SetItemString(d, "Raise", (PyObject*)Raise_type) < 0) return;
+        if (PyDict_SetItemString(d, "TryExcept", (PyObject*)TryExcept_type) <
+            0) return;
+        if (PyDict_SetItemString(d, "TryFinally", (PyObject*)TryFinally_type) <
+            0) return;
+        if (PyDict_SetItemString(d, "Assert", (PyObject*)Assert_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Import", (PyObject*)Import_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "ImportFrom", (PyObject*)ImportFrom_type) <
+            0) return;
+        if (PyDict_SetItemString(d, "Exec", (PyObject*)Exec_type) < 0) return;
+        if (PyDict_SetItemString(d, "Global", (PyObject*)Global_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Expr", (PyObject*)Expr_type) < 0) return;
+        if (PyDict_SetItemString(d, "Pass", (PyObject*)Pass_type) < 0) return;
+        if (PyDict_SetItemString(d, "Break", (PyObject*)Break_type) < 0) return;
+        if (PyDict_SetItemString(d, "Continue", (PyObject*)Continue_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "expr", (PyObject*)expr_type) < 0) return;
+        if (PyDict_SetItemString(d, "BoolOp", (PyObject*)BoolOp_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "BinOp", (PyObject*)BinOp_type) < 0) return;
+        if (PyDict_SetItemString(d, "UnaryOp", (PyObject*)UnaryOp_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Lambda", (PyObject*)Lambda_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "IfExp", (PyObject*)IfExp_type) < 0) return;
+        if (PyDict_SetItemString(d, "Dict", (PyObject*)Dict_type) < 0) return;
+        if (PyDict_SetItemString(d, "ListComp", (PyObject*)ListComp_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "GeneratorExp",
+            (PyObject*)GeneratorExp_type) < 0) return;
+        if (PyDict_SetItemString(d, "Yield", (PyObject*)Yield_type) < 0) return;
+        if (PyDict_SetItemString(d, "Compare", (PyObject*)Compare_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Call", (PyObject*)Call_type) < 0) return;
+        if (PyDict_SetItemString(d, "Repr", (PyObject*)Repr_type) < 0) return;
+        if (PyDict_SetItemString(d, "Num", (PyObject*)Num_type) < 0) return;
+        if (PyDict_SetItemString(d, "Str", (PyObject*)Str_type) < 0) return;
+        if (PyDict_SetItemString(d, "Attribute", (PyObject*)Attribute_type) <
+            0) return;
+        if (PyDict_SetItemString(d, "Subscript", (PyObject*)Subscript_type) <
+            0) return;
+        if (PyDict_SetItemString(d, "Name", (PyObject*)Name_type) < 0) return;
+        if (PyDict_SetItemString(d, "List", (PyObject*)List_type) < 0) return;
+        if (PyDict_SetItemString(d, "Tuple", (PyObject*)Tuple_type) < 0) return;
+        if (PyDict_SetItemString(d, "expr_context",
+            (PyObject*)expr_context_type) < 0) return;
+        if (PyDict_SetItemString(d, "Load", (PyObject*)Load_type) < 0) return;
+        if (PyDict_SetItemString(d, "Store", (PyObject*)Store_type) < 0) return;
+        if (PyDict_SetItemString(d, "Del", (PyObject*)Del_type) < 0) return;
+        if (PyDict_SetItemString(d, "AugLoad", (PyObject*)AugLoad_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "AugStore", (PyObject*)AugStore_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Param", (PyObject*)Param_type) < 0) return;
+        if (PyDict_SetItemString(d, "slice", (PyObject*)slice_type) < 0) return;
+        if (PyDict_SetItemString(d, "Ellipsis", (PyObject*)Ellipsis_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Slice", (PyObject*)Slice_type) < 0) return;
+        if (PyDict_SetItemString(d, "ExtSlice", (PyObject*)ExtSlice_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Index", (PyObject*)Index_type) < 0) return;
+        if (PyDict_SetItemString(d, "boolop", (PyObject*)boolop_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "And", (PyObject*)And_type) < 0) return;
+        if (PyDict_SetItemString(d, "Or", (PyObject*)Or_type) < 0) return;
+        if (PyDict_SetItemString(d, "operator", (PyObject*)operator_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Add", (PyObject*)Add_type) < 0) return;
+        if (PyDict_SetItemString(d, "Sub", (PyObject*)Sub_type) < 0) return;
+        if (PyDict_SetItemString(d, "Mult", (PyObject*)Mult_type) < 0) return;
+        if (PyDict_SetItemString(d, "Div", (PyObject*)Div_type) < 0) return;
+        if (PyDict_SetItemString(d, "Mod", (PyObject*)Mod_type) < 0) return;
+        if (PyDict_SetItemString(d, "Pow", (PyObject*)Pow_type) < 0) return;
+        if (PyDict_SetItemString(d, "LShift", (PyObject*)LShift_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "RShift", (PyObject*)RShift_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "BitOr", (PyObject*)BitOr_type) < 0) return;
+        if (PyDict_SetItemString(d, "BitXor", (PyObject*)BitXor_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "BitAnd", (PyObject*)BitAnd_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "FloorDiv", (PyObject*)FloorDiv_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "unaryop", (PyObject*)unaryop_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Invert", (PyObject*)Invert_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "Not", (PyObject*)Not_type) < 0) return;
+        if (PyDict_SetItemString(d, "UAdd", (PyObject*)UAdd_type) < 0) return;
+        if (PyDict_SetItemString(d, "USub", (PyObject*)USub_type) < 0) return;
+        if (PyDict_SetItemString(d, "cmpop", (PyObject*)cmpop_type) < 0) return;
+        if (PyDict_SetItemString(d, "Eq", (PyObject*)Eq_type) < 0) return;
+        if (PyDict_SetItemString(d, "NotEq", (PyObject*)NotEq_type) < 0) return;
+        if (PyDict_SetItemString(d, "Lt", (PyObject*)Lt_type) < 0) return;
+        if (PyDict_SetItemString(d, "LtE", (PyObject*)LtE_type) < 0) return;
+        if (PyDict_SetItemString(d, "Gt", (PyObject*)Gt_type) < 0) return;
+        if (PyDict_SetItemString(d, "GtE", (PyObject*)GtE_type) < 0) return;
+        if (PyDict_SetItemString(d, "Is", (PyObject*)Is_type) < 0) return;
+        if (PyDict_SetItemString(d, "IsNot", (PyObject*)IsNot_type) < 0) return;
+        if (PyDict_SetItemString(d, "In", (PyObject*)In_type) < 0) return;
+        if (PyDict_SetItemString(d, "NotIn", (PyObject*)NotIn_type) < 0) return;
+        if (PyDict_SetItemString(d, "comprehension",
+            (PyObject*)comprehension_type) < 0) return;
+        if (PyDict_SetItemString(d, "excepthandler",
+            (PyObject*)excepthandler_type) < 0) return;
+        if (PyDict_SetItemString(d, "arguments", (PyObject*)arguments_type) <
+            0) return;
+        if (PyDict_SetItemString(d, "keyword", (PyObject*)keyword_type) < 0)
+            return;
+        if (PyDict_SetItemString(d, "alias", (PyObject*)alias_type) < 0) return;
 }
 
 
diff --git a/Python/asdl.c b/Python/asdl.c
index 225df6e..72329b9 100644
--- a/Python/asdl.c
+++ b/Python/asdl.c
@@ -8,7 +8,24 @@
 	size_t n = sizeof(asdl_seq) +
 			(size ? (sizeof(void *) * (size - 1)) : 0);
 
-    seq = (asdl_seq *)PyArena_Malloc(arena, n);
+	seq = (asdl_seq *)PyArena_Malloc(arena, n);
+	if (!seq) {
+		PyErr_NoMemory();
+		return NULL;
+	}
+	memset(seq, 0, n);
+	seq->size = size;
+	return seq;
+}
+
+asdl_int_seq *
+asdl_int_seq_new(int size, PyArena *arena)
+{
+	asdl_int_seq *seq = NULL;
+	size_t n = sizeof(asdl_seq) +
+			(size ? (sizeof(int) * (size - 1)) : 0);
+
+	seq = (asdl_int_seq *)PyArena_Malloc(arena, n);
 	if (!seq) {
 		PyErr_NoMemory();
 		return NULL;
diff --git a/Python/ast.c b/Python/ast.c
index bb1774b..0b3b485 100644
--- a/Python/ast.c
+++ b/Python/ast.c
@@ -31,7 +31,7 @@
 static expr_ty ast_for_expr(struct compiling *, const node *);
 static stmt_ty ast_for_stmt(struct compiling *, const node *);
 static asdl_seq *ast_for_suite(struct compiling *, const node *);
-static asdl_seq *ast_for_exprlist(struct compiling *, const node *, int);
+static asdl_seq *ast_for_exprlist(struct compiling *, const node *, expr_context_ty);
 static expr_ty ast_for_testlist(struct compiling *, const node *);
 static expr_ty ast_for_testlist_gexp(struct compiling *, const node *);
 
@@ -191,6 +191,10 @@
 
     if (flags && flags->cf_flags & PyCF_SOURCE_IS_UTF8) {
         c.c_encoding = "utf-8";
+        if (TYPE(n) == encoding_decl) {
+                ast_error(n, "encoding declaration in Unicode string");
+                goto error;
+        }
     } else if (TYPE(n) == encoding_decl) {
         c.c_encoding = STR(n);
         n = CHILD(n, 0);
@@ -243,7 +247,8 @@
                 stmts = asdl_seq_new(1, arena);
                 if (!stmts)
 		    goto error;
-                asdl_seq_SET(stmts, 0, Pass(n->n_lineno, n->n_col_offset, arena));
+                asdl_seq_SET(stmts, 0, Pass(n->n_lineno, n->n_col_offset,
+                                            arena));
                 return Interactive(stmts, arena);
             }
             else {
@@ -311,7 +316,7 @@
         case PERCENT:
             return Mod;
         default:
-            return 0;
+            return (operator_ty)0;
     }
 }
 
@@ -419,7 +424,7 @@
 	int i;
 
 	for (i = 0; i < asdl_seq_LEN(s); i++) {
-	    if (!set_context(asdl_seq_GET(s, i), ctx, n))
+	    if (!set_context((expr_ty)asdl_seq_GET(s, i), ctx, n))
 		return 0;
 	}
     }
@@ -460,7 +465,7 @@
                 return Mult;
         default:
             PyErr_Format(PyExc_SystemError, "invalid augassign: %s", STR(n));
-            return 0;
+            return (operator_ty)0;
     }
 }
 
@@ -494,7 +499,7 @@
             default:
                 PyErr_Format(PyExc_SystemError, "invalid comp_op: %s",
                              STR(n));
-                return 0;
+                return (cmpop_ty)0;
 	}
     }
     else if (NCH(n) == 2) {
@@ -508,12 +513,12 @@
             default:
                 PyErr_Format(PyExc_SystemError, "invalid comp_op: %s %s",
                              STR(CHILD(n, 0)), STR(CHILD(n, 1)));
-                return 0;
+                return (cmpop_ty)0;
 	}
     }
     PyErr_Format(PyExc_SystemError, "invalid comp_op: has %d children",
                  NCH(n));
-    return 0;
+    return (cmpop_ty)0;
 }
 
 static asdl_seq *
@@ -564,8 +569,8 @@
 	    		ast_error(child, "assignment to None");
 		    	return NULL;
 		    }   
-            arg = Name(NEW_IDENTIFIER(child), Store, LINENO(child), child->n_col_offset,
-                       c->c_arena);
+            arg = Name(NEW_IDENTIFIER(child), Store, LINENO(child),
+                       child->n_col_offset, c->c_arena);
 	    }
         else {
             arg = compiler_complex_args(c, CHILD(CHILD(n, 2*i), 1));
@@ -641,17 +646,25 @@
 		    goto error;
 		}
                 if (NCH(ch) == 3) {
-                    asdl_seq_SET(args, k++, 
-                                    compiler_complex_args(c, CHILD(ch, 1))); 
-		}
-                else if (TYPE(CHILD(ch, 0)) == NAME) {
+		    ch = CHILD(ch, 1);
+		    /* def foo((x)): is not complex, special case. */
+		    if (NCH(ch) != 1) {
+			/* We have complex arguments, setup for unpacking. */
+			asdl_seq_SET(args, k++, compiler_complex_args(c, ch));
+		    } else {
+			/* def foo((x)): setup for checking NAME below. */
+			ch = CHILD(ch, 0);
+		    }
+                }
+                if (TYPE(CHILD(ch, 0)) == NAME) {
 		    expr_ty name;
 		    if (!strcmp(STR(CHILD(ch, 0)), "None")) {
 			    ast_error(CHILD(ch, 0), "assignment to None");
 			    goto error;
 		    }
                     name = Name(NEW_IDENTIFIER(CHILD(ch, 0)),
-                                Param, LINENO(ch), ch->n_col_offset, c->c_arena);
+                                Param, LINENO(ch), ch->n_col_offset,
+                                c->c_arena);
                     if (!name)
                         goto error;
                     asdl_seq_SET(args, k++, name);
@@ -743,7 +756,8 @@
 	name_expr = NULL;
     }
     else if (NCH(n) == 5) { /* Call with no arguments */
-	d = Call(name_expr, NULL, NULL, NULL, NULL, LINENO(n), n->n_col_offset, c->c_arena);
+	d = Call(name_expr, NULL, NULL, NULL, NULL, LINENO(n),
+                 n->n_col_offset, c->c_arena);
 	if (!d)
 	    return NULL;
 	name_expr = NULL;
@@ -815,7 +829,8 @@
     if (!body)
 	return NULL;
 
-    return FunctionDef(name, args, body, decorator_seq, LINENO(n), n->n_col_offset, c->c_arena);
+    return FunctionDef(name, args, body, decorator_seq, LINENO(n),
+                       n->n_col_offset, c->c_arena);
 }
 
 static expr_ty
@@ -861,7 +876,8 @@
     orelse = ast_for_expr(c, CHILD(n, 4));
     if (!orelse)
 	return NULL;
-    return IfExp(expression, body, orelse, LINENO(n), n->n_col_offset, c->c_arena);
+    return IfExp(expression, body, orelse, LINENO(n), n->n_col_offset,
+                 c->c_arena);
 }
 
 /* Count the number of 'for' loop in a list comprehension.
@@ -969,10 +985,11 @@
             return NULL;
 
 	if (asdl_seq_LEN(t) == 1)
-	    lc = comprehension(asdl_seq_GET(t, 0), expression, NULL,
+	    lc = comprehension((expr_ty)asdl_seq_GET(t, 0), expression, NULL,
                                c->c_arena);
 	else
-	    lc = comprehension(Tuple(t, Store, LINENO(ch), ch->n_col_offset, c->c_arena),
+	    lc = comprehension(Tuple(t, Store, LINENO(ch), ch->n_col_offset,
+                                     c->c_arena),
                                expression, NULL, c->c_arena);
         if (!lc)
             return NULL;
@@ -1114,10 +1131,11 @@
             return NULL;
 
         if (asdl_seq_LEN(t) == 1)
-            ge = comprehension(asdl_seq_GET(t, 0), expression,
+            ge = comprehension((expr_ty)asdl_seq_GET(t, 0), expression,
                                NULL, c->c_arena);
         else
-            ge = comprehension(Tuple(t, Store, LINENO(ch), ch->n_col_offset, c->c_arena),
+            ge = comprehension(Tuple(t, Store, LINENO(ch), ch->n_col_offset,
+                                     c->c_arena),
                                expression, NULL, c->c_arena);
 
         if (!ge)
@@ -1317,16 +1335,20 @@
 
     ch = CHILD(n, NCH(n) - 1);
     if (TYPE(ch) == sliceop) {
-	if (NCH(ch) == 1)
-            /* XXX: If only 1 child, then should just be a colon.  Should we
-               just skip assigning and just get to the return? */
-	    ch = CHILD(ch, 0);
-	else
-	    ch = CHILD(ch, 1);
-	if (TYPE(ch) == test) {
-	    step = ast_for_expr(c, ch);
+        if (NCH(ch) == 1) {
+            /* No expression, so step is None */
+            ch = CHILD(ch, 0);
+            step = Name(new_identifier("None", c->c_arena), Load,
+                        LINENO(ch), ch->n_col_offset, c->c_arena);
             if (!step)
                 return NULL;
+        } else {
+            ch = CHILD(ch, 1);
+            if (TYPE(ch) == test) {
+                step = ast_for_expr(c, ch);
+                if (!step)
+                    return NULL;
+            }
         }
     }
 
@@ -1343,7 +1365,7 @@
 
 	int i, nops;
 	expr_ty expr1, expr2, result;
-        operator_ty operator;
+        operator_ty newoperator;
 
         expr1 = ast_for_expr(c, CHILD(n, 0));
         if (!expr1)
@@ -1353,11 +1375,12 @@
         if (!expr2)
             return NULL;
 
-        operator = get_operator(CHILD(n, 1));
-        if (!operator)
+        newoperator = get_operator(CHILD(n, 1));
+        if (!newoperator)
             return NULL;
 
-	result = BinOp(expr1, operator, expr2, LINENO(n), n->n_col_offset, c->c_arena);
+	result = BinOp(expr1, newoperator, expr2, LINENO(n), n->n_col_offset,
+                       c->c_arena);
 	if (!result)
             return NULL;
 
@@ -1366,16 +1389,17 @@
 		expr_ty tmp_result, tmp;
 		const node* next_oper = CHILD(n, i * 2 + 1);
 
-		operator = get_operator(next_oper);
-                if (!operator)
+		newoperator = get_operator(next_oper);
+                if (!newoperator)
                     return NULL;
 
                 tmp = ast_for_expr(c, CHILD(n, i * 2 + 2));
                 if (!tmp)
                     return NULL;
 
-                tmp_result = BinOp(result, operator, tmp, 
-				   LINENO(next_oper), next_oper->n_col_offset, c->c_arena);
+                tmp_result = BinOp(result, newoperator, tmp, 
+				   LINENO(next_oper), next_oper->n_col_offset,
+                                   c->c_arena);
 		if (!tmp) 
 			return NULL;
 		result = tmp_result;
@@ -1393,7 +1417,8 @@
     REQ(n, trailer);
     if (TYPE(CHILD(n, 0)) == LPAR) {
         if (NCH(n) == 2)
-            return Call(left_expr, NULL, NULL, NULL, NULL, LINENO(n), n->n_col_offset, c->c_arena);
+            return Call(left_expr, NULL, NULL, NULL, NULL, LINENO(n),
+                        n->n_col_offset, c->c_arena);
         else
             return ast_for_call(c, CHILD(n, 1), left_expr);
     }
@@ -1409,7 +1434,8 @@
             slice_ty slc = ast_for_slice(c, CHILD(n, 0));
             if (!slc)
                 return NULL;
-            return Subscript(left_expr, slc, Load, LINENO(n), n->n_col_offset, c->c_arena);
+            return Subscript(left_expr, slc, Load, LINENO(n), n->n_col_offset,
+                             c->c_arena);
         }
         else {
             /* The grammar is ambiguous here. The ambiguity is resolved 
@@ -1550,7 +1576,8 @@
                 asdl_seq_SET(seq, i / 2, e);
             }
             if (!strcmp(STR(CHILD(n, 1)), "and"))
-                return BoolOp(And, seq, LINENO(n), n->n_col_offset, c->c_arena);
+                return BoolOp(And, seq, LINENO(n), n->n_col_offset,
+                              c->c_arena);
             assert(!strcmp(STR(CHILD(n, 1)), "or"));
             return BoolOp(Or, seq, LINENO(n), n->n_col_offset, c->c_arena);
         case not_test:
@@ -1563,7 +1590,8 @@
                 if (!expression)
                     return NULL;
 
-                return UnaryOp(Not, expression, LINENO(n), n->n_col_offset, c->c_arena);
+                return UnaryOp(Not, expression, LINENO(n), n->n_col_offset,
+                               c->c_arena);
             }
         case comparison:
             if (NCH(n) == 1) {
@@ -1572,8 +1600,9 @@
             }
             else {
                 expr_ty expression;
-                asdl_seq *ops, *cmps;
-                ops = asdl_seq_new(NCH(n) / 2, c->c_arena);
+                asdl_int_seq *ops;
+		asdl_seq *cmps;
+                ops = asdl_int_seq_new(NCH(n) / 2, c->c_arena);
                 if (!ops)
                     return NULL;
                 cmps = asdl_seq_new(NCH(n) / 2, c->c_arena);
@@ -1581,11 +1610,10 @@
                     return NULL;
                 }
                 for (i = 1; i < NCH(n); i += 2) {
-                    /* XXX cmpop_ty is just an enum */
-                    cmpop_ty operator;
+                    cmpop_ty newoperator;
 
-                    operator = ast_for_comp_op(CHILD(n, i));
-                    if (!operator) {
+                    newoperator = ast_for_comp_op(CHILD(n, i));
+                    if (!newoperator) {
                         return NULL;
 		    }
 
@@ -1594,7 +1622,7 @@
                         return NULL;
 		    }
                         
-                    asdl_seq_SET(ops, i / 2, (void *)(Py_uintptr_t)operator);
+                    asdl_seq_SET(ops, i / 2, newoperator);
                     asdl_seq_SET(cmps, i / 2, expression);
                 }
                 expression = ast_for_expr(c, CHILD(n, 0));
@@ -1602,7 +1630,8 @@
                     return NULL;
 		}
                     
-                return Compare(expression, ops, cmps, LINENO(n), n->n_col_offset, c->c_arena);
+                return Compare(expression, ops, cmps, LINENO(n),
+                               n->n_col_offset, c->c_arena);
             }
             break;
 
@@ -1853,7 +1882,7 @@
     }
     else if (TYPE(CHILD(n, 1)) == augassign) {
         expr_ty expr1, expr2;
-        operator_ty operator;
+        operator_ty newoperator;
 	node *ch = CHILD(n, 0);
 
 	if (TYPE(ch) == testlist)
@@ -1895,11 +1924,11 @@
         if (!expr2)
             return NULL;
 
-        operator = ast_for_augassign(CHILD(n, 1));
-        if (!operator)
+        newoperator = ast_for_augassign(CHILD(n, 1));
+        if (!newoperator)
             return NULL;
 
-	return AugAssign(expr1, operator, expr2, LINENO(n), n->n_col_offset, c->c_arena);
+	return AugAssign(expr1, newoperator, expr2, LINENO(n), n->n_col_offset, c->c_arena);
     }
     else {
 	int i;
@@ -1973,7 +2002,7 @@
 }
 
 static asdl_seq *
-ast_for_exprlist(struct compiling *c, const node *n, int context)
+ast_for_exprlist(struct compiling *c, const node *n, expr_context_ty context)
 {
     asdl_seq *seq;
     int i;
@@ -2512,8 +2541,8 @@
 	    int off = 5 + (n_elif - i - 1) * 4;
             expr_ty expression;
             asdl_seq *suite_seq;
-	    asdl_seq *new = asdl_seq_new(1, c->c_arena);
-	    if (!new)
+	    asdl_seq *newobj = asdl_seq_new(1, c->c_arena);
+	    if (!newobj)
 		return NULL;
             expression = ast_for_expr(c, CHILD(n, off));
             if (!expression)
@@ -2522,10 +2551,10 @@
             if (!suite_seq)
                 return NULL;
 
-	    asdl_seq_SET(new, 0,
+	    asdl_seq_SET(newobj, 0,
 			 If(expression, suite_seq, orelse, 
 			    LINENO(CHILD(n, off)), CHILD(n, off)->n_col_offset, c->c_arena));
-	    orelse = new;
+	    orelse = newobj;
 	}
 	return If(ast_for_expr(c, CHILD(n, 1)),
 		  ast_for_suite(c, CHILD(n, 3)),
@@ -2597,7 +2626,7 @@
     if (!_target)
         return NULL;
     if (asdl_seq_LEN(_target) == 1)
-	target = asdl_seq_GET(_target, 0);
+	target = (expr_ty)asdl_seq_GET(_target, 0);
     else
 	target = Tuple(_target, Store, LINENO(n), n->n_col_offset, c->c_arena);
 
@@ -2608,7 +2637,8 @@
     if (!suite_seq)
         return NULL;
 
-    return For(target, expression, suite_seq, seq, LINENO(n), n->n_col_offset, c->c_arena);
+    return For(target, expression, suite_seq, seq, LINENO(n), n->n_col_offset,
+               c->c_arena);
 }
 
 static excepthandler_ty
@@ -2623,7 +2653,8 @@
         if (!suite_seq)
             return NULL;
 
-	return excepthandler(NULL, NULL, suite_seq, c->c_arena);
+	return excepthandler(NULL, NULL, suite_seq, LINENO(exc),
+                             exc->n_col_offset, c->c_arena);
     }
     else if (NCH(exc) == 2) {
         expr_ty expression;
@@ -2636,7 +2667,8 @@
         if (!suite_seq)
             return NULL;
 
-	return excepthandler(expression, NULL, suite_seq, c->c_arena);
+	return excepthandler(expression, NULL, suite_seq, LINENO(exc),
+                             exc->n_col_offset, c->c_arena);
     }
     else if (NCH(exc) == 4) {
         asdl_seq *suite_seq;
@@ -2653,7 +2685,8 @@
         if (!suite_seq)
             return NULL;
 
-	return excepthandler(expression, e, suite_seq, c->c_arena);
+	return excepthandler(expression, e, suite_seq, LINENO(exc),
+                             exc->n_col_offset, c->c_arena);
     }
 
     PyErr_Format(PyExc_SystemError,
@@ -2722,7 +2755,8 @@
             asdl_seq_SET(handlers, i, e);
         }
 
-	except_st = TryExcept(body, handlers, orelse, LINENO(n), n->n_col_offset, c->c_arena);
+	except_st = TryExcept(body, handlers, orelse, LINENO(n),
+                              n->n_col_offset, c->c_arena);
         if (!finally)
 	    return except_st;
 
@@ -2797,16 +2831,16 @@
         s = ast_for_suite(c, CHILD(n, 3));
         if (!s)
             return NULL;
-	return ClassDef(NEW_IDENTIFIER(CHILD(n, 1)), NULL, s, LINENO(n), n->n_col_offset,
-                        c->c_arena);
+	return ClassDef(NEW_IDENTIFIER(CHILD(n, 1)), NULL, s, LINENO(n),
+                        n->n_col_offset, c->c_arena);
     }
     /* check for empty base list */
     if (TYPE(CHILD(n,3)) == RPAR) {
 	s = ast_for_suite(c, CHILD(n,5));
 	if (!s)
 		return NULL;
-	return ClassDef(NEW_IDENTIFIER(CHILD(n, 1)), NULL, s, LINENO(n), n->n_col_offset,
-                        c->c_arena);
+	return ClassDef(NEW_IDENTIFIER(CHILD(n, 1)), NULL, s, LINENO(n),
+                        n->n_col_offset, c->c_arena);
     }
 
     /* else handle the base class list */
@@ -2817,8 +2851,8 @@
     s = ast_for_suite(c, CHILD(n, 6));
     if (!s)
         return NULL;
-    return ClassDef(NEW_IDENTIFIER(CHILD(n, 1)), bases, s, LINENO(n), n->n_col_offset,
-                    c->c_arena);
+    return ClassDef(NEW_IDENTIFIER(CHILD(n, 1)), bases, s, LINENO(n),
+                    n->n_col_offset, c->c_arena);
 }
 
 static stmt_ty
@@ -3090,7 +3124,8 @@
 #ifndef Py_USING_UNICODE
 			/* This should not happen - we never see any other
 			   encoding. */
-			Py_FatalError("cannot deal with encodings in this build.");
+			Py_FatalError(
+                            "cannot deal with encodings in this build.");
 #else
 			PyObject *v, *u = PyUnicode_DecodeUTF8(s, len, NULL);
 			if (u == NULL)
diff --git a/Python/bltinmodule.c b/Python/bltinmodule.c
index 342c2db..914e0d1 100644
--- a/Python/bltinmodule.c
+++ b/Python/bltinmodule.c
@@ -31,23 +31,25 @@
 static PyObject *filtertuple (PyObject *, PyObject *);
 
 static PyObject *
-builtin___import__(PyObject *self, PyObject *args)
+builtin___import__(PyObject *self, PyObject *args, PyObject *kwds)
 {
+	static char *kwlist[] = {"name", "globals", "locals", "fromlist",
+				 "level", 0};
 	char *name;
 	PyObject *globals = NULL;
 	PyObject *locals = NULL;
 	PyObject *fromlist = NULL;
 	int level = -1;
 
-	if (!PyArg_ParseTuple(args, "s|OOOi:__import__",
-			&name, &globals, &locals, &fromlist, &level))
+	if (!PyArg_ParseTupleAndKeywords(args, kwds, "s|OOOi:__import__",
+			kwlist, &name, &globals, &locals, &fromlist, &level))
 		return NULL;
 	return PyImport_ImportModuleLevel(name, globals, locals,
 					  fromlist, level);
 }
 
 PyDoc_STRVAR(import_doc,
-"__import__(name, globals, locals, fromlist) -> module\n\
+"__import__(name, globals={}, locals={}, fromlist=[], level=-1) -> module\n\
 \n\
 Import a module.  The globals are only used to determine the context;\n\
 they are not modified.  The locals are currently unused.  The fromlist\n\
@@ -55,7 +57,10 @@
 empty list to emulate ``import name''.\n\
 When importing a module from a package, note that __import__('A.B', ...)\n\
 returns package A when fromlist is empty, but its submodule B when\n\
-fromlist is not empty.");
+fromlist is not empty.  Level is used to determine whether to perform \n\
+absolute or relative imports.  -1 is the original strategy of attempting\n\
+both absolute and relative imports, 0 is absolute, a positive number\n\
+is the number of parent directories to search relative to the current module.");
 
 
 static PyObject *
@@ -1704,32 +1709,34 @@
 
 
 static PyObject *
-builtin_round(PyObject *self, PyObject *args)
+builtin_round(PyObject *self, PyObject *args, PyObject *kwds)
 {
-	double x;
+	double number;
 	double f;
 	int ndigits = 0;
 	int i;
+	static char *kwlist[] = {"number", "ndigits", 0};
 
-	if (!PyArg_ParseTuple(args, "d|i:round", &x, &ndigits))
-			return NULL;
+	if (!PyArg_ParseTupleAndKeywords(args, kwds, "d|i:round",
+                kwlist, &number, &ndigits))
+                return NULL;
 	f = 1.0;
 	i = abs(ndigits);
 	while  (--i >= 0)
 		f = f*10.0;
 	if (ndigits < 0)
-		x /= f;
+		number /= f;
 	else
-		x *= f;
-	if (x >= 0.0)
-		x = floor(x + 0.5);
+		number *= f;
+	if (number >= 0.0)
+		number = floor(number + 0.5);
 	else
-		x = ceil(x - 0.5);
+		number = ceil(number - 0.5);
 	if (ndigits < 0)
-		x *= f;
+		number *= f;
 	else
-		x /= f;
-	return PyFloat_FromDouble(x);
+		number /= f;
+	return PyFloat_FromDouble(number);
 }
 
 PyDoc_STRVAR(round_doc,
@@ -2042,7 +2049,7 @@
 
 
 static PyMethodDef builtin_methods[] = {
- 	{"__import__",	builtin___import__, METH_VARARGS, import_doc},
+ 	{"__import__",	(PyCFunction)builtin___import__, METH_VARARGS | METH_KEYWORDS, import_doc},
  	{"abs",		builtin_abs,        METH_O, abs_doc},
  	{"all",		builtin_all,        METH_O, all_doc},
  	{"any",		builtin_any,        METH_O, any_doc},
@@ -2079,7 +2086,7 @@
  	{"reduce",	builtin_reduce,     METH_VARARGS, reduce_doc},
  	{"reload",	builtin_reload,     METH_O, reload_doc},
  	{"repr",	builtin_repr,       METH_O, repr_doc},
- 	{"round",	builtin_round,      METH_VARARGS, round_doc},
+ 	{"round",	(PyCFunction)builtin_round,      METH_VARARGS | METH_KEYWORDS, round_doc},
  	{"setattr",	builtin_setattr,    METH_VARARGS, setattr_doc},
  	{"sorted",	(PyCFunction)builtin_sorted,     METH_VARARGS | METH_KEYWORDS, sorted_doc},
  	{"sum",		builtin_sum,        METH_VARARGS, sum_doc},
diff --git a/Python/ceval.c b/Python/ceval.c
index 6f76781..6c8afba 100644
--- a/Python/ceval.c
+++ b/Python/ceval.c
@@ -507,7 +507,7 @@
 }
 
 PyObject *
-PyEval_EvalFrameEx(PyFrameObject *f, int throw)
+PyEval_EvalFrameEx(PyFrameObject *f, int throwflag)
 {
 #ifdef DXPAIRS
 	int lastopcode = 0;
@@ -756,7 +756,7 @@
 	x = Py_None;	/* Not a reference, just anything non-NULL */
 	w = NULL;
 
-	if (throw) { /* support for generator.throw() */
+	if (throwflag) { /* support for generator.throw() */
 		why = WHY_EXCEPTION;
 		goto on_error;
 	}
@@ -2153,6 +2153,9 @@
 		case SETUP_LOOP:
 		case SETUP_EXCEPT:
 		case SETUP_FINALLY:
+			/* NOTE: If you add any new block-setup opcodes that are not try/except/finally
+			   handlers, you may need to update the PyGen_NeedsFinalizing() function. */
+
 			PyFrame_BlockSetup(f, opcode, INSTR_OFFSET() + oparg,
 					   STACK_LEVEL());
 			continue;
@@ -3180,132 +3183,29 @@
 		      PyFrameObject *frame, int *instr_lb, int *instr_ub,
 		      int *instr_prev)
 {
-	/* The theory of SET_LINENO-less tracing.
-
-	   In a nutshell, we use the co_lnotab field of the code object
-	   to tell when execution has moved onto a different line.
-
-	   As mentioned above, the basic idea is so set things up so
-	   that
-
-	         *instr_lb <= frame->f_lasti < *instr_ub
-
-	   is true so long as execution does not change lines.
-
-	   This is all fairly simple.  Digging the information out of
-	   co_lnotab takes some work, but is conceptually clear.
-
-	   Somewhat harder to explain is why we don't *always* call the
-	   line trace function when the above test fails.
-
-	   Consider this code:
-
-	   1: def f(a):
-	   2:     if a:
-	   3:        print 1
-	   4:     else:
-	   5:        print 2
-
-	   which compiles to this:
-
-	   2           0 LOAD_FAST                0 (a)
-		       3 JUMP_IF_FALSE            9 (to 15)
-		       6 POP_TOP
-
-	   3           7 LOAD_CONST               1 (1)
-		      10 PRINT_ITEM
-		      11 PRINT_NEWLINE
-		      12 JUMP_FORWARD             6 (to 21)
-		 >>   15 POP_TOP
-
-	   5          16 LOAD_CONST               2 (2)
-		      19 PRINT_ITEM
-		      20 PRINT_NEWLINE
-		 >>   21 LOAD_CONST               0 (None)
-		      24 RETURN_VALUE
-
-	   If 'a' is false, execution will jump to instruction at offset
-	   15 and the co_lnotab will claim that execution has moved to
-	   line 3.  This is at best misleading.  In this case we could
-	   associate the POP_TOP with line 4, but that doesn't make
-	   sense in all cases (I think).
-
-	   What we do is only call the line trace function if the co_lnotab
-	   indicates we have jumped to the *start* of a line, i.e. if the
-	   current instruction offset matches the offset given for the
-	   start of a line by the co_lnotab.
-
-	   This also takes care of the situation where 'a' is true.
-	   Execution will jump from instruction offset 12 to offset 21.
-	   Then the co_lnotab would imply that execution has moved to line
-	   5, which is again misleading.
-
-	   Why do we set f_lineno when tracing?  Well, consider the code
-	   above when 'a' is true.  If stepping through this with 'n' in
-	   pdb, you would stop at line 1 with a "call" type event, then
-	   line events on lines 2 and 3, then a "return" type event -- but
-	   you would be shown line 5 during this event.  This is a change
-	   from the behaviour in 2.2 and before, and I've found it
-	   confusing in practice.  By setting and using f_lineno when
-	   tracing, one can report a line number different from that
-	   suggested by f_lasti on this one occasion where it's desirable.
-	*/
-
 	int result = 0;
 
+        /* If the last instruction executed isn't in the current
+           instruction window, reset the window.  If the last
+           instruction happens to fall at the start of a line or if it
+           represents a jump backwards, call the trace function.
+        */
 	if ((frame->f_lasti < *instr_lb || frame->f_lasti >= *instr_ub)) {
-		PyCodeObject* co = frame->f_code;
-		int size, addr, line;
-		unsigned char* p;
+                int line;
+                PyAddrPair bounds;
 
-		size = PyString_GET_SIZE(co->co_lnotab) / 2;
-		p = (unsigned char*)PyString_AS_STRING(co->co_lnotab);
-
-		addr = 0;
-		line = co->co_firstlineno;
-
-		/* possible optimization: if f->f_lasti == instr_ub
-		   (likely to be a common case) then we already know
-		   instr_lb -- if we stored the matching value of p
-		   somwhere we could skip the first while loop. */
-
-		/* see comments in compile.c for the description of
-		   co_lnotab.  A point to remember: increments to p
-		   should come in pairs -- although we don't care about
-		   the line increments here, treating them as byte
-		   increments gets confusing, to say the least. */
-
-		while (size > 0) {
-			if (addr + *p > frame->f_lasti)
-				break;
-			addr += *p++;
-			if (*p) *instr_lb = addr;
-			line += *p++;
-			--size;
-		}
-
-		if (addr == frame->f_lasti) {
+                line = PyCode_CheckLineNumber(frame->f_code, frame->f_lasti,
+                                              &bounds);
+                if (line >= 0) {
 			frame->f_lineno = line;
 			result = call_trace(func, obj, frame,
 					    PyTrace_LINE, Py_None);
-		}
-
-		if (size > 0) {
-			while (--size >= 0) {
-				addr += *p++;
-				if (*p++)
-					break;
-			}
-			*instr_ub = addr;
-		}
-		else {
-			*instr_ub = INT_MAX;
-		}
+                }
+                *instr_lb = bounds.ap_lower;
+                *instr_ub = bounds.ap_upper;
 	}
 	else if (frame->f_lasti <= *instr_prev) {
-		/* jumping back in the same line forces a trace event */
-		result = call_trace(func, obj, frame,
-				    PyTrace_LINE, Py_None);
+		result = call_trace(func, obj, frame, PyTrace_LINE, Py_None);
 	}
 	*instr_prev = frame->f_lasti;
 	return result;
@@ -3623,9 +3523,9 @@
 		Py_DECREF(func);
 	}
 
-	/* Clear the stack of the function object and the arguments,
-	   in case they weren't consumed already.
-	   XXX(twouters) when are they not consumed already?
+	/* Clear the stack of the function object.  Also removes
+           the arguments in case they weren't consumed already
+           (fast_function() and err_args() leave them on the stack).
 	 */
 	while ((*pp_stack) > pfunc) {
 		w = EXT_POP(*pp_stack);
@@ -3899,7 +3799,7 @@
 	if (v != NULL) {
 		Py_ssize_t x;
 		if (PyInt_Check(v)) {
-			x = PyInt_AsLong(v);
+			x = PyInt_AsSsize_t(v);
 		} 
 		else if (v->ob_type->tp_as_number &&
 			 PyType_HasFeature(v->ob_type, Py_TPFLAGS_HAVE_INDEX)
@@ -4302,8 +4202,8 @@
 		/* Now we own the last reference to 'v', so we can resize it
 		 * in-place.
 		 */
-		int v_len = PyString_GET_SIZE(v);
-		int w_len = PyString_GET_SIZE(w);
+		Py_ssize_t v_len = PyString_GET_SIZE(v);
+		Py_ssize_t w_len = PyString_GET_SIZE(w);
 		if (_PyString_Resize(&v, v_len + w_len) != 0) {
 			/* XXX if _PyString_Resize() fails, 'v' has been
 			 * deallocated so it cannot be put back into 'variable'.
diff --git a/Python/codecs.c b/Python/codecs.c
index 0e8c374..2124824 100644
--- a/Python/codecs.c
+++ b/Python/codecs.c
@@ -56,12 +56,12 @@
     char *p;
     PyObject *v;
     
-	if (len > INT_MAX) {
-		PyErr_SetString(PyExc_OverflowError, "string is too large");
-		return NULL;
-	}
+    if (len > PY_SSIZE_T_MAX) {
+	PyErr_SetString(PyExc_OverflowError, "string is too large");
+	return NULL;
+    }
 	
-    v = PyString_FromStringAndSize(NULL, (int)len);
+    v = PyString_FromStringAndSize(NULL, len);
     if (v == NULL)
 	return NULL;
     p = PyString_AS_STRING(v);
@@ -200,24 +200,65 @@
     return args;
 }
 
-/* Build a codec by calling factory(stream[,errors]) or just
-   factory(errors) depending on whether the given parameters are
-   non-NULL. */
+/* Helper function to get a codec item */
 
 static
-PyObject *build_stream_codec(PyObject *factory,
-			     PyObject *stream,
-			     const char *errors)
+PyObject *codec_getitem(const char *encoding, int index)
 {
-    PyObject *args, *codec;
+    PyObject *codecs;
+    PyObject *v;
 
-    args = args_tuple(stream, errors);
-    if (args == NULL)
+    codecs = _PyCodec_Lookup(encoding);
+    if (codecs == NULL)
 	return NULL;
-    
-    codec = PyEval_CallObject(factory, args);
-    Py_DECREF(args);
-    return codec;
+    v = PyTuple_GET_ITEM(codecs, index);
+    Py_DECREF(codecs);
+    Py_INCREF(v);
+    return v;
+}
+
+/* Helper function to create an incremental codec. */
+
+static
+PyObject *codec_getincrementalcodec(const char *encoding,
+				    const char *errors,
+				    const char *attrname)
+{
+    PyObject *codecs, *ret, *inccodec;
+
+    codecs = _PyCodec_Lookup(encoding);
+    if (codecs == NULL)
+	return NULL;
+    inccodec = PyObject_GetAttrString(codecs, attrname);
+    Py_DECREF(codecs);
+    if (inccodec == NULL)
+	return NULL;
+    if (errors)
+	ret = PyObject_CallFunction(inccodec, "s", errors);
+    else
+	ret = PyObject_CallFunction(inccodec, NULL);
+    Py_DECREF(inccodec);
+    return ret;
+}
+
+/* Helper function to create a stream codec. */
+
+static
+PyObject *codec_getstreamcodec(const char *encoding,
+			       PyObject *stream,
+			       const char *errors,
+			       const int index)
+{
+    PyObject *codecs, *streamcodec;
+
+    codecs = _PyCodec_Lookup(encoding);
+    if (codecs == NULL)
+	return NULL;
+
+    streamcodec = PyEval_CallFunction(
+	PyTuple_GET_ITEM(codecs, index), "Os", stream, errors);
+    Py_DECREF(codecs);
+    return streamcodec;
 }
 
 /* Convenience APIs to query the Codec registry. 
@@ -228,120 +269,38 @@
 
 PyObject *PyCodec_Encoder(const char *encoding)
 {
-    PyObject *codecs;
-    PyObject *v;
-
-    codecs = _PyCodec_Lookup(encoding);
-    if (codecs == NULL)
-	goto onError;
-    v = PyTuple_GET_ITEM(codecs,0);
-    Py_DECREF(codecs);
-    Py_INCREF(v);
-    return v;
-
- onError:
-    return NULL;
+    return codec_getitem(encoding, 0);
 }
 
 PyObject *PyCodec_Decoder(const char *encoding)
 {
-    PyObject *codecs;
-    PyObject *v;
-
-    codecs = _PyCodec_Lookup(encoding);
-    if (codecs == NULL)
-	goto onError;
-    v = PyTuple_GET_ITEM(codecs,1);
-    Py_DECREF(codecs);
-    Py_INCREF(v);
-    return v;
-
- onError:
-    return NULL;
+    return codec_getitem(encoding, 1);
 }
 
 PyObject *PyCodec_IncrementalEncoder(const char *encoding,
 				     const char *errors)
 {
-    PyObject *codecs, *ret, *encoder;
-
-    codecs = _PyCodec_Lookup(encoding);
-    if (codecs == NULL)
-	goto onError;
-    encoder = PyObject_GetAttrString(codecs, "incrementalencoder");
-    if (encoder == NULL) {
-	Py_DECREF(codecs);
-	return NULL;
-    }
-    if (errors)
-	ret = PyObject_CallFunction(encoder, "O", errors);
-    else
-	ret = PyObject_CallFunction(encoder, NULL);
-    Py_DECREF(encoder);
-    Py_DECREF(codecs);
-    return ret;
-
- onError:
-    return NULL;
+    return codec_getincrementalcodec(encoding, errors, "incrementalencoder");
 }
 
 PyObject *PyCodec_IncrementalDecoder(const char *encoding,
 				     const char *errors)
 {
-    PyObject *codecs, *ret, *decoder;
-
-    codecs = _PyCodec_Lookup(encoding);
-    if (codecs == NULL)
-	goto onError;
-    decoder = PyObject_GetAttrString(codecs, "incrementaldecoder");
-    if (decoder == NULL) {
-	Py_DECREF(codecs);
-	return NULL;
-    }
-    if (errors)
-	ret = PyObject_CallFunction(decoder, "O", errors);
-    else
-	ret = PyObject_CallFunction(decoder, NULL);
-    Py_DECREF(decoder);
-    Py_DECREF(codecs);
-    return ret;
-
- onError:
-    return NULL;
+    return codec_getincrementalcodec(encoding, errors, "incrementaldecoder");
 }
 
 PyObject *PyCodec_StreamReader(const char *encoding,
 			       PyObject *stream,
 			       const char *errors)
 {
-    PyObject *codecs, *ret;
-
-    codecs = _PyCodec_Lookup(encoding);
-    if (codecs == NULL)
-	goto onError;
-    ret = build_stream_codec(PyTuple_GET_ITEM(codecs,2),stream,errors);
-    Py_DECREF(codecs);
-    return ret;
-
- onError:
-    return NULL;
+    return codec_getstreamcodec(encoding, stream, errors, 2);
 }
 
 PyObject *PyCodec_StreamWriter(const char *encoding,
 			       PyObject *stream,
 			       const char *errors)
 {
-    PyObject *codecs, *ret;
-
-    codecs = _PyCodec_Lookup(encoding);
-    if (codecs == NULL)
-	goto onError;
-    ret = build_stream_codec(PyTuple_GET_ITEM(codecs,3),stream,errors);
-    Py_DECREF(codecs);
-    return ret;
-
- onError:
-    return NULL;
+    return codec_getstreamcodec(encoding, stream, errors, 3);
 }
 
 /* Encode an object (e.g. an Unicode object) using the given encoding
diff --git a/Python/compile.c b/Python/compile.c
index b92fb62..12b190a 100644
--- a/Python/compile.c
+++ b/Python/compile.c
@@ -58,8 +58,9 @@
 };
 
 typedef struct basicblock_ {
-	/* next block in the list of blocks for a unit (don't confuse with
-	 * b_next) */
+    /* Each basicblock in a compilation unit is linked via b_list in the
+       reverse order that the block are allocated.  b_list points to the next
+       block, not to be confused with b_next, which is next by control flow. */
 	struct basicblock_ *b_list;
 	/* number of instructions used */
 	int b_iused;
@@ -114,7 +115,9 @@
 	PyObject *u_private;	/* for private name mangling */
 
 	int u_argcount;	   /* number of arguments for block */ 
-	basicblock *u_blocks; /* pointer to list of blocks */
+    /* Pointer to the most recently allocated block.  By following b_list
+       members, you can reach all early allocated blocks. */
+	basicblock *u_blocks;
 	basicblock *u_curblock; /* pointer to current block */
 	int u_tmpname;	   /* temporary variables for list comps */
 
@@ -194,19 +197,19 @@
 static PyObject *__doc__;
 
 PyObject *
-_Py_Mangle(PyObject *private, PyObject *ident)
+_Py_Mangle(PyObject *privateobj, PyObject *ident)
 {
 	/* Name mangling: __private becomes _classname__private.
 	   This is independent from how the name is used. */
 	const char *p, *name = PyString_AsString(ident);
 	char *buffer;
 	size_t nlen, plen;
-	if (private == NULL || name == NULL || name[0] != '_' ||
+	if (privateobj == NULL || name == NULL || name[0] != '_' ||
             name[1] != '_') {
 		Py_INCREF(ident);
 		return ident;
 	}
-	p = PyString_AsString(private);
+	p = PyString_AsString(privateobj);
 	nlen = strlen(name);
 	if (name[nlen-1] == '_' && name[nlen-2] == '_') {
 		Py_INCREF(ident);
@@ -311,7 +314,7 @@
 	if (c->c_st)
 		PySymtable_Free(c->c_st);
 	if (c->c_future)
-		PyMem_Free(c->c_future);
+		PyObject_Free(c->c_future);
 	Py_DECREF(c->c_stack);
 }
 
@@ -319,7 +322,9 @@
 list2dict(PyObject *list)
 {
 	Py_ssize_t i, n;
-	PyObject *v, *k, *dict = PyDict_New();
+	PyObject *v, *k;
+	PyObject *dict = PyDict_New();
+	if (!dict) return NULL;
 
 	n = PyList_Size(list);
 	for (i = 0; i < n; i++) {
@@ -602,7 +607,7 @@
 static unsigned int *
 markblocks(unsigned char *code, int len)
 {
-	unsigned int *blocks = PyMem_Malloc(len*sizeof(int));
+	unsigned int *blocks = (unsigned int *)PyMem_Malloc(len*sizeof(int));
 	int i,j, opcode, blockcnt = 0;
 
 	if (blocks == NULL)
@@ -683,10 +688,11 @@
 		goto exitUnchanged;
 
 	/* Make a modifiable copy of the code string */
-	codestr = PyMem_Malloc(codelen);
+	codestr = (unsigned char *)PyMem_Malloc(codelen);
 	if (codestr == NULL)
 		goto exitUnchanged;
-	codestr = memcpy(codestr, PyString_AS_STRING(code), codelen);
+	codestr = (unsigned char *)memcpy(codestr, 
+                                        PyString_AS_STRING(code), codelen);
 
 	/* Verify that RETURN_VALUE terminates the codestring.	This allows
 	   the various transformation patterns to look ahead several
@@ -697,7 +703,7 @@
 		goto exitUnchanged;
 
 	/* Mapping to new jump targets after NOPs are removed */
-	addrmap = PyMem_Malloc(codelen * sizeof(int));
+	addrmap = (int *)PyMem_Malloc(codelen * sizeof(int));
 	if (addrmap == NULL)
 		goto exitUnchanged;
 
@@ -1077,7 +1083,8 @@
 {
 	struct compiler_unit *u;
 
-	u = PyObject_Malloc(sizeof(struct compiler_unit));
+	u = (struct compiler_unit *)PyObject_Malloc(sizeof(
+                                                struct compiler_unit));
 	if (!u) {
 		PyErr_NoMemory();
 		return 0;
@@ -1187,7 +1194,7 @@
 		return NULL;
 	}
 	memset((void *)b, 0, sizeof(basicblock));
-	assert (b->b_next == NULL);
+    /* Extend the singly linked list of blocks with new block. */
 	b->b_list = u->u_blocks;
 	u->u_blocks = b;
 	return b;
@@ -1233,8 +1240,8 @@
 {
 	assert(b != NULL);
 	if (b->b_instr == NULL) {
-		b->b_instr = PyObject_Malloc(sizeof(struct instr) *
-					     DEFAULT_BLOCK_SIZE);
+		b->b_instr = (struct instr *)PyObject_Malloc(
+                                 sizeof(struct instr) * DEFAULT_BLOCK_SIZE);
 		if (b->b_instr == NULL) {
 			PyErr_NoMemory();
 			return -1;
@@ -1252,7 +1259,8 @@
 			return -1;
 		}
 		b->b_ialloc <<= 1;
-		b->b_instr = PyObject_Realloc((void *)b->b_instr, newsize);
+		b->b_instr = (struct instr *)PyObject_Realloc(
+                                                (void *)b->b_instr, newsize);
 		if (b->b_instr == NULL)
 			return -1;
 		memset((char *)b->b_instr + oldsize, 0, newsize - oldsize);
@@ -1260,6 +1268,13 @@
 	return b->b_iused++;
 }
 
+/* Set the i_lineno member of the instruction at offse off if the
+   line number for the current expression/statement (?) has not
+   already been set.  If it has been set, the call has no effect.
+
+   Every time a new node is b
+   */
+
 static void
 compiler_set_lineno(struct compiler *c, int off)
 {
@@ -1600,7 +1615,6 @@
 	off = compiler_next_instr(c, c->u->u_curblock);
 	if (off < 0)
 		return 0;
-	compiler_set_lineno(c, off);
 	i = &c->u->u_curblock->b_instr[off];
 	i->i_opcode = opcode;
 	i->i_target = b;
@@ -1609,6 +1623,7 @@
 		i->i_jabs = 1;
 	else
 		i->i_jrel = 1;
+	compiler_set_lineno(c, off);
 	return 1;
 }
 
@@ -1695,7 +1710,7 @@
 	int _i; \
 	asdl_seq *seq = (SEQ); /* avoid variable capture */ \
 	for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
-		TYPE ## _ty elt = asdl_seq_GET(seq, _i); \
+		TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
 		if (!compiler_visit_ ## TYPE((C), elt)) \
 			return 0; \
 	} \
@@ -1705,7 +1720,7 @@
 	int _i; \
 	asdl_seq *seq = (SEQ); /* avoid variable capture */ \
 	for (_i = 0; _i < asdl_seq_LEN(seq); _i++) { \
-		TYPE ## _ty elt = asdl_seq_GET(seq, _i); \
+		TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, _i); \
 		if (!compiler_visit_ ## TYPE((C), elt)) { \
 			compiler_exit_scope(c); \
 			return 0; \
@@ -1731,7 +1746,7 @@
 
 	if (!asdl_seq_LEN(stmts))
 		return 1;
-	st = asdl_seq_GET(stmts, 0);
+	st = (stmt_ty)asdl_seq_GET(stmts, 0);
 	if (compiler_isdocstring(st)) {
 		i = 1;
 		VISIT(c, expr, st->v.Expr.value);
@@ -1739,7 +1754,7 @@
 			return 0;
 	}
 	for (; i < asdl_seq_LEN(stmts); i++)
-	    VISIT(c, stmt, asdl_seq_GET(stmts, i));
+	    VISIT(c, stmt, (stmt_ty)asdl_seq_GET(stmts, i));
 	return 1;
 }
 
@@ -1765,7 +1780,8 @@
 		break;
 	case Interactive_kind:
 		c->c_interactive = 1;
-		VISIT_SEQ_IN_SCOPE(c, stmt, mod->v.Interactive.body);
+		VISIT_SEQ_IN_SCOPE(c, stmt, 
+                                        mod->v.Interactive.body);
 		break;
 	case Expression_kind:
 		VISIT_IN_SCOPE(c, expr, mod->v.Expression.body);
@@ -1882,7 +1898,7 @@
 		return 1;
 
 	for (i = 0; i < asdl_seq_LEN(decos); i++) {
-		VISIT(c, expr, asdl_seq_GET(decos, i));
+		VISIT(c, expr, (expr_ty)asdl_seq_GET(decos, i));
 	}
 	return 1;
 }
@@ -1894,7 +1910,7 @@
 	int n = asdl_seq_LEN(args->args);
 	/* Correctly handle nested argument lists */
 	for (i = 0; i < n; i++) {
-		expr_ty arg = asdl_seq_GET(args->args, i);
+		expr_ty arg = (expr_ty)asdl_seq_GET(args->args, i);
 		if (arg->kind == Tuple_kind) {
 			PyObject *id = PyString_FromFormat(".%d", i);
 			if (id == NULL) {
@@ -1931,7 +1947,7 @@
 				  s->lineno))
 		return 0;
 
-	st = asdl_seq_GET(s->v.FunctionDef.body, 0);
+	st = (stmt_ty)asdl_seq_GET(s->v.FunctionDef.body, 0);
 	docstring = compiler_isdocstring(st);
 	if (docstring)
 	    first_const = st->v.Expr.value->v.Str.s;
@@ -1947,7 +1963,7 @@
 	n = asdl_seq_LEN(s->v.FunctionDef.body);
 	/* if there was a docstring, we need to skip the first statement */
 	for (i = docstring; i < n; i++) {
-		stmt_ty s2 = asdl_seq_GET(s->v.FunctionDef.body, i);
+		stmt_ty s2 = (stmt_ty)asdl_seq_GET(s->v.FunctionDef.body, i);
 		if (i == 0 && s2->kind == Expr_kind &&
 		    s2->v.Expr.value->kind == Str_kind)
 			continue;
@@ -2221,7 +2237,7 @@
 		ADDOP(c, POP_BLOCK);
 	}
 	compiler_pop_fblock(c, LOOP, loop);
-	if (orelse != NULL)
+	if (orelse != NULL) /* what if orelse is just pass? */
 		VISIT_SEQ(c, stmt, s->v.While.orelse);
 	compiler_use_next_block(c, end);
 
@@ -2375,10 +2391,12 @@
 	n = asdl_seq_LEN(s->v.TryExcept.handlers);
 	compiler_use_next_block(c, except);
 	for (i = 0; i < n; i++) {
-		excepthandler_ty handler = asdl_seq_GET(
+		excepthandler_ty handler = (excepthandler_ty)asdl_seq_GET(
 						s->v.TryExcept.handlers, i);
 		if (!handler->type && i < n-1)
 		    return compiler_error(c, "default 'except:' must be last");
+        c->u->u_lineno_set = false;
+        c->u->u_lineno = handler->lineno;
 		except = compiler_new_block(c);
 		if (except == NULL)
 			return 0;
@@ -2453,7 +2471,7 @@
 	int i, n = asdl_seq_LEN(s->v.Import.names);
 
 	for (i = 0; i < n; i++) {
-		alias_ty alias = asdl_seq_GET(s->v.Import.names, i);
+		alias_ty alias = (alias_ty)asdl_seq_GET(s->v.Import.names, i);
 		int r;
 		PyObject *level;
 
@@ -2508,7 +2526,7 @@
 
 	/* build up the names */
 	for (i = 0; i < n; i++) {
-		alias_ty alias = asdl_seq_GET(s->v.ImportFrom.names, i);
+		alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
 		Py_INCREF(alias->name);
 		PyTuple_SET_ITEM(names, i, alias->name);
 	}
@@ -2531,7 +2549,7 @@
 	Py_DECREF(names);
 	ADDOP_NAME(c, IMPORT_NAME, s->v.ImportFrom.module, names);
 	for (i = 0; i < n; i++) {
-		alias_ty alias = asdl_seq_GET(s->v.ImportFrom.names, i);
+		alias_ty alias = (alias_ty)asdl_seq_GET(s->v.ImportFrom.names, i);
 		identifier store_name;
 
 		if (i == 0 && *PyString_AS_STRING(alias->name) == '*') {
@@ -2592,8 +2610,10 @@
 {
 	int i, n;
 
+    /* Always assign a lineno to the next instruction for a stmt. */
 	c->u->u_lineno = s->lineno;
 	c->u->u_lineno_set = false;
+
 	switch (s->kind) {
 	case FunctionDef_kind:
 		return compiler_function(c, s);
@@ -2962,11 +2982,11 @@
 	s = e->v.BoolOp.values;
 	n = asdl_seq_LEN(s) - 1;
 	for (i = 0; i < n; ++i) {
-		VISIT(c, expr, asdl_seq_GET(s, i));
+		VISIT(c, expr, (expr_ty)asdl_seq_GET(s, i));
 		ADDOP_JREL(c, jumpi, end);
 		ADDOP(c, POP_TOP)
 	}
-	VISIT(c, expr, asdl_seq_GET(s, n));
+	VISIT(c, expr, (expr_ty)asdl_seq_GET(s, n));
 	compiler_use_next_block(c, end);
 	return 1;
 }
@@ -3013,24 +3033,25 @@
 		cleanup = compiler_new_block(c);
 		if (cleanup == NULL)
 		    return 0;
-		VISIT(c, expr, asdl_seq_GET(e->v.Compare.comparators, 0));
+		VISIT(c, expr, 
+                        (expr_ty)asdl_seq_GET(e->v.Compare.comparators, 0));
 	}
 	for (i = 1; i < n; i++) {
 		ADDOP(c, DUP_TOP);
 		ADDOP(c, ROT_THREE);
-		/* XXX We're casting a void* to cmpop_ty in the next stmt. */
 		ADDOP_I(c, COMPARE_OP,
-			cmpop((cmpop_ty)asdl_seq_GET(e->v.Compare.ops, i - 1)));
+			cmpop((cmpop_ty)(asdl_seq_GET(
+                                                  e->v.Compare.ops, i - 1))));
 		ADDOP_JREL(c, JUMP_IF_FALSE, cleanup);
 		NEXT_BLOCK(c);
 		ADDOP(c, POP_TOP);
 		if (i < (n - 1))
-		    VISIT(c, expr, asdl_seq_GET(e->v.Compare.comparators, i));
+		    VISIT(c, expr, 
+                            (expr_ty)asdl_seq_GET(e->v.Compare.comparators, i));
 	}
-	VISIT(c, expr, asdl_seq_GET(e->v.Compare.comparators, n - 1));
+	VISIT(c, expr, (expr_ty)asdl_seq_GET(e->v.Compare.comparators, n - 1));
 	ADDOP_I(c, COMPARE_OP,
-		/* XXX We're casting a void* to cmpop_ty in the next stmt. */
-	       cmpop((cmpop_ty)asdl_seq_GET(e->v.Compare.ops, n - 1)));
+	       cmpop((cmpop_ty)(asdl_seq_GET(e->v.Compare.ops, n - 1))));
 	if (n > 1) {
 		basicblock *end = compiler_new_block(c);
 		if (end == NULL)
@@ -3043,6 +3064,7 @@
 	}
 	return 1;
 }
+#undef CMPCAST
 
 static int
 compiler_call(struct compiler *c, expr_ty e)
@@ -3102,7 +3124,7 @@
 		anchor == NULL)
 	    return 0;
 
-	l = asdl_seq_GET(generators, gen_index);
+	l = (comprehension_ty)asdl_seq_GET(generators, gen_index);
 	VISIT(c, expr, l->iter);
 	ADDOP(c, GET_ITER);
 	compiler_use_next_block(c, start);
@@ -3113,7 +3135,7 @@
 	/* XXX this needs to be cleaned up...a lot! */
 	n = asdl_seq_LEN(l->ifs);
 	for (i = 0; i < n; i++) {
-		expr_ty e = asdl_seq_GET(l->ifs, i);
+		expr_ty e = (expr_ty)asdl_seq_GET(l->ifs, i);
 		VISIT(c, expr, e);
 		ADDOP_JREL(c, JUMP_IF_FALSE, if_cleanup);
 		NEXT_BLOCK(c);
@@ -3198,7 +3220,7 @@
 	    anchor == NULL || end == NULL)
 		return 0;
 
-	ge = asdl_seq_GET(generators, gen_index);
+	ge = (comprehension_ty)asdl_seq_GET(generators, gen_index);
 	ADDOP_JREL(c, SETUP_LOOP, end);
 	if (!compiler_push_fblock(c, LOOP, start))
 		return 0;
@@ -3221,7 +3243,7 @@
 	/* XXX this needs to be cleaned up...a lot! */
 	n = asdl_seq_LEN(ge->ifs);
 	for (i = 0; i < n; i++) {
-		expr_ty e = asdl_seq_GET(ge->ifs, i);
+		expr_ty e = (expr_ty)asdl_seq_GET(ge->ifs, i);
 		VISIT(c, expr, e);
 		ADDOP_JREL(c, JUMP_IF_FALSE, if_cleanup);
 		NEXT_BLOCK(c);
@@ -3462,6 +3484,9 @@
 {
 	int i, n;
 
+    /* If expr e has a different line number than the last expr/stmt,
+       set a new line number for the next instruction.
+       */
 	if (e->lineno > c->u->u_lineno) {
 		c->u->u_lineno = e->lineno;
 		c->u->u_lineno_set = false;
@@ -3490,9 +3515,11 @@
 		   It wants the stack to look like (value) (dict) (key) */
 		for (i = 0; i < n; i++) {
 			ADDOP(c, DUP_TOP);
-			VISIT(c, expr, asdl_seq_GET(e->v.Dict.values, i));
+			VISIT(c, expr, 
+                                (expr_ty)asdl_seq_GET(e->v.Dict.values, i));
 			ADDOP(c, ROT_TWO);
-			VISIT(c, expr, asdl_seq_GET(e->v.Dict.keys, i));
+			VISIT(c, expr, 
+                                (expr_ty)asdl_seq_GET(e->v.Dict.keys, i));
 			ADDOP(c, STORE_SUBSCR);
 		}
 		break;
@@ -3859,7 +3886,8 @@
 		if (ctx != AugStore) {
 			int i, n = asdl_seq_LEN(s->v.ExtSlice.dims);
 			for (i = 0; i < n; i++) {
-				slice_ty sub = asdl_seq_GET(s->v.ExtSlice.dims, i);
+				slice_ty sub = (slice_ty)asdl_seq_GET(
+                                        s->v.ExtSlice.dims, i);
 				if (!compiler_visit_nested_slice(c, sub, ctx))
 					return 0;
 			}
@@ -4048,7 +4076,7 @@
 {
 	int d_bytecode, d_lineno;
 	int len;
-	char *lnotab;
+	unsigned char *lnotab;
 
 	d_bytecode = a->a_offset - a->a_lineno_off;
 	d_lineno = i->i_lineno - a->a_lineno;
@@ -4071,7 +4099,8 @@
 			if (_PyString_Resize(&a->a_lnotab, len) < 0)
 				return 0;
 		}
-		lnotab = PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
+		lnotab = (unsigned char *)
+			   PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
 		for (j = 0; j < ncodes; j++) {
 			*lnotab++ = 255;
 			*lnotab++ = 0;
@@ -4092,7 +4121,8 @@
 			if (_PyString_Resize(&a->a_lnotab, len) < 0)
 				return 0;
 		}
-		lnotab = PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
+		lnotab = (unsigned char *)
+			   PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
 		*lnotab++ = 255;
 		*lnotab++ = d_bytecode;
 		d_bytecode = 0;
@@ -4109,7 +4139,8 @@
 		if (_PyString_Resize(&a->a_lnotab, len * 2) < 0)
 			return 0;
 	}
-	lnotab = PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
+	lnotab = (unsigned char *)
+			PyString_AS_STRING(a->a_lnotab) + a->a_lnotab_off;
 
 	a->a_lnotab_off += 2;
 	if (d_bytecode) {
diff --git a/Python/dynload_win.c b/Python/dynload_win.c
index 6e8f822..36746e2 100644
--- a/Python/dynload_win.c
+++ b/Python/dynload_win.c
@@ -11,10 +11,16 @@
 const struct filedescr _PyImport_DynLoadFiletab[] = {
 #ifdef _DEBUG
 	{"_d.pyd", "rb", C_EXTENSION},
+	/* Temporarily disable .dll, to avoid conflicts between sqlite3.dll
+	   and the sqlite3 package. If this needs to be reverted for 2.5,
+	   some other solution for the naming conflict must be found.
 	{"_d.dll", "rb", C_EXTENSION},
+	*/
 #else
 	{".pyd", "rb", C_EXTENSION},
+	/* Likewise
 	{".dll", "rb", C_EXTENSION},
+	*/
 #endif
 	{0, 0}
 };
diff --git a/Python/errors.c b/Python/errors.c
index 67f86ed..2ae062f 100644
--- a/Python/errors.c
+++ b/Python/errors.c
@@ -16,6 +16,11 @@
 
 #include <ctype.h>
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
+
 void
 PyErr_Restore(PyObject *type, PyObject *value, PyObject *traceback)
 {
@@ -609,6 +614,7 @@
 				PyFile_WriteString(": ", f);
 				PyFile_WriteObject(v, f, 0);
 			}
+			Py_XDECREF(moduleName);
 		}
 		PyFile_WriteString(" in ", f);
 		PyFile_WriteObject(obj, f, 0);
@@ -796,3 +802,8 @@
 	}
 	return NULL;
 }
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/Python/exceptions.c b/Python/exceptions.c
index b146c97..5c824e6 100644
--- a/Python/exceptions.c
+++ b/Python/exceptions.c
@@ -14,6 +14,7 @@
  * Copyright (c) 1998-2000 by Secret Labs AB.  All rights reserved.
  */
 
+#define PY_SSIZE_T_CLEAN
 #include "Python.h"
 #include "osdefs.h"
 
@@ -893,7 +894,7 @@
 	    if (have_filename)
 		bufsize += PyString_GET_SIZE(filename);
 
-	    buffer = PyMem_MALLOC(bufsize);
+	    buffer = (char *)PyMem_MALLOC(bufsize);
 	    if (buffer != NULL) {
 		if (have_filename && have_lineno)
 		    PyOS_snprintf(buffer, bufsize, "%s (%s, line %ld)",
@@ -1450,8 +1451,8 @@
 	assert(length < INT_MAX);
 	assert(start < INT_MAX);
 	assert(end < INT_MAX);
-    return PyObject_CallFunction(PyExc_UnicodeDecodeError, "ss#iis",
-	encoding, object, (int)length, (int)start, (int)end, reason);
+    return PyObject_CallFunction(PyExc_UnicodeDecodeError, "ss#nns",
+	encoding, object, length, start, end, reason);
 }
 
 
@@ -1565,7 +1566,7 @@
 	const Py_UNICODE *object, Py_ssize_t length,
 	Py_ssize_t start, Py_ssize_t end, const char *reason)
 {
-    return PyObject_CallFunction(PyExc_UnicodeTranslateError, "u#iis",
+    return PyObject_CallFunction(PyExc_UnicodeTranslateError, "u#nns",
 	object, length, start, end, reason);
 }
 #endif
diff --git a/Python/future.c b/Python/future.c
index 1902f1d..d23fad6 100644
--- a/Python/future.c
+++ b/Python/future.c
@@ -19,7 +19,7 @@
 
 	names = s->v.ImportFrom.names;
 	for (i = 0; i < asdl_seq_LEN(names); i++) {
-                alias_ty name = asdl_seq_GET(names, i);
+                alias_ty name = (alias_ty)asdl_seq_GET(names, i);
 		const char *feature = PyString_AsString(name->name);
 		if (!feature)
 			return 0;
@@ -29,7 +29,7 @@
 			continue;
 		} else if (strcmp(feature, FUTURE_DIVISION) == 0) {
 			continue;
-		} else if (strcmp(feature, FUTURE_ABSIMPORT) == 0) {
+		} else if (strcmp(feature, FUTURE_ABSOLUTE_IMPORT) == 0) {
 			continue;
 		} else if (strcmp(feature, FUTURE_WITH_STATEMENT) == 0) {
 			continue;
@@ -73,7 +73,7 @@
 	   
 
 	for (i = 0; i < asdl_seq_LEN(mod->v.Module.body); i++) {
-		stmt_ty s = asdl_seq_GET(mod->v.Module.body, i);
+		stmt_ty s = (stmt_ty)asdl_seq_GET(mod->v.Module.body, i);
 
 		if (done && s->lineno > prev_line)
 			return 1;
@@ -120,14 +120,14 @@
 {
 	PyFutureFeatures *ff;
 
-	ff = (PyFutureFeatures *)PyMem_Malloc(sizeof(PyFutureFeatures));
+	ff = (PyFutureFeatures *)PyObject_Malloc(sizeof(PyFutureFeatures));
 	if (ff == NULL)
 		return NULL;
 	ff->ff_features = 0;
 	ff->ff_lineno = -1;
 
 	if (!future_parse(ff, mod, filename)) {
-		PyMem_Free((void *)ff);
+		PyObject_Free(ff);
 		return NULL;
 	}
 	return ff;
diff --git a/Python/getargs.c b/Python/getargs.c
index fac0b6f..b0715ba 100644
--- a/Python/getargs.c
+++ b/Python/getargs.c
@@ -6,6 +6,9 @@
 #include <ctype.h>
 
 
+#ifdef __cplusplus
+extern "C" { 
+#endif
 int PyArg_Parse(PyObject *, const char *, ...);
 int PyArg_ParseTuple(PyObject *, const char *, ...);
 int PyArg_VaParse(PyObject *, const char *, va_list);
@@ -15,6 +18,18 @@
 int PyArg_VaParseTupleAndKeywords(PyObject *, PyObject *,
 				const char *, char **, va_list);
 
+#ifdef HAVE_DECLSPEC_DLL
+/* Export functions */
+PyAPI_FUNC(int) _PyArg_Parse_SizeT(PyObject *, char *, ...);
+PyAPI_FUNC(int) _PyArg_ParseTuple_SizeT(PyObject *, char *, ...);
+PyAPI_FUNC(int) _PyArg_ParseTupleAndKeywords_SizeT(PyObject *, PyObject *,
+                                                  const char *, char **, ...);
+PyAPI_FUNC(PyObject *) _Py_BuildValue_SizeT(const char *, ...);
+PyAPI_FUNC(int) _PyArg_VaParse_SizeT(PyObject *, char *, va_list);
+PyAPI_FUNC(int) _PyArg_VaParseTupleAndKeywords_SizeT(PyObject *, PyObject *,
+                                              const char *, char **, va_list);
+#endif
+
 #define FLAG_COMPAT 1
 #define FLAG_SIZE_T 2
 
@@ -631,8 +646,8 @@
 		unsigned int ival;
 		if (float_argument_error(arg))
 			return converterr("integer<I>", arg, msgbuf, bufsize);
-		ival = PyInt_AsUnsignedLongMask(arg);
-		if (ival == -1 && PyErr_Occurred())
+		ival = (unsigned int)PyInt_AsUnsignedLongMask(arg);
+		if (ival == (unsigned int)-1 && PyErr_Occurred())
 			return converterr("integer<I>", arg, msgbuf, bufsize);
 		else
 			*p = ival;
@@ -645,10 +660,10 @@
 		Py_ssize_t *p = va_arg(*p_va, Py_ssize_t *);
 		Py_ssize_t ival;
 		if (float_argument_error(arg))
-			return converterr("integer<i>", arg, msgbuf, bufsize);
+			return converterr("integer<n>", arg, msgbuf, bufsize);
 		ival = PyInt_AsSsize_t(arg);
 		if (ival == -1 && PyErr_Occurred())
-			return converterr("integer<i>", arg, msgbuf, bufsize);
+			return converterr("integer<n>", arg, msgbuf, bufsize);
 		*p = ival;
 		break;
 	}
@@ -1040,11 +1055,8 @@
 				STORE_SIZE(PyUnicode_GET_SIZE(arg));
 			}
 			else {
-			char *buf;
-			Py_ssize_t count = convertbuffer(arg, p, &buf);
-			if (count < 0)
-				return converterr(buf, arg, msgbuf, bufsize);
-			STORE_SIZE(count/(sizeof(Py_UNICODE))); 
+				return converterr("cannot convert raw buffers",
+						  arg, msgbuf, bufsize);
 			}
 			format++;
 		} else {
@@ -1743,3 +1755,6 @@
 			funcname);
 	return 0;
 }
+#ifdef __cplusplus
+};
+#endif
diff --git a/Python/getmtime.c b/Python/getmtime.c
index f0ac899..54edb53 100644
--- a/Python/getmtime.c
+++ b/Python/getmtime.c
@@ -6,6 +6,10 @@
 #include "Python.h"
 #include "pyconfig.h"
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 time_t
 PyOS_GetLastModificationTime(char *path, FILE *fp)
 {
@@ -15,3 +19,8 @@
 	else
 		return st.st_mtime;
 }
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/Python/getopt.c b/Python/getopt.c
index d80f607..5429fac5 100644
--- a/Python/getopt.c
+++ b/Python/getopt.c
@@ -27,6 +27,10 @@
 #include <stdio.h>
 #include <string.h>
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 int _PyOS_opterr = 1;          /* generate error messages */
 int _PyOS_optind = 1;          /* index into argv array   */
 char *_PyOS_optarg = NULL;     /* optional argument       */
@@ -81,3 +85,8 @@
 
 	return option;
 }
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/Python/graminit.c b/Python/graminit.c
index 40f1770..a6ac124 100644
--- a/Python/graminit.c
+++ b/Python/graminit.c
@@ -1675,7 +1675,7 @@
 	{91, 1},
 };
 static arc arcs_77_1[1] = {
-	{26, 2},
+	{105, 2},
 };
 static arc arcs_77_2[2] = {
 	{162, 3},
@@ -1732,7 +1732,7 @@
 	{91, 1},
 };
 static arc arcs_80_1[1] = {
-	{26, 2},
+	{105, 2},
 };
 static arc arcs_80_2[2] = {
 	{164, 3},
diff --git a/Python/import.c b/Python/import.c
index b64594d..daae15f 100644
--- a/Python/import.c
+++ b/Python/import.c
@@ -17,6 +17,9 @@
 #ifdef HAVE_FCNTL_H
 #include <fcntl.h>
 #endif
+#ifdef __cplusplus
+extern "C" { 
+#endif
 
 extern time_t PyOS_GetLastModificationTime(char *, FILE *);
 						/* In getmtime.c */
@@ -40,6 +43,7 @@
        Python 1.5:   20121
        Python 1.5.1: 20121
        Python 1.5.2: 20121
+       Python 1.6:   50428
        Python 2.0:   50823
        Python 2.0.1: 50823
        Python 2.1:   60202
@@ -1217,12 +1221,12 @@
 #endif
 		if (!PyString_Check(v))
 			continue;
-		len = PyString_Size(v);
+		len = PyString_GET_SIZE(v);
 		if (len + 2 + namelen + MAXSUFFIXSIZE >= buflen) {
 			Py_XDECREF(copy);
 			continue; /* Too long */
 		}
-		strcpy(buf, PyString_AsString(v));
+		strcpy(buf, PyString_AS_STRING(v));
 		if (strlen(buf) != len) {
 			Py_XDECREF(copy);
 			continue; /* v contains '\0' */
@@ -1934,6 +1938,16 @@
 		}
 		tail = next;
 	}
+	if (tail == Py_None) {
+		/* If tail is Py_None, both get_parent and load_next found
+		   an empty module name: someone called __import__("") or
+		   doctored faulty bytecode */
+		Py_DECREF(tail);
+		Py_DECREF(head);
+		PyErr_SetString(PyExc_ValueError,
+				"Empty module name");
+		return NULL;
+	}
 
 	if (fromlist != NULL) {
 		if (fromlist == Py_None || !PyObject_IsTrue(fromlist))
@@ -2094,7 +2108,8 @@
 	PyObject *result;
 
 	if (strlen(name) == 0) {
-		/* empty module name only happens in 'from . import' */
+		/* completely empty module name should only happen in
+		   'from . import' (or '__import__("")')*/
 		Py_INCREF(mod);
 		*p_name = NULL;
 		return mod;
@@ -2936,3 +2951,7 @@
 
 	return PyImport_ExtendInittab(newtab);
 }
+
+#ifdef __cplusplus
+}
+#endif
diff --git a/Python/mactoolboxglue.c b/Python/mactoolboxglue.c
index 7f0627e..0aa2cfd 100644
--- a/Python/mactoolboxglue.c
+++ b/Python/mactoolboxglue.c
@@ -25,6 +25,7 @@
 
 #include "Python.h"
 #include "pymactoolbox.h"
+#include <arpa/inet.h>	/* for ntohl, htonl */
 
 
 /* Like strerror() but for Mac OS error numbers */
@@ -156,12 +157,14 @@
 int
 PyMac_GetOSType(PyObject *v, OSType *pr)
 {
+	uint32_t tmp;
 	if (!PyString_Check(v) || PyString_Size(v) != 4) {
 		PyErr_SetString(PyExc_TypeError,
 			"OSType arg must be string of 4 chars");
 		return 0;
 	}
-	memcpy((char *)pr, PyString_AsString(v), 4);
+	memcpy((char *)&tmp, PyString_AsString(v), 4);
+	*pr = (OSType)ntohl(tmp);
 	return 1;
 }
 
@@ -169,7 +172,8 @@
 PyObject *
 PyMac_BuildOSType(OSType t)
 {
-	return PyString_FromStringAndSize((char *)&t, 4);
+	uint32_t tmp = htonl((uint32_t)t);
+	return PyString_FromStringAndSize((char *)&tmp, 4);
 }
 
 /* Convert an NumVersion value to a 4-element tuple */
diff --git a/Python/modsupport.c b/Python/modsupport.c
index cb6bdfd..e291014 100644
--- a/Python/modsupport.c
+++ b/Python/modsupport.c
@@ -3,8 +3,11 @@
 
 #include "Python.h"
 
+#define FLAG_SIZE_T 1
 typedef double va_double;
 
+static PyObject *va_build_value(const char *, va_list, int);
+
 /* Package context -- the full module name for package imports */
 char *_Py_PackageContext = NULL;
 
@@ -146,14 +149,14 @@
 /* Generic function to create a value -- the inverse of getargs() */
 /* After an original idea and first implementation by Steven Miale */
 
-static PyObject *do_mktuple(const char**, va_list *, int, int);
-static PyObject *do_mklist(const char**, va_list *, int, int);
-static PyObject *do_mkdict(const char**, va_list *, int, int);
-static PyObject *do_mkvalue(const char**, va_list *);
+static PyObject *do_mktuple(const char**, va_list *, int, int, int);
+static PyObject *do_mklist(const char**, va_list *, int, int, int);
+static PyObject *do_mkdict(const char**, va_list *, int, int, int);
+static PyObject *do_mkvalue(const char**, va_list *, int);
 
 
 static PyObject *
-do_mkdict(const char **p_format, va_list *p_va, int endchar, int n)
+do_mkdict(const char **p_format, va_list *p_va, int endchar, int n, int flags)
 {
 	PyObject *d;
 	int i;
@@ -167,13 +170,13 @@
 	for (i = 0; i < n; i+= 2) {
 		PyObject *k, *v;
 		int err;
-		k = do_mkvalue(p_format, p_va);
+		k = do_mkvalue(p_format, p_va, flags);
 		if (k == NULL) {
 			itemfailed = 1;
 			Py_INCREF(Py_None);
 			k = Py_None;
 		}
-		v = do_mkvalue(p_format, p_va);
+		v = do_mkvalue(p_format, p_va, flags);
 		if (v == NULL) {
 			itemfailed = 1;
 			Py_INCREF(Py_None);
@@ -199,7 +202,7 @@
 }
 
 static PyObject *
-do_mklist(const char **p_format, va_list *p_va, int endchar, int n)
+do_mklist(const char **p_format, va_list *p_va, int endchar, int n, int flags)
 {
 	PyObject *v;
 	int i;
@@ -212,13 +215,13 @@
 	/* Note that we can't bail immediately on error as this will leak
 	   refcounts on any 'N' arguments. */
 	for (i = 0; i < n; i++) {
-		PyObject *w = do_mkvalue(p_format, p_va);
+		PyObject *w = do_mkvalue(p_format, p_va, flags);
 		if (w == NULL) {
 			itemfailed = 1;
 			Py_INCREF(Py_None);
 			w = Py_None;
 		}
-		PyList_SetItem(v, i, w);
+		PyList_SET_ITEM(v, i, w);
 	}
 
 	if (itemfailed) {
@@ -232,7 +235,6 @@
 				"Unmatched paren in format");
 		return NULL;
 	}
-
 	if (endchar)
 		++*p_format;
 	return v;
@@ -250,7 +252,7 @@
 #endif
 
 static PyObject *
-do_mktuple(const char **p_format, va_list *p_va, int endchar, int n)
+do_mktuple(const char **p_format, va_list *p_va, int endchar, int n, int flags)
 {
 	PyObject *v;
 	int i;
@@ -262,45 +264,46 @@
 	/* Note that we can't bail immediately on error as this will leak
 	   refcounts on any 'N' arguments. */
 	for (i = 0; i < n; i++) {
-		PyObject *w = do_mkvalue(p_format, p_va);
+		PyObject *w = do_mkvalue(p_format, p_va, flags);
 		if (w == NULL) {
 			itemfailed = 1;
 			Py_INCREF(Py_None);
 			w = Py_None;
 		}
-		PyTuple_SetItem(v, i, w);
+		PyTuple_SET_ITEM(v, i, w);
 	}
-	if (v != NULL && **p_format != endchar) {
+	if (itemfailed) {
+		/* do_mkvalue() should have already set an error */
 		Py_DECREF(v);
-		v = NULL;
+		return NULL;
+	}
+	if (**p_format != endchar) {
+		Py_DECREF(v);
 		PyErr_SetString(PyExc_SystemError,
 				"Unmatched paren in format");
+		return NULL;
 	}
-	else if (endchar)
+	if (endchar)
 		++*p_format;
-	if (itemfailed) {
-		Py_DECREF(v);
-		v = NULL;
-	}
 	return v;
 }
 
 static PyObject *
-do_mkvalue(const char **p_format, va_list *p_va)
+do_mkvalue(const char **p_format, va_list *p_va, int flags)
 {
 	for (;;) {
 		switch (*(*p_format)++) {
 		case '(':
 			return do_mktuple(p_format, p_va, ')',
-					  countformat(*p_format, ')'));
+					  countformat(*p_format, ')'), flags);
 
 		case '[':
 			return do_mklist(p_format, p_va, ']',
-					 countformat(*p_format, ']'));
+					 countformat(*p_format, ']'), flags);
 
 		case '{':
 			return do_mkdict(p_format, p_va, '}',
-					 countformat(*p_format, '}'));
+					 countformat(*p_format, '}'), flags);
 
 		case 'b':
 		case 'B':
@@ -351,10 +354,13 @@
 		{
 			PyObject *v;
 			Py_UNICODE *u = va_arg(*p_va, Py_UNICODE *);
-			int n;
+			Py_ssize_t n;	
 			if (**p_format == '#') {
 				++*p_format;
-				n = va_arg(*p_va, int);
+				if (flags & FLAG_SIZE_T)
+					n = va_arg(*p_va, Py_ssize_t);
+				else
+					n = va_arg(*p_va, int);
 			}
 			else
 				n = -1;
@@ -393,10 +399,13 @@
 		{
 			PyObject *v;
 			char *str = va_arg(*p_va, char *);
-			int n;
+			Py_ssize_t n;
 			if (**p_format == '#') {
 				++*p_format;
-				n = va_arg(*p_va, int);
+				if (flags & FLAG_SIZE_T)
+					n = va_arg(*p_va, Py_ssize_t);
+				else
+					n = va_arg(*p_va, int);
 			}
 			else
 				n = -1;
@@ -407,7 +416,7 @@
 			else {
 				if (n < 0) {
 					size_t m = strlen(str);
-					if (m > INT_MAX) {
+					if (m > PY_SSIZE_T_MAX) {
 						PyErr_SetString(PyExc_OverflowError,
 							"string too long for Python string");
 						return NULL;
@@ -472,7 +481,18 @@
 	va_list va;
 	PyObject* retval;
 	va_start(va, format);
-	retval = Py_VaBuildValue(format, va);
+	retval = va_build_value(format, va, 0);
+	va_end(va);
+	return retval;
+}
+
+PyObject *
+_Py_BuildValue_SizeT(const char *format, ...)
+{
+	va_list va;
+	PyObject* retval;
+	va_start(va, format);
+	retval = va_build_value(format, va, FLAG_SIZE_T);
 	va_end(va);
 	return retval;
 }
@@ -480,6 +500,18 @@
 PyObject *
 Py_VaBuildValue(const char *format, va_list va)
 {
+	return va_build_value(format, va, 0);
+}
+
+PyObject *
+_Py_VaBuildValue_SizeT(const char *format, va_list va)
+{
+	return va_build_value(format, va, FLAG_SIZE_T);
+}
+
+static PyObject *
+va_build_value(const char *format, va_list va, int flags)
+{
 	const char *f = format;
 	int n = countformat(f, '\0');
 	va_list lva;
@@ -501,8 +533,8 @@
 		return Py_None;
 	}
 	if (n == 1)
-		return do_mkvalue(&f, &lva);
-	return do_mktuple(&f, &lva, '\0', n);
+		return do_mkvalue(&f, &lva, flags);
+	return do_mktuple(&f, &lva, '\0', n, flags);
 }
 
 
diff --git a/Python/pyarena.c b/Python/pyarena.c
index 242ca1d..f11a905 100644
--- a/Python/pyarena.c
+++ b/Python/pyarena.c
@@ -6,9 +6,16 @@
    Measurements with standard library modules suggest the average
    allocation is about 20 bytes and that most compiles use a single
    block.
+
+   TODO(jhylton): Think about a realloc API, maybe just for the last
+   allocation?
 */
 
 #define DEFAULT_BLOCK_SIZE 8192
+#define ALIGNMENT		8
+#define ALIGNMENT_MASK		(ALIGNMENT - 1)
+#define ROUNDUP(x)		(((x) + ALIGNMENT_MASK) & ~ALIGNMENT_MASK)
+
 typedef struct _block {
 	/* Total number of bytes owned by this block available to pass out.
 	 * Read-only after initialization.  The first such byte starts at
@@ -39,9 +46,25 @@
 */
 
 struct _arena {
+        /* Pointer to the first block allocated for the arena, never NULL.
+           It is used only to find the first block when the arena is
+           being freed.
+         */
 	block *a_head;
+
+        /* Pointer to the block currently used for allocation.  It's
+           ab_next field should be NULL.  If it is not-null after a
+           call to block_alloc(), it means a new block has been allocated
+           and a_cur should be reset to point it.
+         */
 	block *a_cur;
+
+        /* A Python list object containing references to all the PyObject
+           pointers associated with this area.  They will be DECREFed
+           when the arena is freed.
+        */
         PyObject *a_objects;
+
 #if defined(Py_DEBUG)
         /* Debug output */
         size_t total_allocs;
@@ -63,7 +86,8 @@
 	b->ab_size = size;
 	b->ab_mem = (void *)(b + 1);
 	b->ab_next = NULL;
-	b->ab_offset = 0;
+	b->ab_offset = ROUNDUP((Py_uintptr_t)(b->ab_mem)) - 
+	  (Py_uintptr_t)(b->ab_mem);
 	return b;
 }
 
@@ -81,19 +105,20 @@
 {
 	void *p;
 	assert(b);
+	size = ROUNDUP(size);
 	if (b->ab_offset + size > b->ab_size) {
 		/* If we need to allocate more memory than will fit in
 		   the default block, allocate a one-off block that is
 		   exactly the right size. */
 		/* TODO(jhylton): Think about space waste at end of block */
-		block *new = block_new(
+		block *newbl = block_new(
 				size < DEFAULT_BLOCK_SIZE ?
 				DEFAULT_BLOCK_SIZE : size);
-		if (!new)
+		if (!newbl)
 			return NULL;
 		assert(!b->ab_next);
-		b->ab_next = new;
-		b = new;
+		b->ab_next = newbl;
+		b = newbl;
 	}
 
 	assert(b->ab_offset + size <= b->ab_size);
@@ -134,6 +159,7 @@
 void
 PyArena_Free(PyArena *arena)
 {
+        int r;
 	assert(arena);
 #if defined(Py_DEBUG)
         /*
@@ -145,7 +171,17 @@
         */
 #endif
 	block_free(arena->a_head);
+	/* This property normally holds, except when the code being compiled
+	   is sys.getobjects(0), in which case there will be two references.
         assert(arena->a_objects->ob_refcnt == 1);
+	*/
+
+        /* Clear all the elements from the list.  This is necessary
+           to guarantee that they will be DECREFed. */
+        r = PyList_SetSlice(arena->a_objects,
+                            0, PyList_GET_SIZE(arena->a_objects), NULL);
+        assert(r == 0);
+        assert(PyList_GET_SIZE(arena->a_objects) == 0);
         Py_DECREF(arena->a_objects);
 	free(arena);
 }
diff --git a/Python/pystate.c b/Python/pystate.c
index 867334e..b8f460f 100644
--- a/Python/pystate.c
+++ b/Python/pystate.c
@@ -23,13 +23,6 @@
 #endif
 
 
-#define ZAP(x) { \
-	PyObject *tmp = (PyObject *)(x); \
-	(x) = NULL; \
-	Py_XDECREF(tmp); \
-}
-
-
 #ifdef WITH_THREAD
 #include "pythread.h"
 static PyThread_type_lock head_mutex = NULL; /* Protects interp->tstate_head */
@@ -37,6 +30,10 @@
 #define HEAD_LOCK() PyThread_acquire_lock(head_mutex, WAIT_LOCK)
 #define HEAD_UNLOCK() PyThread_release_lock(head_mutex)
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 /* The single PyInterpreterState used by this process'
    GILState implementation
 */
@@ -102,12 +99,12 @@
 	for (p = interp->tstate_head; p != NULL; p = p->next)
 		PyThreadState_Clear(p);
 	HEAD_UNLOCK();
-	ZAP(interp->codec_search_path);
-	ZAP(interp->codec_search_cache);
-	ZAP(interp->codec_error_registry);
-	ZAP(interp->modules);
-	ZAP(interp->sysdict);
-	ZAP(interp->builtins);
+	Py_CLEAR(interp->codec_search_path);
+	Py_CLEAR(interp->codec_search_cache);
+	Py_CLEAR(interp->codec_error_registry);
+	Py_CLEAR(interp->modules);
+	Py_CLEAR(interp->sysdict);
+	Py_CLEAR(interp->builtins);
 }
 
 
@@ -211,23 +208,23 @@
 		fprintf(stderr,
 		  "PyThreadState_Clear: warning: thread still has a frame\n");
 
-	ZAP(tstate->frame);
+	Py_CLEAR(tstate->frame);
 
-	ZAP(tstate->dict);
-	ZAP(tstate->async_exc);
+	Py_CLEAR(tstate->dict);
+	Py_CLEAR(tstate->async_exc);
 
-	ZAP(tstate->curexc_type);
-	ZAP(tstate->curexc_value);
-	ZAP(tstate->curexc_traceback);
+	Py_CLEAR(tstate->curexc_type);
+	Py_CLEAR(tstate->curexc_value);
+	Py_CLEAR(tstate->curexc_traceback);
 
-	ZAP(tstate->exc_type);
-	ZAP(tstate->exc_value);
-	ZAP(tstate->exc_traceback);
+	Py_CLEAR(tstate->exc_type);
+	Py_CLEAR(tstate->exc_value);
+	Py_CLEAR(tstate->exc_traceback);
 
 	tstate->c_profilefunc = NULL;
 	tstate->c_tracefunc = NULL;
-	ZAP(tstate->c_profileobj);
-	ZAP(tstate->c_traceobj);
+	Py_CLEAR(tstate->c_profileobj);
+	Py_CLEAR(tstate->c_traceobj);
 }
 
 
@@ -297,23 +294,23 @@
 
 
 PyThreadState *
-PyThreadState_Swap(PyThreadState *new)
+PyThreadState_Swap(PyThreadState *newts)
 {
-	PyThreadState *old = _PyThreadState_Current;
+	PyThreadState *oldts = _PyThreadState_Current;
 
-	_PyThreadState_Current = new;
+	_PyThreadState_Current = newts;
 	/* It should not be possible for more than one thread state
 	   to be used for a thread.  Check this the best we can in debug
 	   builds.
 	*/
 #if defined(Py_DEBUG) && defined(WITH_THREAD)
-	if (new) {
+	if (newts) {
 		PyThreadState *check = PyGILState_GetThisThreadState();
-		if (check && check->interp == new->interp && check != new)
+		if (check && check->interp == newts->interp && check != newts)
 			Py_FatalError("Invalid thread state for this thread");
 	}
 #endif
-	return old;
+	return oldts;
 }
 
 /* An extension mechanism to store arbitrary additional per-thread state.
@@ -356,7 +353,7 @@
 	for (p = interp->tstate_head; p != NULL; p = p->next) {
 		if (p->thread_id != id)
 			continue;
-		ZAP(p->async_exc);
+		Py_CLEAR(p->async_exc);
 		Py_XINCREF(exc);
 		p->async_exc = exc;
 		count += 1;
@@ -491,7 +488,7 @@
 	   called Py_Initialize() and usually PyEval_InitThreads().
 	*/
 	assert(autoInterpreterState); /* Py_Initialize() hasn't been called! */
-	tcur = PyThread_get_key_value(autoTLSkey);
+	tcur = (PyThreadState *)PyThread_get_key_value(autoTLSkey);
 	if (tcur == NULL) {
 		/* Create a new thread state for this thread */
 		tcur = PyThreadState_New(autoInterpreterState);
@@ -518,7 +515,8 @@
 void
 PyGILState_Release(PyGILState_STATE oldstate)
 {
-	PyThreadState *tcur = PyThread_get_key_value(autoTLSkey);
+	PyThreadState *tcur = (PyThreadState *)PyThread_get_key_value(
+                                                                autoTLSkey);
 	if (tcur == NULL)
 		Py_FatalError("auto-releasing thread-state, "
 		              "but no thread-state for this thread");
@@ -551,4 +549,11 @@
 	else if (oldstate == PyGILState_UNLOCKED)
 		PyEval_SaveThread();
 }
+
+#ifdef __cplusplus
+}
+#endif
+
 #endif /* WITH_THREAD */
+
+
diff --git a/Python/pystrtod.c b/Python/pystrtod.c
index 83e792d..db4cad1 100644
--- a/Python/pystrtod.c
+++ b/Python/pystrtod.c
@@ -101,7 +101,7 @@
 		char *copy, *c;
 
 		/* We need to convert the '.' to the locale specific decimal point */
-		copy = malloc(end - nptr + 1 + decimal_point_len);
+		copy = (char *)malloc(end - nptr + 1 + decimal_point_len);
 
 		c = copy;
 		memcpy(c, nptr, decimal_point_pos - nptr);
diff --git a/Python/pythonrun.c b/Python/pythonrun.c
index 2c5400d..99d6d9d 100644
--- a/Python/pythonrun.c
+++ b/Python/pythonrun.c
@@ -30,14 +30,15 @@
 #endif
 
 #ifndef Py_REF_DEBUG
-#  define PRINT_TOTAL_REFS()
+#define PRINT_TOTAL_REFS()
 #else /* Py_REF_DEBUG */
-#  if defined(MS_WIN64)
-#    define PRINT_TOTAL_REFS() fprintf(stderr, "[%Id refs]\n", _Py_RefTotal);
-#  else /* ! MS_WIN64 */
-#    define PRINT_TOTAL_REFS() fprintf(stderr, "[%ld refs]\n", \
-			Py_SAFE_DOWNCAST(_Py_RefTotal, Py_ssize_t, long));
-#  endif /* MS_WIN64 */
+#define PRINT_TOTAL_REFS() fprintf(stderr,				\
+				   "[%" PY_FORMAT_SIZE_T "d refs]\n",	\
+				   _Py_GetRefTotal())
+#endif
+
+#ifdef __cplusplus
+extern "C" {
 #endif
 
 extern char *Py_GetPath(void);
@@ -280,6 +281,16 @@
 		}
 		Py_XDECREF(sys_isatty);
 
+		sys_stream = PySys_GetObject("stderr");
+		sys_isatty = PyObject_CallMethod(sys_stream, "isatty", "");
+		if (!sys_isatty)
+			PyErr_Clear();
+		if(sys_isatty && PyObject_IsTrue(sys_isatty)) {
+			if (!PyFile_SetEncoding(sys_stream, codeset))
+				Py_FatalError("Cannot set codeset of stderr");
+		}
+		Py_XDECREF(sys_isatty);
+
 		if (!Py_FileSystemDefaultEncoding)
 			Py_FileSystemDefaultEncoding = codeset;
 		else
@@ -296,7 +307,7 @@
 
 
 #ifdef COUNT_ALLOCS
-extern void dump_counts(void);
+extern void dump_counts(FILE*);
 #endif
 
 /* Undo the effect of Py_Initialize().
@@ -358,6 +369,13 @@
 	 * XXX I haven't seen a real-life report of either of these.
 	 */
 	PyGC_Collect();
+#ifdef COUNT_ALLOCS
+	/* With COUNT_ALLOCS, it helps to run GC multiple times:
+	   each collection might release some types from the type
+	   list, so they become garbage. */
+	while (PyGC_Collect() > 0)
+		/* nothing */;
+#endif
 
 	/* Destroy all modules */
 	PyImport_Cleanup();
@@ -386,10 +404,10 @@
 
 	/* Debugging stuff */
 #ifdef COUNT_ALLOCS
-	dump_counts();
+	dump_counts(stdout);
 #endif
 
-	PRINT_TOTAL_REFS()
+	PRINT_TOTAL_REFS();
 
 #ifdef Py_TRACE_REFS
 	/* Display all objects still alive -- this can invoke arbitrary
@@ -679,7 +697,7 @@
 	}
 	for (;;) {
 		ret = PyRun_InteractiveOneFlags(fp, filename, flags);
-		PRINT_TOTAL_REFS()
+		PRINT_TOTAL_REFS();
 		if (ret == E_EOF)
 			return 0;
 		/*
@@ -1451,7 +1469,7 @@
 	v = Py_BuildValue("(ziiz)", err->filename,
 			  err->lineno, err->offset, err->text);
 	if (err->text != NULL) {
-		PyMem_DEL(err->text);
+		PyObject_FREE(err->text);
 		err->text = NULL;
 	}
 	w = NULL;
@@ -1666,16 +1684,113 @@
 /* Deprecated C API functions still provided for binary compatiblity */
 
 #undef PyParser_SimpleParseFile
-#undef PyParser_SimpleParseString
-
-node *
+PyAPI_FUNC(node *)
 PyParser_SimpleParseFile(FILE *fp, const char *filename, int start)
 {
 	return PyParser_SimpleParseFileFlags(fp, filename, start, 0);
 }
 
-node *
+#undef PyParser_SimpleParseString
+PyAPI_FUNC(node *)
 PyParser_SimpleParseString(const char *str, int start)
 {
 	return PyParser_SimpleParseStringFlags(str, start, 0);
 }
+
+#undef PyRun_AnyFile
+PyAPI_FUNC(int)
+PyRun_AnyFile(FILE *fp, const char *name)
+{
+	return PyRun_AnyFileExFlags(fp, name, 0, NULL);
+}
+
+#undef PyRun_AnyFileEx
+PyAPI_FUNC(int)
+PyRun_AnyFileEx(FILE *fp, const char *name, int closeit)
+{
+	return PyRun_AnyFileExFlags(fp, name, closeit, NULL);
+}
+
+#undef PyRun_AnyFileFlags
+PyAPI_FUNC(int)
+PyRun_AnyFileFlags(FILE *fp, const char *name, PyCompilerFlags *flags)
+{
+	return PyRun_AnyFileExFlags(fp, name, 0, flags);
+}
+
+#undef PyRun_File
+PyAPI_FUNC(PyObject *)
+PyRun_File(FILE *fp, const char *p, int s, PyObject *g, PyObject *l)
+{
+        return PyRun_FileExFlags(fp, p, s, g, l, 0, NULL);
+}
+
+#undef PyRun_FileEx
+PyAPI_FUNC(PyObject *)
+PyRun_FileEx(FILE *fp, const char *p, int s, PyObject *g, PyObject *l, int c)
+{
+        return PyRun_FileExFlags(fp, p, s, g, l, c, NULL);
+}
+
+#undef PyRun_FileFlags
+PyAPI_FUNC(PyObject *)
+PyRun_FileFlags(FILE *fp, const char *p, int s, PyObject *g, PyObject *l,
+		PyCompilerFlags *flags)
+{
+        return PyRun_FileExFlags(fp, p, s, g, l, 0, flags);
+}
+
+#undef PyRun_SimpleFile
+PyAPI_FUNC(int)
+PyRun_SimpleFile(FILE *f, const char *p)
+{
+	return PyRun_SimpleFileExFlags(f, p, 0, NULL);
+}
+
+#undef PyRun_SimpleFileEx
+PyAPI_FUNC(int)
+PyRun_SimpleFileEx(FILE *f, const char *p, int c)
+{
+	return PyRun_SimpleFileExFlags(f, p, c, NULL);
+}
+
+
+#undef PyRun_String
+PyAPI_FUNC(PyObject *)
+PyRun_String(const char *str, int s, PyObject *g, PyObject *l)
+{
+	return PyRun_StringFlags(str, s, g, l, NULL);
+}
+
+#undef PyRun_SimpleString
+PyAPI_FUNC(int)
+PyRun_SimpleString(const char *s)
+{
+	return PyRun_SimpleStringFlags(s, NULL);
+}
+
+#undef Py_CompileString
+PyAPI_FUNC(PyObject *)
+Py_CompileString(const char *str, const char *p, int s)
+{
+	return Py_CompileStringFlags(str, p, s, NULL);
+}
+
+#undef PyRun_InteractiveOne
+PyAPI_FUNC(int)
+PyRun_InteractiveOne(FILE *f, const char *p)
+{
+	return PyRun_InteractiveOneFlags(f, p, NULL);
+}
+
+#undef PyRun_InteractiveLoop
+PyAPI_FUNC(int)
+PyRun_InteractiveLoop(FILE *f, const char *p)
+{
+	return PyRun_InteractiveLoopFlags(f, p, NULL);
+}
+
+#ifdef __cplusplus
+}
+#endif
+
diff --git a/Python/symtable.c b/Python/symtable.c
index c8eab58..184723d 100644
--- a/Python/symtable.c
+++ b/Python/symtable.c
@@ -227,7 +227,8 @@
 	case Module_kind:
 		seq = mod->v.Module.body;
 		for (i = 0; i < asdl_seq_LEN(seq); i++)
-			if (!symtable_visit_stmt(st, asdl_seq_GET(seq, i)))
+			if (!symtable_visit_stmt(st, 
+                                    (stmt_ty)asdl_seq_GET(seq, i)))
 				goto error;
 		break;
 	case Expression_kind:
@@ -237,7 +238,8 @@
 	case Interactive_kind:
 		seq = mod->v.Interactive.body;
 		for (i = 0; i < asdl_seq_LEN(seq); i++)
-			if (!symtable_visit_stmt(st, asdl_seq_GET(seq, i)))
+			if (!symtable_visit_stmt(st, 
+                                    (stmt_ty)asdl_seq_GET(seq, i)))
 				goto error;
 		break;
 	case Suite_kind:
@@ -506,7 +508,7 @@
 */
 static int
 update_symbols(PyObject *symbols, PyObject *scope, 
-               PyObject *bound, PyObject *free, int class)
+               PyObject *bound, PyObject *free, int classflag)
 {
 	PyObject *name, *v, *u, *w, *free_value = NULL;
 	Py_ssize_t pos = 0;
@@ -541,7 +543,7 @@
 			   the class that has the same name as a local
 			   or global in the class scope.
 			*/
-			if  (class && 
+			if  (classflag && 
 			     PyInt_AS_LONG(o) & (DEF_BOUND | DEF_GLOBAL)) {
 				long i = PyInt_AS_LONG(o) | DEF_FREE_CLASS;
 				o = PyInt_FromLong(i);
@@ -851,7 +853,7 @@
 	int i; \
 	asdl_seq *seq = (SEQ); /* avoid variable capture */ \
 	for (i = 0; i < asdl_seq_LEN(seq); i++) { \
-		TYPE ## _ty elt = asdl_seq_GET(seq, i); \
+		TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, i); \
 		if (!symtable_visit_ ## TYPE((ST), elt)) \
 			return 0; \
 	} \
@@ -861,7 +863,7 @@
 	int i; \
 	asdl_seq *seq = (SEQ); /* avoid variable capture */ \
 	for (i = 0; i < asdl_seq_LEN(seq); i++) { \
-		TYPE ## _ty elt = asdl_seq_GET(seq, i); \
+		TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, i); \
 		if (!symtable_visit_ ## TYPE((ST), elt)) { \
 			symtable_exit_block((ST), (S)); \
 			return 0; \
@@ -873,7 +875,7 @@
 	int i; \
 	asdl_seq *seq = (SEQ); /* avoid variable capture */ \
 	for (i = (START); i < asdl_seq_LEN(seq); i++) { \
-		TYPE ## _ty elt = asdl_seq_GET(seq, i); \
+		TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, i); \
 		if (!symtable_visit_ ## TYPE((ST), elt)) \
 			return 0; \
 	} \
@@ -883,7 +885,7 @@
 	int i; \
 	asdl_seq *seq = (SEQ); /* avoid variable capture */ \
 	for (i = (START); i < asdl_seq_LEN(seq); i++) { \
-		TYPE ## _ty elt = asdl_seq_GET(seq, i); \
+		TYPE ## _ty elt = (TYPE ## _ty)asdl_seq_GET(seq, i); \
 		if (!symtable_visit_ ## TYPE((ST), elt)) { \
 			symtable_exit_block((ST), (S)); \
 			return 0; \
@@ -1036,7 +1038,7 @@
 		int i;
 		asdl_seq *seq = s->v.Global.names;
 		for (i = 0; i < asdl_seq_LEN(seq); i++) {
-			identifier name = asdl_seq_GET(seq, i);
+			identifier name = (identifier)asdl_seq_GET(seq, i);
 			char *c_name = PyString_AS_STRING(name);
 			long cur = symtable_lookup(st, name);
 			if (cur < 0)
@@ -1200,7 +1202,7 @@
 	
         /* go through all the toplevel arguments first */
 	for (i = 0; i < asdl_seq_LEN(args); i++) {
-		expr_ty arg = asdl_seq_GET(args, i);
+		expr_ty arg = (expr_ty)asdl_seq_GET(args, i);
 		if (arg->kind == Name_kind) {
 			assert(arg->v.Name.ctx == Param ||
                                (arg->v.Name.ctx == Store && !toplevel));
@@ -1236,7 +1238,7 @@
 {
 	int i;
 	for (i = 0; i < asdl_seq_LEN(args); i++) {
-		expr_ty arg = asdl_seq_GET(args, i);
+		expr_ty arg = (expr_ty)asdl_seq_GET(args, i);
 		if (arg->kind == Tuple_kind &&
 		    !symtable_visit_params(st, arg->v.Tuple.elts, 0))
 			return 0;
diff --git a/Python/sysmodule.c b/Python/sysmodule.c
index 875f73f..c6eb91f 100644
--- a/Python/sysmodule.c
+++ b/Python/sysmodule.c
@@ -600,10 +600,9 @@
 static PyObject *
 sys_gettotalrefcount(PyObject *self)
 {
-	return PyInt_FromSsize_t(_Py_RefTotal);
+	return PyInt_FromSsize_t(_Py_GetRefTotal());
 }
-
-#endif /* Py_TRACE_REFS */
+#endif /* Py_REF_DEBUG */
 
 PyDoc_STRVAR(getrefcount_doc,
 "getrefcount(object) -> integer\n\
@@ -697,6 +696,10 @@
 10. Number of stack pops performed by call_function()"
 );
 
+#ifdef __cplusplus
+extern "C" {
+#endif
+
 #ifdef Py_TRACE_REFS
 /* Defined in objects.c because it uses static globals if that file */
 extern PyObject *_Py_GetObjects(PyObject *, PyObject *);
@@ -707,6 +710,10 @@
 extern PyObject *_Py_GetDXProfile(PyObject *,  PyObject *);
 #endif
 
+#ifdef __cplusplus
+}
+#endif
+
 static PyMethodDef sys_methods[] = {
 	/* Might as well keep this in alphabetic order */
 	{"callstats", (PyCFunction)PyEval_GetCallStats, METH_NOARGS,
@@ -1065,6 +1072,11 @@
 		if (!PyFile_SetEncoding(sysout, buf))
 			return NULL;
 	}
+	if(isatty(_fileno(stderr))) {
+		sprintf(buf, "cp%d", GetConsoleOutputCP());
+		if (!PyFile_SetEncoding(syserr, buf))
+			return NULL;
+	}
 #endif
 
 	PyDict_SetItemString(sysdict, "stdin", sysin);
@@ -1406,7 +1418,7 @@
 			PyErr_Clear();
 			fputs(buffer, fp);
 		}
-		if (written < 0 || written >= sizeof(buffer)) {
+		if (written < 0 || (size_t)written >= sizeof(buffer)) {
 			const char *truncated = "... truncated";
 			if (PyFile_WriteString(truncated, file) != 0) {
 				PyErr_Clear();
diff --git a/Python/thread_pthread.h b/Python/thread_pthread.h
index 781467f..c29a61c 100644
--- a/Python/thread_pthread.h
+++ b/Python/thread_pthread.h
@@ -26,6 +26,16 @@
 #endif
 #endif
 
+/* Before FreeBSD 5.4, system scope threads was very limited resource
+   in default setting.  So the process scope is preferred to get
+   enough number of threads to work. */
+#ifdef __FreeBSD__
+#include <osreldate.h>
+#if __FreeBSD_version >= 500000 && __FreeBSD_version < 504101
+#undef PTHREAD_SYSTEM_SCHED_SUPPORTED
+#endif
+#endif
+
 #if !defined(pthread_attr_default)
 #  define pthread_attr_default ((pthread_attr_t *)NULL)
 #endif
@@ -138,7 +148,7 @@
 #ifdef THREAD_STACK_SIZE
 	pthread_attr_setstacksize(&attrs, THREAD_STACK_SIZE);
 #endif
-#if defined(PTHREAD_SYSTEM_SCHED_SUPPORTED) && !defined(__FreeBSD__)
+#if defined(PTHREAD_SYSTEM_SCHED_SUPPORTED)
         pthread_attr_setscope(&attrs, PTHREAD_SCOPE_SYSTEM);
 #endif
 
diff --git a/Python/traceback.c b/Python/traceback.c
index 6c11cf5..cfbd833 100644
--- a/Python/traceback.c
+++ b/Python/traceback.c
@@ -39,24 +39,16 @@
 static int
 tb_traverse(PyTracebackObject *tb, visitproc visit, void *arg)
 {
-	int err = 0;
-	if (tb->tb_next) {
-		err = visit((PyObject *)tb->tb_next, arg);
-		if (err)
-			return err;
-	}
-	if (tb->tb_frame) 
-		err = visit((PyObject *)tb->tb_frame, arg);
-	return err;
+	Py_VISIT(tb->tb_next);
+	Py_VISIT(tb->tb_frame);
+	return 0;
 }
 
 static void
 tb_clear(PyTracebackObject *tb)
 {
-	Py_XDECREF(tb->tb_next);
-	Py_XDECREF(tb->tb_frame);
-	tb->tb_next = NULL;
-	tb->tb_frame = NULL;
+	Py_CLEAR(tb->tb_next);
+	Py_CLEAR(tb->tb_frame);
 }
 
 PyTypeObject PyTraceBack_Type = {
@@ -165,7 +157,7 @@
 				}
 				if (PyString_Check(v)) {
 					size_t len;
-					len = PyString_Size(v);
+					len = PyString_GET_SIZE(v);
 					if (len + 1 + taillen >= MAXPATHLEN)
 						continue; /* Too long */
 					strcpy(namebuf, PyString_AsString(v));
diff --git a/README b/README
index 919af4c..1da5430 100644
--- a/README
+++ b/README
@@ -89,7 +89,7 @@
 authors, translators, and people with special formatting requirements.
 
 Unfortunately, new-style classes (new in Python 2.2) have not yet been
-integrated into Python's standard documention.  A collection of
+integrated into Python's standard documentation.  A collection of
 pointers to what has been written is at:
 
     http://www.python.org/doc/newstyle.html
@@ -114,7 +114,7 @@
 mailing lists: see http://www.python.org/community/lists.html for an
 overview of these and many other Python-related mailing lists.
 
-Archives are accessible via the Google Groups usenet archive; see
+Archives are accessible via the Google Groups Usenet archive; see
 http://groups.google.com/.  The mailing lists are also archived, see
 http://www.python.org/community/lists.html for details.
 
@@ -257,28 +257,28 @@
 above) so we can remove them!)
 
 Unix platforms: If your vendor still ships (and you still use) Berkeley DB
-	1.85 you will need to edit Modules/Setup to build the bsddb185
-	module and add a line to sitecustomize.py which makes it the
-	default.  In Modules/Setup a line like
+        1.85 you will need to edit Modules/Setup to build the bsddb185
+        module and add a line to sitecustomize.py which makes it the
+        default.  In Modules/Setup a line like
 
-	    bsddb185 bsddbmodule.c
+            bsddb185 bsddbmodule.c
 
-	should work.  (You may need to add -I, -L or -l flags to direct the
-	compiler and linker to your include files and libraries.)
+        should work.  (You may need to add -I, -L or -l flags to direct the
+        compiler and linker to your include files and libraries.)
 
 XXX I think this next bit is out of date:
 
 64-bit platforms: The modules audioop, imageop and rgbimg don't work.
-	The setup.py script disables them on 64-bit installations.
-	Don't try to enable them in the Modules/Setup file.  They
-	contain code that is quite wordsize sensitive.  (If you have a
-	fix, let us know!)
+        The setup.py script disables them on 64-bit installations.
+        Don't try to enable them in the Modules/Setup file.  They
+        contain code that is quite wordsize sensitive.  (If you have a
+        fix, let us know!)
 
 Solaris: When using Sun's C compiler with threads, at least on Solaris
-	2.5.1, you need to add the "-mt" compiler option (the simplest
-	way is probably to specify the compiler with this option as
-	the "CC" environment variable when running the configure
-	script).
+        2.5.1, you need to add the "-mt" compiler option (the simplest
+        way is probably to specify the compiler with this option as
+        the "CC" environment variable when running the configure
+        script).
 
         When using GCC on Solaris, beware of binutils 2.13 or GCC
         versions built using it.  This mistakenly enables the
@@ -290,136 +290,136 @@
         and 2.8, but may also affect earlier and later versions of the
         OS.
 
-	When the dynamic loader complains about errors finding shared
-	libraries, such as
+        When the dynamic loader complains about errors finding shared
+        libraries, such as
 
-	ld.so.1: ./python: fatal: libstdc++.so.5: open failed:
-	No such file or directory
+        ld.so.1: ./python: fatal: libstdc++.so.5: open failed:
+        No such file or directory
 
-	you need to first make sure that the library is available on
-	your system. Then, you need to instruct the dynamic loader how
-	to find it. You can choose any of the following strategies:
+        you need to first make sure that the library is available on
+        your system. Then, you need to instruct the dynamic loader how
+        to find it. You can choose any of the following strategies:
 
-	1. When compiling Python, set LD_RUN_PATH to the directories
-	   containing missing libraries.
-	2. When running Python, set LD_LIBRARY_PATH to these directories.
-	3. Use crle(8) to extend the search path of the loader.
-	4. Modify the installed GCC specs file, adding -R options into the
-	   *link: section.
+        1. When compiling Python, set LD_RUN_PATH to the directories
+           containing missing libraries.
+        2. When running Python, set LD_LIBRARY_PATH to these directories.
+        3. Use crle(8) to extend the search path of the loader.
+        4. Modify the installed GCC specs file, adding -R options into the
+           *link: section.
 
         The complex object fails to compile on Solaris 10 with gcc 3.4 (at
         least up to 3.4.3).  To work around it, define Py_HUGE_VAL as
         HUGE_VAL(), e.g.:
 
           make CPPFLAGS='-D"Py_HUGE_VAL=HUGE_VAL()" -I. -I$(srcdir)/Include'
-	  ./python setup.py CPPFLAGS='-D"Py_HUGE_VAL=HUGE_VAL()"'
+          ./python setup.py CPPFLAGS='-D"Py_HUGE_VAL=HUGE_VAL()"'
 
 Linux:  A problem with threads and fork() was tracked down to a bug in
-	the pthreads code in glibc version 2.0.5; glibc version 2.0.7
-	solves the problem.  This causes the popen2 test to fail;
-	problem and solution reported by Pablo Bleyer.
+        the pthreads code in glibc version 2.0.5; glibc version 2.0.7
+        solves the problem.  This causes the popen2 test to fail;
+        problem and solution reported by Pablo Bleyer.
 
 Red Hat Linux: Red Hat 9 built Python2.2 in UCS-4 mode and hacked
-	Tcl to support it. To compile Python2.3 with Tkinter, you will
-	need to pass --enable-unicode=ucs4 flag to ./configure.
+        Tcl to support it. To compile Python2.3 with Tkinter, you will
+        need to pass --enable-unicode=ucs4 flag to ./configure.
 
-	There's an executable /usr/bin/python which is Python
-	1.5.2 on most older Red Hat installations; several key Red Hat tools
-	require this version.  Python 2.1.x may be installed as
-	/usr/bin/python2.  The Makefile installs Python as
-	/usr/local/bin/python, which may or may not take precedence
-	over /usr/bin/python, depending on how you have set up $PATH.
+        There's an executable /usr/bin/python which is Python
+        1.5.2 on most older Red Hat installations; several key Red Hat tools
+        require this version.  Python 2.1.x may be installed as
+        /usr/bin/python2.  The Makefile installs Python as
+        /usr/local/bin/python, which may or may not take precedence
+        over /usr/bin/python, depending on how you have set up $PATH.
 
 FreeBSD 3.x and probably platforms with NCurses that use libmytinfo or
-	similar: When using cursesmodule, the linking is not done in
-	the correct order with the defaults.  Remove "-ltermcap" from
-	the readline entry in Setup, and use as curses entry: "curses
-	cursesmodule.c -lmytinfo -lncurses -ltermcap" - "mytinfo" (so
-	called on FreeBSD) should be the name of the auxiliary library
-	required on your platform.  Normally, it would be linked
-	automatically, but not necessarily in the correct order.
+        similar: When using cursesmodule, the linking is not done in
+        the correct order with the defaults.  Remove "-ltermcap" from
+        the readline entry in Setup, and use as curses entry: "curses
+        cursesmodule.c -lmytinfo -lncurses -ltermcap" - "mytinfo" (so
+        called on FreeBSD) should be the name of the auxiliary library
+        required on your platform.  Normally, it would be linked
+        automatically, but not necessarily in the correct order.
 
-BSDI:	BSDI versions before 4.1 have known problems with threads,
-	which can cause strange errors in a number of modules (for
-	instance, the 'test_signal' test script will hang forever.)
-	Turning off threads (with --with-threads=no) or upgrading to
-	BSDI 4.1 solves this problem.
+BSDI:   BSDI versions before 4.1 have known problems with threads,
+        which can cause strange errors in a number of modules (for
+        instance, the 'test_signal' test script will hang forever.)
+        Turning off threads (with --with-threads=no) or upgrading to
+        BSDI 4.1 solves this problem.
 
 DEC Unix: Run configure with --with-dec-threads, or with
-	--with-threads=no if no threads are desired (threads are on by
-	default).  When using GCC, it is possible to get an internal
-	compiler error if optimization is used.  This was reported for
-	GCC 2.7.2.3 on selectmodule.c.  Manually compile the affected
-	file without optimization to solve the problem.
+        --with-threads=no if no threads are desired (threads are on by
+        default).  When using GCC, it is possible to get an internal
+        compiler error if optimization is used.  This was reported for
+        GCC 2.7.2.3 on selectmodule.c.  Manually compile the affected
+        file without optimization to solve the problem.
 
 DEC Ultrix: compile with GCC to avoid bugs in the native compiler,
-	and pass SHELL=/bin/sh5 to Make when installing.
+        and pass SHELL=/bin/sh5 to Make when installing.
 
-AIX:	A complete overhaul of the shared library support is now in
-	place.  See Misc/AIX-NOTES for some notes on how it's done.
-	(The optimizer bug reported at this place in previous releases
-	has been worked around by a minimal code change.) If you get
-	errors about pthread_* functions, during compile or during
-	testing, try setting CC to a thread-safe (reentrant) compiler,
-	like "cc_r".  For full C++ module support, set CC="xlC_r" (or
-	CC="xlC" without thread support).
+AIX:    A complete overhaul of the shared library support is now in
+        place.  See Misc/AIX-NOTES for some notes on how it's done.
+        (The optimizer bug reported at this place in previous releases
+        has been worked around by a minimal code change.) If you get
+        errors about pthread_* functions, during compile or during
+        testing, try setting CC to a thread-safe (reentrant) compiler,
+        like "cc_r".  For full C++ module support, set CC="xlC_r" (or
+        CC="xlC" without thread support).
 
 AIX 5.3: To build a 64-bit version with IBM's compiler, I used the
         following:
 
         export PATH=/usr/bin:/usr/vacpp/bin
-	./configure --with-gcc="xlc_r -q64" --with-cxx="xlC_r -q64" \
+        ./configure --with-gcc="xlc_r -q64" --with-cxx="xlC_r -q64" \
                     --disable-ipv6 AR="ar -X64"
-	make
+        make
 
 HP-UX:  When using threading, you may have to add -D_REENTRANT to the
-	OPT variable in the top-level Makefile; reported by Pat Knight,
-	this seems to make a difference (at least for HP-UX 10.20)
-	even though pyconfig.h defines it. This seems unnecessary when
-	using HP/UX 11 and later - threading seems to work "out of the
-	box".
+        OPT variable in the top-level Makefile; reported by Pat Knight,
+        this seems to make a difference (at least for HP-UX 10.20)
+        even though pyconfig.h defines it. This seems unnecessary when
+        using HP/UX 11 and later - threading seems to work "out of the
+        box".
 
 HP-UX ia64: When building on the ia64 (Itanium) platform using HP's
-	compiler, some experience has shown that the compiler's
-	optimiser produces a completely broken version of python
-	(see http://www.python.org/sf/814976). To work around this,
-	edit the Makefile and remove -O from the OPT line.
+        compiler, some experience has shown that the compiler's
+        optimiser produces a completely broken version of python
+        (see http://www.python.org/sf/814976). To work around this,
+        edit the Makefile and remove -O from the OPT line.
 
-	To build a 64-bit executable on an Itanium 2 system using HP's
-	compiler, use these environment variables:
+        To build a 64-bit executable on an Itanium 2 system using HP's
+        compiler, use these environment variables:
 
-		CC=cc
-		CXX=aCC
-		BASECFLAGS="+DD64"
-		LDFLAGS="+DD64 -lxnet"
+                CC=cc
+                CXX=aCC
+                BASECFLAGS="+DD64"
+                LDFLAGS="+DD64 -lxnet"
 
-	and call configure as:
+        and call configure as:
 
-		./configure --without-gcc
+                ./configure --without-gcc
 
-	then *unset* the environment variables again before running
-	make.  (At least one of these flags causes the build to fail
-	if it remains set.)  You still have to edit the Makefile and
-	remove -O from the OPT line.
+        then *unset* the environment variables again before running
+        make.  (At least one of these flags causes the build to fail
+        if it remains set.)  You still have to edit the Makefile and
+        remove -O from the OPT line.
 
 HP PA-RISC 2.0: A recent bug report (http://www.python.org/sf/546117)
-	suggests that the C compiler in this 64-bit system has bugs
-	in the optimizer that break Python.  Compiling without
-	optimization solves the problems.
+        suggests that the C compiler in this 64-bit system has bugs
+        in the optimizer that break Python.  Compiling without
+        optimization solves the problems.
 
-SCO:	The following apply to SCO 3 only; Python builds out of the box
-	on SCO 5 (or so we've heard).
+SCO:    The following apply to SCO 3 only; Python builds out of the box
+        on SCO 5 (or so we've heard).
 
-	1) Everything works much better if you add -U__STDC__ to the
-	defs.  This is because all the SCO header files are broken.
-	Anything that isn't mentioned in the C standard is
-	conditionally excluded when __STDC__ is defined.
+        1) Everything works much better if you add -U__STDC__ to the
+        defs.  This is because all the SCO header files are broken.
+        Anything that isn't mentioned in the C standard is
+        conditionally excluded when __STDC__ is defined.
 
-	2) Due to the U.S. export restrictions, SCO broke the crypt
-	stuff out into a separate library, libcrypt_i.a so the LIBS
-	needed be set to:
+        2) Due to the U.S. export restrictions, SCO broke the crypt
+        stuff out into a separate library, libcrypt_i.a so the LIBS
+        needed be set to:
 
-		LIBS=' -lsocket -lcrypt_i'
+                LIBS=' -lsocket -lcrypt_i'
 
 UnixWare: There are known bugs in the math library of the system, as well as
         problems in the handling of threads (calling fork in one
@@ -427,61 +427,61 @@
         tests involving threads will fail until those problems are fixed.
 
 SunOS 4.x: When using the SunPro C compiler, you may want to use the
-	'-Xa' option instead of '-Xc', to enable some needed non-ANSI
-	Sunisms.
-	THIS SYSTEM IS NO LONGER SUPPORTED.
+        '-Xa' option instead of '-Xc', to enable some needed non-ANSI
+        Sunisms.
+        THIS SYSTEM IS NO LONGER SUPPORTED.
 
 NeXT:   Not supported anymore. Start with the MacOSX/Darwin code if you
-	want to revive it.
+        want to revive it.
 
-QNX:	Chris Herborth (chrish@qnx.com) writes:
-	configure works best if you use GNU bash; a port is available on
-	ftp.qnx.com in /usr/free.  I used the following process to build,
-	test and install Python 1.5.x under QNX:
+QNX:    Chris Herborth (chrish@qnx.com) writes:
+        configure works best if you use GNU bash; a port is available on
+        ftp.qnx.com in /usr/free.  I used the following process to build,
+        test and install Python 1.5.x under QNX:
 
-	1) CONFIG_SHELL=/usr/local/bin/bash CC=cc RANLIB=: \
-	    ./configure --verbose --without-gcc --with-libm=""
+        1) CONFIG_SHELL=/usr/local/bin/bash CC=cc RANLIB=: \
+            ./configure --verbose --without-gcc --with-libm=""
 
-	2) edit Modules/Setup to activate everything that makes sense for
-	   your system... tested here at QNX with the following modules:
+        2) edit Modules/Setup to activate everything that makes sense for
+           your system... tested here at QNX with the following modules:
 
-		array, audioop, binascii, cPickle, cStringIO, cmath,
-		crypt, curses, errno, fcntl, gdbm, grp, imageop,
-		_locale, math, md5, new, operator, parser, pcre,
-		posix, pwd, readline, regex, reop, rgbimg, rotor,
-		select, signal, socket, soundex, strop, struct,
-		syslog, termios, time, timing, zlib, audioop, imageop, rgbimg
+                array, audioop, binascii, cPickle, cStringIO, cmath,
+                crypt, curses, errno, fcntl, gdbm, grp, imageop,
+                _locale, math, md5, new, operator, parser, pcre,
+                posix, pwd, readline, regex, reop, rgbimg, rotor,
+                select, signal, socket, soundex, strop, struct,
+                syslog, termios, time, timing, zlib, audioop, imageop, rgbimg
 
-	3) make SHELL=/usr/local/bin/bash
+        3) make SHELL=/usr/local/bin/bash
 
-	   or, if you feel the need for speed:
+           or, if you feel the need for speed:
 
-	   make SHELL=/usr/local/bin/bash OPT="-5 -Oil+nrt"
+           make SHELL=/usr/local/bin/bash OPT="-5 -Oil+nrt"
 
-	4) make SHELL=/usr/local/bin/bash test
+        4) make SHELL=/usr/local/bin/bash test
 
-	   Using GNU readline 2.2 seems to behave strangely, but I
-	   think that's a problem with my readline 2.2 port.  :-\
+           Using GNU readline 2.2 seems to behave strangely, but I
+           think that's a problem with my readline 2.2 port.  :-\
 
-	5) make SHELL=/usr/local/bin/bash install
+        5) make SHELL=/usr/local/bin/bash install
 
-	If you get SIGSEGVs while running Python (I haven't yet, but
-	I've only run small programs and the test cases), you're
-	probably running out of stack; the default 32k could be a
-	little tight.  To increase the stack size, edit the Makefile
-	to read: LDFLAGS = -N 48k
+        If you get SIGSEGVs while running Python (I haven't yet, but
+        I've only run small programs and the test cases), you're
+        probably running out of stack; the default 32k could be a
+        little tight.  To increase the stack size, edit the Makefile
+        to read: LDFLAGS = -N 48k
 
-BeOS:	See Misc/BeOS-NOTES for notes about compiling/installing
-	Python on BeOS R3 or later.  Note that only the PowerPC
-	platform is supported for R3; both PowerPC and x86 are
-	supported for R4.
+BeOS:   See Misc/BeOS-NOTES for notes about compiling/installing
+        Python on BeOS R3 or later.  Note that only the PowerPC
+        platform is supported for R3; both PowerPC and x86 are
+        supported for R4.
 
 Cray T3E: Mark Hadfield (m.hadfield@niwa.co.nz) writes:
-	Python can be built satisfactorily on a Cray T3E but based on
-	my experience with the NIWA T3E (2002-05-22, version 2.2.1)
-	there are a few bugs and gotchas. For more information see a
-	thread on comp.lang.python in May 2002 entitled "Building
-	Python on Cray T3E".
+        Python can be built satisfactorily on a Cray T3E but based on
+        my experience with the NIWA T3E (2002-05-22, version 2.2.1)
+        there are a few bugs and gotchas. For more information see a
+        thread on comp.lang.python in May 2002 entitled "Building
+        Python on Cray T3E".
 
         1) Use Cray's cc and not gcc. The latter was reported not to
            work by Konrad Hinsen. It may work now, but it may not.
@@ -491,16 +491,16 @@
 
              MACHDEP=unicosmk
 
-	2) Run configure with option "--enable-unicode=ucs4".
+        2) Run configure with option "--enable-unicode=ucs4".
 
-	3) The Cray T3E does not support dynamic linking, so extension
-	   modules have to be built by adding (or uncommenting) lines
-	   in Modules/Setup. The minimum set of modules is
+        3) The Cray T3E does not support dynamic linking, so extension
+           modules have to be built by adding (or uncommenting) lines
+           in Modules/Setup. The minimum set of modules is
 
-	     posix, new, _sre, unicodedata
+             posix, new, _sre, unicodedata
 
-	   On NIWA's vanilla T3E system the following have also been
-	   included successfully:
+           On NIWA's vanilla T3E system the following have also been
+           included successfully:
 
 	     _codecs, _locale, _socket, _symtable, _testcapi, _weakref
 	     array, binascii, cmath, cPickle, crypt, cStringIO, dbm
@@ -508,28 +508,28 @@
 	     rotor, select, struct, strop, syslog, termios,
 	     time, timing
 
-	4) Once the python executable and library have been built, make
-	   will execute setup.py, which will attempt to build remaining
-	   extensions and link them dynamically. Each of these attempts
-	   will fail but should not halt the make process. This is
-	   normal.
+        4) Once the python executable and library have been built, make
+           will execute setup.py, which will attempt to build remaining
+           extensions and link them dynamically. Each of these attempts
+           will fail but should not halt the make process. This is
+           normal.
 
-	5) Running "make test" uses a lot of resources and causes
-	   problems on our system. You might want to try running tests
-	   singly or in small groups.
+        5) Running "make test" uses a lot of resources and causes
+           problems on our system. You might want to try running tests
+           singly or in small groups.
 
-SGI:	SGI's standard "make" utility (/bin/make or /usr/bin/make)
-	does not check whether a command actually changed the file it
-	is supposed to build.  This means that whenever you say "make"
-	it will redo the link step.  The remedy is to use SGI's much
-	smarter "smake" utility (/usr/sbin/smake), or GNU make.  If
-	you set the first line of the Makefile to #!/usr/sbin/smake
-	smake will be invoked by make (likewise for GNU make).
+SGI:    SGI's standard "make" utility (/bin/make or /usr/bin/make)
+        does not check whether a command actually changed the file it
+        is supposed to build.  This means that whenever you say "make"
+        it will redo the link step.  The remedy is to use SGI's much
+        smarter "smake" utility (/usr/sbin/smake), or GNU make.  If
+        you set the first line of the Makefile to #!/usr/sbin/smake
+        smake will be invoked by make (likewise for GNU make).
 
-	WARNING: There are bugs in the optimizer of some versions of
-	SGI's compilers that can cause bus errors or other strange
-	behavior, especially on numerical operations.  To avoid this,
-	try building with "make OPT=".
+        WARNING: There are bugs in the optimizer of some versions of
+        SGI's compilers that can cause bus errors or other strange
+        behavior, especially on numerical operations.  To avoid this,
+        try building with "make OPT=".
 
 OS/2:   If you are running Warp3 or Warp4 and have IBM's VisualAge C/C++
         compiler installed, just change into the pc\os2vacpp directory
@@ -569,8 +569,8 @@
         additions.
 
         Some people have reported problems building Python after using "fink"
-        to install additional unix software. Disabling fink (remove all references
-        to /sw from your .profile or .login) should solve this.
+        to install additional unix software. Disabling fink (remove all 
+        references to /sw from your .profile or .login) should solve this.
 
         You may want to try the configure option "--enable-framework"
         which installs Python as a framework. The location can be set
@@ -602,8 +602,8 @@
 
         #SSL=/usr/local/ssl
         #_socket socketmodule.c \
-        #	-DUSE_SSL -I$(SSL)/include -I$(SSL)/include/openssl \
-        #	-L$(SSL)/lib -lssl -lcrypto
+        #       -DUSE_SSL -I$(SSL)/include -I$(SSL)/include/openssl \
+        #       -L$(SSL)/lib -lssl -lcrypto
 
         and remove "local/" from the SSL variable.  Finally, just run
         "make"!
@@ -648,69 +648,69 @@
 
 AtheOS: From Octavian Cerna <tavy at ylabs.com>:
 
-	Before building:
+        Before building:
 
-	    Make sure you have shared versions of the libraries you
-	    want to use with Python. You will have to compile them
-	    yourself, or download precompiled packages.
+            Make sure you have shared versions of the libraries you
+            want to use with Python. You will have to compile them
+            yourself, or download precompiled packages.
 
-	    Recommended libraries:
+            Recommended libraries:
 
-		ncurses-4.2
-		readline-4.2a
-		zlib-1.1.4
+                ncurses-4.2
+                readline-4.2a
+                zlib-1.1.4
 
-	Build:
+        Build:
 
-	    $ ./configure --prefix=/usr/python
-	    $ make
+            $ ./configure --prefix=/usr/python
+            $ make
 
-	    Python is always built as a shared library, otherwise
-	    dynamic loading would not work.
+            Python is always built as a shared library, otherwise
+            dynamic loading would not work.
 
-	Testing:
+        Testing:
 
-	    $ make test
+            $ make test
 
-	Install:
+        Install:
 
-	    # make install
-	    # pkgmanager -a /usr/python
+            # make install
+            # pkgmanager -a /usr/python
 
 
-	AtheOS issues:
+        AtheOS issues:
 
-	    - large file support: due to a stdio bug in glibc/libio,
-	      access to large files may not work correctly.  fseeko()
-	      tries to seek to a negative offset.  ftello() returns a
-	      negative offset, it looks like a 32->64bit
-	      sign-extension issue.  The lowlevel functions (open,
-	      lseek, etc) are OK.
-	    - sockets: AF_UNIX is defined in the C library and in
-	      Python, but not implemented in the system.
-	    - select: poll is available in the C library, but does not
-	      work (It does not return POLLNVAL for bad fds and
-	      hangs).
-	    - posix: statvfs and fstatvfs always return ENOSYS.
-	    - disabled modules:
-		- mmap: not yet implemented in AtheOS
-		- nis: broken (on an unconfigured system
-		  yp_get_default_domain() returns junk instead of
-		  error)
-		- dl: dynamic loading doesn't work via dlopen()
-		- resource: getrimit and setrlimit are not yet
-		  implemented
+            - large file support: due to a stdio bug in glibc/libio,
+              access to large files may not work correctly.  fseeko()
+              tries to seek to a negative offset.  ftello() returns a
+              negative offset, it looks like a 32->64bit
+              sign-extension issue.  The lowlevel functions (open,
+              lseek, etc) are OK.
+            - sockets: AF_UNIX is defined in the C library and in
+              Python, but not implemented in the system.
+            - select: poll is available in the C library, but does not
+              work (It does not return POLLNVAL for bad fds and
+              hangs).
+            - posix: statvfs and fstatvfs always return ENOSYS.
+            - disabled modules:
+                - mmap: not yet implemented in AtheOS
+                - nis: broken (on an unconfigured system
+                  yp_get_default_domain() returns junk instead of
+                  error)
+                - dl: dynamic loading doesn't work via dlopen()
+                - resource: getrimit and setrlimit are not yet
+                  implemented
 
-	    - if you are getting segmentation faults, you probably are
-	      low on memory.  AtheOS doesn't handle very well an
-	      out-of-memory condition and simply SEGVs the process.
+            - if you are getting segmentation faults, you probably are
+              low on memory.  AtheOS doesn't handle very well an
+              out-of-memory condition and simply SEGVs the process.
 
-	Tested on:
+        Tested on:
 
-	    AtheOS-0.3.7
-	    gcc-2.95
-	    binutils-2.10
-	    make-3.78
+            AtheOS-0.3.7
+            gcc-2.95
+            binutils-2.10
+            make-3.78
 
 
 Configuring the bsddb and dbm modules
@@ -719,15 +719,25 @@
 Beginning with Python version 2.3, the PyBsddb package
 <http://pybsddb.sf.net/> was adopted into Python as the bsddb package,
 exposing a set of package-level functions which provide
-backwards-compatible behavior.  Only versions 3.1 through 4.1 of
+backwards-compatible behavior.  Only versions 3.3 through 4.4 of
 Sleepycat's libraries provide the necessary API, so older versions
 aren't supported through this interface.  The old bsddb module has
 been retained as bsddb185, though it is not built by default.  Users
 wishing to use it will have to tweak Modules/Setup to build it.  The
 dbm module will still be built against the Sleepycat libraries if
-other preferred alternatives (ndbm, gdbm) are not found, though
-versions of the Sleepycat library prior to 3.1 are not considered.
+other preferred alternatives (ndbm, gdbm) are not found.
 
+Building the sqlite3 module
+---------------------------
+
+To build the sqlite3 module, you'll need the sqlite3 or libsqlite3
+packages installed, including the header files. Many modern operating
+systems distribute the headers in a separate package to the library -
+often it will be the same name as the main package, but with a -dev or
+-devel suffix. 
+
+The version of pysqlite2 that's including in Python needs sqlite3 3.0.8
+or later. setup.py attempts to check that it can find a correct version.
 
 Configuring threads
 -------------------
@@ -757,17 +767,17 @@
     SunOS 5.{1-5}/{gcc,SunPro cc}/solaris   -mt
     SunOS 5.5/{gcc,SunPro cc}/POSIX         (nothing)
     DEC OSF/1 3.x/cc/DCE                    -threads
-	    (butenhof@zko.dec.com)
+            (butenhof@zko.dec.com)
     Digital UNIX 4.x/cc/DCE                 -threads
-	    (butenhof@zko.dec.com)
+            (butenhof@zko.dec.com)
     Digital UNIX 4.x/cc/POSIX               -pthread
-	    (butenhof@zko.dec.com)
+            (butenhof@zko.dec.com)
     AIX 4.1.4/cc_r/d7                       (nothing)
-	    (buhrt@iquest.net)
+            (buhrt@iquest.net)
     AIX 4.1.4/cc_r4/DCE                     (nothing)
-	    (buhrt@iquest.net)
+            (buhrt@iquest.net)
     IRIX 6.2/cc/POSIX                       (nothing)
-	    (robertl@cwi.nl)
+            (robertl@cwi.nl)
 
 
 Linker (ld) libraries and flags for threads
@@ -778,15 +788,15 @@
     SunOS 5.{1-5}/solaris               -lthread
     SunOS 5.5/POSIX                     -lpthread
     DEC OSF/1 3.x/DCE                   -lpthreads -lmach -lc_r -lc
-	    (butenhof@zko.dec.com)
+            (butenhof@zko.dec.com)
     Digital UNIX 4.x/DCE                -lpthreads -lpthread -lmach -lexc -lc
-	    (butenhof@zko.dec.com)
+            (butenhof@zko.dec.com)
     Digital UNIX 4.x/POSIX              -lpthread -lmach -lexc -lc
-	    (butenhof@zko.dec.com)
+            (butenhof@zko.dec.com)
     AIX 4.1.4/{draft7,DCE}              (nothing)
-	    (buhrt@iquest.net)
+            (buhrt@iquest.net)
     IRIX 6.2/POSIX                      -lpthread
-	    (jph@emilia.engr.sgi.com)
+            (jph@emilia.engr.sgi.com)
 
 
 Building a shared libpython
@@ -896,7 +906,7 @@
 *don't* include the output of "make test".  It is useless.  Run the
 failing test manually, as follows:
 
-	./python ./Lib/test/test_whatever.py
+        ./python ./Lib/test/test_whatever.py
 
 (substituting the top of the source tree for '.' if you built in a
 different directory).  This runs the test in verbose mode.
@@ -909,7 +919,7 @@
 (see below), include files, configuration files, and the manual page,
 just type
 
-	make install
+        make install
 
 This will install all platform-independent files in subdirectories of
 the directory given with the --prefix option to configure or to the
@@ -934,7 +944,7 @@
 If you have a previous installation of Python that you don't
 want to replace yet, use
 
-	make altinstall
+        make altinstall
 
 This installs the same set of files as "make install" except it
 doesn't create the hard link to "python<version>" named "python" and
@@ -963,104 +973,124 @@
 Modules/getpath.o.
 
 --with(out)-gcc: The configure script uses gcc (the GNU C compiler) if
-	it finds it.  If you don't want this, or if this compiler is
-	installed but broken on your platform, pass the option
-	--without-gcc.  You can also pass "CC=cc" (or whatever the
-	name of the proper C compiler is) in the environment, but the
-	advantage of using --without-gcc is that this option is
-	remembered by the config.status script for its --recheck
-	option.
+        it finds it.  If you don't want this, or if this compiler is
+        installed but broken on your platform, pass the option
+        --without-gcc.  You can also pass "CC=cc" (or whatever the
+        name of the proper C compiler is) in the environment, but the
+        advantage of using --without-gcc is that this option is
+        remembered by the config.status script for its --recheck
+        option.
 
 --prefix, --exec-prefix: If you want to install the binaries and the
-	Python library somewhere else than in /usr/local/{bin,lib},
-	you can pass the option --prefix=DIRECTORY; the interpreter
-	binary will be installed as DIRECTORY/bin/python and the
-	library files as DIRECTORY/lib/python/*.  If you pass
-	--exec-prefix=DIRECTORY (as well) this overrides the
-	installation prefix for architecture-dependent files (like the
-	interpreter binary).  Note that --prefix=DIRECTORY also
-	affects the default module search path (sys.path), when
-	Modules/config.c is compiled.  Passing make the option
-	prefix=DIRECTORY (and/or exec_prefix=DIRECTORY) overrides the
-	prefix set at configuration time; this may be more convenient
-	than re-running the configure script if you change your mind
-	about the install prefix.
+        Python library somewhere else than in /usr/local/{bin,lib},
+        you can pass the option --prefix=DIRECTORY; the interpreter
+        binary will be installed as DIRECTORY/bin/python and the
+        library files as DIRECTORY/lib/python/*.  If you pass
+        --exec-prefix=DIRECTORY (as well) this overrides the
+        installation prefix for architecture-dependent files (like the
+        interpreter binary).  Note that --prefix=DIRECTORY also
+        affects the default module search path (sys.path), when
+        Modules/config.c is compiled.  Passing make the option
+        prefix=DIRECTORY (and/or exec_prefix=DIRECTORY) overrides the
+        prefix set at configuration time; this may be more convenient
+        than re-running the configure script if you change your mind
+        about the install prefix.
 
 --with-readline: This option is no longer supported.  GNU
-	readline is automatically enabled by setup.py when present.
+        readline is automatically enabled by setup.py when present.
 
 --with-threads: On most Unix systems, you can now use multiple
-	threads, and support for this is enabled by default.  To
-	disable this, pass --with-threads=no.  If the library required
-	for threads lives in a peculiar place, you can use
-	--with-thread=DIRECTORY.  IMPORTANT: run "make clean" after
-	changing (either enabling or disabling) this option, or you
-	will get link errors!  Note: for DEC Unix use
-	--with-dec-threads instead.
+        threads, and support for this is enabled by default.  To
+        disable this, pass --with-threads=no.  If the library required
+        for threads lives in a peculiar place, you can use
+        --with-thread=DIRECTORY.  IMPORTANT: run "make clean" after
+        changing (either enabling or disabling) this option, or you
+        will get link errors!  Note: for DEC Unix use
+        --with-dec-threads instead.
 
 --with-sgi-dl: On SGI IRIX 4, dynamic loading of extension modules is
-	supported by the "dl" library by Jack Jansen, which is
-	ftp'able from ftp://ftp.cwi.nl/pub/dynload/dl-1.6.tar.Z.
-	This is enabled (after you've ftp'ed and compiled the dl
-	library) by passing --with-sgi-dl=DIRECTORY where DIRECTORY
-	is the absolute pathname of the dl library.  (Don't bother on
-	IRIX 5, it already has dynamic linking using SunOS style
-	shared libraries.)  THIS OPTION IS UNSUPPORTED.
+        supported by the "dl" library by Jack Jansen, which is
+        ftp'able from ftp://ftp.cwi.nl/pub/dynload/dl-1.6.tar.Z.
+        This is enabled (after you've ftp'ed and compiled the dl
+        library) by passing --with-sgi-dl=DIRECTORY where DIRECTORY
+        is the absolute pathname of the dl library.  (Don't bother on
+        IRIX 5, it already has dynamic linking using SunOS style
+        shared libraries.)  THIS OPTION IS UNSUPPORTED.
 
 --with-dl-dld: Dynamic loading of modules is rumored to be supported
-	on some other systems: VAX (Ultrix), Sun3 (SunOS 3.4), Sequent
-	Symmetry (Dynix), and Atari ST.  This is done using a
-	combination of the GNU dynamic loading package
-	(ftp://ftp.cwi.nl/pub/dynload/dl-dld-1.1.tar.Z) and an
-	emulation of the SGI dl library mentioned above (the emulation
-	can be found at
-	ftp://ftp.cwi.nl/pub/dynload/dld-3.2.3.tar.Z).  To
-	enable this, ftp and compile both libraries, then call
-	configure, passing it the option
-	--with-dl-dld=DL_DIRECTORY,DLD_DIRECTORY where DL_DIRECTORY is
-	the absolute pathname of the dl emulation library and
-	DLD_DIRECTORY is the absolute pathname of the GNU dld library.
-	(Don't bother on SunOS 4 or 5, they already have dynamic
-	linking using shared libraries.)  THIS OPTION IS UNSUPPORTED.
+        on some other systems: VAX (Ultrix), Sun3 (SunOS 3.4), Sequent
+        Symmetry (Dynix), and Atari ST.  This is done using a
+        combination of the GNU dynamic loading package
+        (ftp://ftp.cwi.nl/pub/dynload/dl-dld-1.1.tar.Z) and an
+        emulation of the SGI dl library mentioned above (the emulation
+        can be found at
+        ftp://ftp.cwi.nl/pub/dynload/dld-3.2.3.tar.Z).  To
+        enable this, ftp and compile both libraries, then call
+        configure, passing it the option
+        --with-dl-dld=DL_DIRECTORY,DLD_DIRECTORY where DL_DIRECTORY is
+        the absolute pathname of the dl emulation library and
+        DLD_DIRECTORY is the absolute pathname of the GNU dld library.
+        (Don't bother on SunOS 4 or 5, they already have dynamic
+        linking using shared libraries.)  THIS OPTION IS UNSUPPORTED.
 
 --with-libm, --with-libc: It is possible to specify alternative
-	versions for the Math library (default -lm) and the C library
-	(default the empty string) using the options
-	--with-libm=STRING and --with-libc=STRING, respectively.  For
-	example, if your system requires that you pass -lc_s to the C
-	compiler to use the shared C library, you can pass
-	--with-libc=-lc_s. These libraries are passed after all other
-	libraries, the C library last.
+        versions for the Math library (default -lm) and the C library
+        (default the empty string) using the options
+        --with-libm=STRING and --with-libc=STRING, respectively.  For
+        example, if your system requires that you pass -lc_s to the C
+        compiler to use the shared C library, you can pass
+        --with-libc=-lc_s. These libraries are passed after all other
+        libraries, the C library last.
 
 --with-libs='libs': Add 'libs' to the LIBS that the python interpreter
-	is linked against.
+        is linked against.
 
---with-cxx=<compiler>: Some C++ compilers require that main() is
-        compiled with the C++ if there is any C++ code in the application.
-        Specifically, g++ on a.out systems may require that to support
-        construction of global objects. With this option, the main() function
-        of Python will be compiled with <compiler>; use that only if you
-        plan to use C++ extension modules, and if your compiler requires
-        compilation of main() as a C++ program.
+--with-cxx-main=<compiler>: If you plan to use C++ extension modules,
+        then -- on some platforms -- you need to compile python's main()
+        function with the C++ compiler. With this option, make will use
+        <compiler> to compile main() *and* to link the python executable.
+        It is likely that the resulting executable depends on the C++
+        runtime library of <compiler>. (The default is --without-cxx-main.)
+
+        There are platforms that do not require you to build Python
+        with a C++ compiler in order to use C++ extension modules.
+        E.g., x86 Linux with ELF shared binaries and GCC 3.x, 4.x is such
+        a platform. We recommend that you configure Python
+        --without-cxx-main on those platforms because a mismatch
+        between the C++ compiler version used to build Python and to
+        build a C++ extension module is likely to cause a crash at
+        runtime.
+
+        The Python installation also stores the variable CXX that
+        determines, e.g., the C++ compiler distutils calls by default
+        to build C++ extensions. If you set CXX on the configure command
+        line to any string of non-zero length, then configure won't
+        change CXX. If you do not preset CXX but pass
+        --with-cxx-main=<compiler>, then configure sets CXX=<compiler>.
+        In all other cases, configure looks for a C++ compiler by
+        some common names (c++, g++, gcc, CC, cxx, cc++, cl) and sets
+        CXX to the first compiler it finds. If it does not find any
+        C++ compiler, then it sets CXX="".
+
+        Similarly, if you want to change the command used to link the
+        python executable, then set LINKCC on the configure command line.
 
 
 --with-pydebug:  Enable additional debugging code to help track down
-	memory management problems.  This allows printing a list of all
-	live objects when the interpreter terminates.
+        memory management problems.  This allows printing a list of all
+        live objects when the interpreter terminates.
 
 --with(out)-universal-newlines: enable reading of text files with
-	foreign newline convention (default: enabled). In other words,
-	any of \r, \n or \r\n is acceptable as end-of-line character.
-	If enabled import and execfile will automatically accept any newline
-	in files. Python code can open a file with open(file, 'U') to
-	read it in universal newline mode. THIS OPTION IS UNSUPPORTED.
+        foreign newline convention (default: enabled). In other words,
+        any of \r, \n or \r\n is acceptable as end-of-line character.
+        If enabled import and execfile will automatically accept any newline
+        in files. Python code can open a file with open(file, 'U') to
+        read it in universal newline mode. THIS OPTION IS UNSUPPORTED.
 
 --with-tsc: Profile using the Pentium timestamping counter (TSC).
 
---with-fpectl:  Enable building the ``fpectl'' module which can be used
-	to control the generation of SIGFPE and its conversion into a
-	Python exception. Note: this module is dangerous or useless
-	except in the hands of experts.
+--with-system-ffi:  Build the _ctypes extension module using an ffi
+	library installed on the system.
 
 
 Building for multiple architectures (using the VPATH feature)
@@ -1081,13 +1111,13 @@
 in /usr/tmp/python (assuming ~guido/src/python is the toplevel
 directory and you want to build in /usr/tmp/python):
 
-	$ mkdir /usr/tmp/python
-	$ cd /usr/tmp/python
-	$ ~guido/src/python/configure
-	[...]
-	$ make
-	[...]
-	$
+        $ mkdir /usr/tmp/python
+        $ cd /usr/tmp/python
+        $ ~guido/src/python/configure
+        [...]
+        $ make
+        [...]
+        $
 
 Note that configure copies the original Setup file to the build
 directory if it finds no Setup file there.  This means that you can
@@ -1184,13 +1214,12 @@
 Most subdirectories have their own README files.  Most files have
 comments.
 
-.cvsignore	Additional filename matching patterns for CVS to ignore
-BeOS/		Files specific to the BeOS port
+BeOS/           Files specific to the BeOS port
 Demo/           Demonstration scripts, modules and programs
-Doc/		Documentation sources (LaTeX)
+Doc/            Documentation sources (LaTeX)
 Grammar/        Input for the parser generator
 Include/        Public header files
-LICENSE		Licensing information
+LICENSE         Licensing information
 Lib/            Python library modules
 Mac/            Macintosh specific resources
 Makefile.pre.in Source from which config.status creates the Makefile.pre
@@ -1198,7 +1227,7 @@
 Modules/        Implementation of most built-in modules
 Objects/        Implementation of most built-in object types
 PC/             Files specific to PC ports (DOS, Windows, OS/2)
-PCbuild/	Build directory for Microsoft Visual C++
+PCbuild/        Build directory for Microsoft Visual C++
 Parser/         The parser and tokenizer and their input handling
 Python/         The byte-compiler and interpreter
 README          The file you're reading now
@@ -1207,6 +1236,7 @@
 configure       Configuration shell script (GNU autoconf output)
 configure.in    Configuration specification (input for GNU autoconf)
 install-sh      Shell script used to install files
+setup.py        Python script used to build extension modules
 
 The following files will (may) be created in the toplevel directory by
 the configuration and build processes:
@@ -1218,8 +1248,8 @@
 pyconfig.h      Configuration header
 config.log      Log from last configure run
 config.status   Status from last run of the configure script
-getbuildinfo.o	Object file from Modules/getbuildinfo.c
-libpython<version>.a	The library archive
+getbuildinfo.o  Object file from Modules/getbuildinfo.c
+libpython<version>.a    The library archive
 python          The executable interpreter
 tags, TAGS      Tags files for vi and Emacs
 
diff --git a/RISCOS/Makefile b/RISCOS/Makefile
index 92f5272..1788b5c 100644
--- a/RISCOS/Makefile
+++ b/RISCOS/Makefile
@@ -74,7 +74,6 @@
 	@.^.Lib.md5/pyd\
 	@.^.Lib.operator/pyd\
 	@.^.Lib.parser/pyd\
-	@.^.Lib.regex/pyd\
 	@.^.Lib.rgbimg/pyd\
 	@.^.Lib.sha/pyd\
 	@.^.Lib.signal/pyd\
@@ -284,10 +283,6 @@
 @.^.Lib.parser/pyd: @.^.Modules.o.parsermodule s.linktab
 	$(MAKEDLK) -d @.^.Lib.parser/pyd -s s.linktab -o @.^.Modules.o.parsermodule -e initparser
 
-@.^.Lib.regex/pyd: @.^.Modules.o.regexmodule @.^.Modules.o.regexpr s.linktab
-	$(LINK) -aof -o @.^.Modules.o.regexlink @.^.Modules.o.regexmodule @.^.Modules.o.regexpr
-	$(MAKEDLK) -d @.^.Lib.regex/pyd -s s.linktab -o @.^.Modules.o.regexlink -e initregex
-
 @.^.Lib.rgbimg/pyd: @.^.Modules.o.rgbimgmodule s.linktab
 	$(MAKEDLK) -d @.^.Lib.rgbimg/pyd -s s.linktab -o @.^.Modules.o.rgbimgmodule -e initrgbimg
 
diff --git a/Tools/bgen/bgen/bgenObjectDefinition.py b/Tools/bgen/bgen/bgenObjectDefinition.py
index a802f93..6f9bd53 100644
--- a/Tools/bgen/bgen/bgenObjectDefinition.py
+++ b/Tools/bgen/bgen/bgenObjectDefinition.py
@@ -383,6 +383,8 @@
         Output("%s_tp_free, /* tp_free */", self.prefix)
 
     def output_tp_initBody_basecall(self):
+        """If a type shares its init call with its base type set output_tp_initBody
+        to output_tp_initBody_basecall"""
         if self.basetype:
             Output("if (%s.tp_init)", self.basetype)
             OutLbrace()
@@ -395,7 +397,6 @@
         if self.output_tp_initBody:
             Output("static int %s_tp_init(PyObject *_self, PyObject *_args, PyObject *_kwds)", self.prefix)
             OutLbrace()
-            self.output_tp_initBody_basecall()
             self.output_tp_initBody()
             OutRbrace()
         else:
@@ -425,7 +426,7 @@
         if self.basetype:
             Output("if (%s.tp_new)", self.basetype)
             OutLbrace()
-            Output("if ( (*%s.tp_init)(_self, _args, _kwds) == NULL) return NULL;", self.basetype)
+            Output("if ( (*%s.tp_new)(type, _args, _kwds) == NULL) return NULL;", self.basetype)
             Dedent()
             Output("} else {")
             Indent()
diff --git a/Tools/buildbot/build.bat b/Tools/buildbot/build.bat
index e3b77be..e96323c 100644
--- a/Tools/buildbot/build.bat
+++ b/Tools/buildbot/build.bat
@@ -1,4 +1,5 @@
 @rem Used by the buildbot "compile" step.
 cmd /c Tools\buildbot\external.bat
 call "%VS71COMNTOOLS%vsvars32.bat"
+cmd /q/c Tools\buildbot\kill_python.bat
 devenv.com /useenv /build Debug PCbuild\pcbuild.sln
diff --git a/Tools/buildbot/external.bat b/Tools/buildbot/external.bat
index 1b032ae..c6d252d 100644
--- a/Tools/buildbot/external.bat
+++ b/Tools/buildbot/external.bat
@@ -12,3 +12,24 @@
 if not exist db-4.4.20\build_win32\debug\libdb44sd.lib (

    devenv db-4.4.20\build_win32\Berkeley_DB.sln /build Debug /project db_static

 )

+

+@rem OpenSSL

+if not exist openssl-0.9.8a svn export http://svn.python.org/projects/external/openssl-0.9.8a

+

+@rem tcltk

+if not exist tcl8.4.12 (

+   if exist tcltk rd /s/q tcltk

+   svn export http://svn.python.org/projects/external/tcl8.4.12

+   svn export http://svn.python.org/projects/external/tk8.4.12

+   cd tcl8.4.12\win

+   nmake -f makefile.vc

+   nmake -f makefile.vc INSTALLDIR=..\..\tcltk install

+   cd ..\..

+   cd tk8.4.12\win

+   nmake -f makefile.vc TCLDIR=..\..\tcl8.4.12

+   nmake -f makefile.vc TCLDIR=..\..\tcl8.4.12 INSTALLDIR=..\..\tcltk install

+)

+

+@rem sqlite

+if not exist sqlite-source-3.3.4 svn export http://svn.python.org/projects/external/sqlite-source-3.3.4

+if not exist build\PCbuild\sqlite3.dll copy sqlite-source-3.3.4\sqlite3.dll build\PCbuild

diff --git a/Tools/buildbot/kill_python.bat b/Tools/buildbot/kill_python.bat
new file mode 100644
index 0000000..d78b6d4
--- /dev/null
+++ b/Tools/buildbot/kill_python.bat
@@ -0,0 +1,3 @@
+cd Tools\buildbot
+nmake /C /S /f kill_python.mak
+kill_python.exe
diff --git a/Tools/buildbot/kill_python.c b/Tools/buildbot/kill_python.c
new file mode 100644
index 0000000..ebc9aa4
--- /dev/null
+++ b/Tools/buildbot/kill_python.c
@@ -0,0 +1,56 @@
+/* This program looks for processes which have build\PCbuild\python.exe
+   in their path and terminates them. */
+#include <windows.h>
+#include <psapi.h>
+#include <stdio.h>
+
+int main()
+{
+	DWORD pids[1024], cbNeeded;
+	int i, num_processes;
+	if (!EnumProcesses(pids, sizeof(pids), &cbNeeded)) {
+		printf("EnumProcesses failed\n");
+		return 1;
+	}
+	num_processes = cbNeeded/sizeof(pids[0]);
+	for (i = 0; i < num_processes; i++) {
+		HANDLE hProcess;
+		char path[MAX_PATH];
+		HMODULE mods[1024];
+		int k, num_mods;
+		hProcess = OpenProcess(PROCESS_QUERY_INFORMATION 
+					| PROCESS_VM_READ 
+					|  PROCESS_TERMINATE ,
+					FALSE, pids[i]);
+		if (!hProcess)
+			/* process not accessible */
+			continue;
+		if (!EnumProcessModules(hProcess, mods, sizeof(mods), &cbNeeded)) {
+			/* For unknown reasons, this sometimes returns ERROR_PARTIAL_COPY;
+			   this apparently means we are not supposed to read the process. */
+			if (GetLastError() == ERROR_PARTIAL_COPY) {
+				CloseHandle(hProcess);
+				continue;
+			}
+			printf("EnumProcessModules failed: %d\n", GetLastError());
+			return 1;
+		}
+		if (!GetModuleFileNameEx(hProcess, NULL, path, sizeof(path))) {
+			printf("GetProcessImageFileName failed\n");
+			return 1;
+		}
+
+		_strlwr(path);
+		/* printf("%s\n", path); */
+		if (strstr(path, "build\\pcbuild\\python_d.exe") != NULL) {
+			printf("Terminating %s (pid %d)\n", path, pids[i]);
+			if (!TerminateProcess(hProcess, 1)) {
+				printf("Termination failed: %d\n", GetLastError());
+				return 1;
+			}
+			return 0;
+		}
+
+		CloseHandle(hProcess);
+	}
+}
diff --git a/Tools/buildbot/kill_python.mak b/Tools/buildbot/kill_python.mak
new file mode 100644
index 0000000..6027d3f
--- /dev/null
+++ b/Tools/buildbot/kill_python.mak
@@ -0,0 +1,2 @@
+kill_python.exe:	kill_python.c
+		cl -nologo -o kill_python.exe kill_python.c psapi.lib
diff --git a/Tools/i18n/msgfmt.py b/Tools/i18n/msgfmt.py
index b4ae3e9..6433131 100755
--- a/Tools/i18n/msgfmt.py
+++ b/Tools/i18n/msgfmt.py
@@ -127,7 +127,7 @@
             section = None
             fuzzy = 0
         # Record a fuzzy mark
-        if l[:2] == '#,' and l.find('fuzzy'):
+        if l[:2] == '#,' and 'fuzzy' in l:
             fuzzy = 1
         # Skip comments
         if l[0] == '#':
diff --git a/Tools/msi/msi.py b/Tools/msi/msi.py
index 6665d36..efa1696 100644
--- a/Tools/msi/msi.py
+++ b/Tools/msi/msi.py
@@ -6,6 +6,7 @@
 import uisample
 from win32com.client import constants
 from distutils.spawn import find_executable
+from uuids import product_codes
 
 # Settings can be overridden in config.py below
 # 0 for official python.org releases
@@ -23,6 +24,8 @@
 full_current_version = None
 # Is Tcl available at all?
 have_tcl = True
+# Where is sqlite3.dll located, relative to srcdir?
+sqlite_dir = "../sqlite-source-3.3.4"
 
 try:
     from config import *
@@ -62,30 +65,6 @@
 upgrade_code_snapshot='{92A24481-3ECB-40FC-8836-04B7966EC0D5}'
 upgrade_code='{65E6DE48-A358-434D-AA4F-4AF72DB4718F}'
 
-# This should be extended for each Python release.
-# The product code must change whenever the name of the MSI file
-# changes, and when new component codes are issued for existing
-# components. See "Changing the Product Code". As we change the
-# component codes with every build, we need a new product code
-# each time. For intermediate (snapshot) releases, they are automatically
-# generated. For official releases, we record the product codes,
-# so people can refer to them.
-product_codes = {
-    '2.4.101': '{0e9b4d8e-6cda-446e-a208-7b92f3ddffa0}', # 2.4a1, released as a snapshot
-    '2.4.102': '{1b998745-4901-4edb-bc52-213689e1b922}', # 2.4a2
-    '2.4.103': '{33fc8bd2-1e8f-4add-a40a-ade2728d5942}', # 2.4a3
-    '2.4.111': '{51a7e2a8-2025-4ef0-86ff-e6aab742d1fa}', # 2.4b1
-    '2.4.112': '{4a5e7c1d-c659-4fe3-b8c9-7c65bd9c95a5}', # 2.4b2
-    '2.4.121': '{75508821-a8e9-40a8-95bd-dbe6033ddbea}', # 2.4c1
-    '2.4.122': '{83a9118b-4bdd-473b-afc3-bcb142feca9e}', # 2.4c2
-    '2.4.150': '{82d9302e-f209-4805-b548-52087047483a}', # 2.4.0
-    '2.4.1121':'{be027411-8e6b-4440-a29b-b07df0690230}', # 2.4.1c1
-    '2.4.1122':'{02818752-48bf-4074-a281-7a4114c4f1b1}', # 2.4.1c2
-    '2.4.1150':'{4d4f5346-7e4a-40b5-9387-fdb6181357fc}', # 2.4.1
-    '2.4.2121':'{5ef9d6b6-df78-45d2-ab09-14786a3c5a99}', # 2.4.2c1
-    '2.4.2150':'{b191e49c-ea23-43b2-b28a-14e0784069b8}', # 2.4.2
-}
-
 if snapshot:
     current_version = "%s.%s.%s" % (major, minor, int(time.time()/3600/24))
     product_code = msilib.gen_uuid()
@@ -109,14 +88,10 @@
     '_tkinter.pyd',
     '_msi.pyd',
     '_ctypes.pyd',
-    '_ctypes_test.pyd'
+    '_ctypes_test.pyd',
+    '_sqlite3.pyd'
 ]
 
-if major+minor <= "24":
-    extensions.extend([
-    'zlib.pyd',
-    ])
-
 # Well-known component UUIDs
 # These are needed for SharedDLLs reference counter; if
 # a different UUID was used for each incarnation of, say,
@@ -392,7 +367,7 @@
               ("VerdanaRed9", "Verdana", 9, 255, 0),
              ])
 
-    compileargs = r"-Wi [TARGETDIR]Lib\compileall.py -f -x badsyntax [TARGETDIR]Lib"
+    compileargs = r"-Wi [TARGETDIR]Lib\compileall.py -f -x bad_coding|badsyntax|site-packages [TARGETDIR]Lib"
     # See "CustomAction Table"
     add_data(db, "CustomAction", [
         # msidbCustomActionTypeFirstSequence + msidbCustomActionTypeTextData + msidbCustomActionTypeProperty
@@ -491,7 +466,7 @@
     c = exit_dialog.text("warning", 135, 200, 220, 40, 0x30003,
             "{\\VerdanaRed9}Warning: Python 2.5.x is the last "
             "Python release for Windows 9x.")
-    c.condition("Hide", "NOT Version9x")
+    c.condition("Hide", "NOT Version9X")
 
     exit_dialog.text("Description", 135, 235, 220, 20, 0x30003,
                "Click the Finish button to exit the Installer.")
@@ -914,7 +889,7 @@
                 continue
             tcltk.set_current()
         elif dir in ['test', 'tests', 'data', 'output']:
-            # test: Lib, Lib/email, Lib/bsddb
+            # test: Lib, Lib/email, Lib/bsddb, Lib/ctypes, Lib/sqlite3
             # tests: Lib/distutils
             # data: Lib/email/test
             # output: Lib/test
@@ -941,6 +916,8 @@
             lib.add_file("test.xml.out")
             lib.add_file("testtar.tar")
             lib.add_file("test_difflib_expect.html")
+            lib.add_file("check_soundcard.vbs")
+            lib.add_file("empty.vbs")
             lib.glob("*.uue")
             lib.add_file("readme.txt", src="README")
         if dir=='decimaltestdata':
@@ -990,6 +967,14 @@
             tcldir = os.path.normpath(srcdir+"/../tcltk/bin")
             for f in glob.glob1(tcldir, "*.dll"):
                 lib.add_file(f, src=os.path.join(tcldir, f))
+    # Add sqlite
+    if msilib.msi_type=="Intel64;1033":
+        sqlite_arch = "/ia64"
+    elif msilib.msi_type=="x64;1033":
+        sqlite_arch = "/amd64"
+    else:
+        sqlite_arch = ""
+    lib.add_file(srcdir+"/"+sqlite_dir+sqlite_arch+"/sqlite3.dll")
     # check whether there are any unknown extensions
     for f in glob.glob1(srcdir+"/PCBuild", "*.pyd"):
         if f.endswith("_d.pyd"): continue # debug version
diff --git a/Tools/msi/uuids.py b/Tools/msi/uuids.py
new file mode 100644
index 0000000..ce7e604
--- /dev/null
+++ b/Tools/msi/uuids.py
@@ -0,0 +1,33 @@
+# This should be extended for each Python release.
+# The product code must change whenever the name of the MSI file
+# changes, and when new component codes are issued for existing
+# components. See "Changing the Product Code". As we change the
+# component codes with every build, we need a new product code
+# each time. For intermediate (snapshot) releases, they are automatically
+# generated. For official releases, we record the product codes,
+# so people can refer to them.
+product_codes = {
+    '2.4.101': '{0e9b4d8e-6cda-446e-a208-7b92f3ddffa0}', # 2.4a1, released as a snapshot
+    '2.4.102': '{1b998745-4901-4edb-bc52-213689e1b922}', # 2.4a2
+    '2.4.103': '{33fc8bd2-1e8f-4add-a40a-ade2728d5942}', # 2.4a3
+    '2.4.111': '{51a7e2a8-2025-4ef0-86ff-e6aab742d1fa}', # 2.4b1
+    '2.4.112': '{4a5e7c1d-c659-4fe3-b8c9-7c65bd9c95a5}', # 2.4b2
+    '2.4.121': '{75508821-a8e9-40a8-95bd-dbe6033ddbea}', # 2.4c1
+    '2.4.122': '{83a9118b-4bdd-473b-afc3-bcb142feca9e}', # 2.4c2
+    '2.4.150': '{82d9302e-f209-4805-b548-52087047483a}', # 2.4.0
+    '2.4.1121':'{be027411-8e6b-4440-a29b-b07df0690230}', # 2.4.1c1
+    '2.4.1122':'{02818752-48bf-4074-a281-7a4114c4f1b1}', # 2.4.1c2
+    '2.4.1150':'{4d4f5346-7e4a-40b5-9387-fdb6181357fc}', # 2.4.1
+    '2.4.2121':'{5ef9d6b6-df78-45d2-ab09-14786a3c5a99}', # 2.4.2c1
+    '2.4.2150':'{b191e49c-ea23-43b2-b28a-14e0784069b8}', # 2.4.2
+    '2.4.3121':'{f669ed4d-1dce-41c4-9617-d985397187a1}', # 2.4.3c1
+    '2.4.3150':'{75e71add-042c-4f30-bfac-a9ec42351313}', # 2.4.3
+    '2.5.101': '{bc14ce3e-5e72-4a64-ac1f-bf59a571898c}', # 2.5a1
+    '2.5.102': '{5eed51c1-8e9d-4071-94c5-b40de5d49ba5}', # 2.5a2
+    '2.5.103': '{73dcd966-ffec-415f-bb39-8342c1f47017}', # 2.5a3
+    '2.5.111': '{c797ecf8-a8e6-4fec-bb99-526b65f28626}', # 2.5b1
+    '2.5.112': '{32beb774-f625-439d-b587-7187487baf15}', # 2.5b2
+    '2.5.121': '{8e9321bc-6b24-48a3-8fd4-c95f8e531e5f}', # 2.5c1
+    '2.5.122': '{a6cd508d-9599-45da-a441-cbffa9f7e070}', # 2.5c2
+    '2.5.150': '{0a2c5854-557e-48c8-835a-3b9f074bdcaa}', # 2.5.0
+}
diff --git a/Tools/pybench/Arithmetic.py b/Tools/pybench/Arithmetic.py
new file mode 100644
index 0000000..e95c30a
--- /dev/null
+++ b/Tools/pybench/Arithmetic.py
@@ -0,0 +1,778 @@
+from pybench import Test
+
+class SimpleIntegerArithmetic(Test):
+
+    version = 0.3
+    operations = 5 * (3 + 5 + 5 + 3 + 3 + 3)
+    rounds = 120000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            a = 2
+            b = 3
+            c = 3
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2
+            b = 3
+            c = 3
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2
+            b = 3
+            c = 3
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2
+            b = 3
+            c = 3
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2
+            b = 3
+            c = 3
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+
+class SimpleFloatArithmetic(Test):
+
+    version = 0.3
+    operations = 5 * (3 + 5 + 5 + 3 + 3 + 3)
+    rounds = 100000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            a = 2.1
+            b = 3.3332
+            c = 3.14159
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2.1
+            b = 3.3332
+            c = 3.14159
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2.1
+            b = 3.3332
+            c = 3.14159
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2.1
+            b = 3.3332
+            c = 3.14159
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2.1
+            b = 3.3332
+            c = 3.14159
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+
+class SimpleIntFloatArithmetic(Test):
+
+    version = 0.3
+    operations = 5 * (3 + 5 + 5 + 3 + 3 + 3)
+    rounds = 120000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            a = 2
+            b = 3
+            c = 3.14159
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2
+            b = 3
+            c = 3.14159
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2
+            b = 3
+            c = 3.14159
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2
+            b = 3
+            c = 3.14159
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2
+            b = 3
+            c = 3.14159
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+
+
+class SimpleLongArithmetic(Test):
+
+    version = 0.3
+    operations = 5 * (3 + 5 + 5 + 3 + 3 + 3)
+    rounds = 30000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            a = 2220001L
+            b = 100001L
+            c = 30005L
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2220001L
+            b = 100001L
+            c = 30005L
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2220001L
+            b = 100001L
+            c = 30005L
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2220001L
+            b = 100001L
+            c = 30005L
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2220001L
+            b = 100001L
+            c = 30005L
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+
+class SimpleComplexArithmetic(Test):
+
+    version = 0.3
+    operations = 5 * (3 + 5 + 5 + 3 + 3 + 3)
+    rounds = 40000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            a = 2 + 3j
+            b = 2.5 + 4.5j
+            c = 1.2 + 6.2j
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2 + 3j
+            b = 2.5 + 4.5j
+            c = 1.2 + 6.2j
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2 + 3j
+            b = 2.5 + 4.5j
+            c = 1.2 + 6.2j
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2 + 3j
+            b = 2.5 + 4.5j
+            c = 1.2 + 6.2j
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            a = 2 + 3j
+            b = 2.5 + 4.5j
+            c = 1.2 + 6.2j
+
+            c = a + b
+            c = b + c
+            c = c + a
+            c = a + b
+            c = b + c
+
+            c = c - a
+            c = a - b
+            c = b - c
+            c = c - a
+            c = b - c
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+            c = a * b
+            c = b * a
+            c = c * b
+
+            c = a / b
+            c = b / a
+            c = c / b
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+
diff --git a/Tools/pybench/Calls.py b/Tools/pybench/Calls.py
new file mode 100644
index 0000000..82e7a91
--- /dev/null
+++ b/Tools/pybench/Calls.py
@@ -0,0 +1,410 @@
+from pybench import Test
+
+class PythonFunctionCalls(Test):
+
+    version = 0.3
+    operations = 5*(1+4+4+2)
+    rounds = 60000
+
+    def test(self):
+
+        global f,f1,g,h
+
+        # define functions
+        def f():
+            pass
+
+        def f1(x):
+            pass
+
+        def g(a,b,c):
+            return a,b,c
+
+        def h(a,b,c,d=1,e=2,f=3):
+            return d,e,f
+
+        # do calls
+        for i in xrange(self.rounds):
+
+            f()
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            g(i,i,i)
+            g(i,i,i)
+            g(i,i,i)
+            g(i,i,i)
+            h(i,i,3,i,i)
+            h(i,i,i,2,i,3)
+
+            f()
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            g(i,i,i)
+            g(i,i,i)
+            g(i,i,i)
+            g(i,i,i)
+            h(i,i,3,i,i)
+            h(i,i,i,2,i,3)
+
+            f()
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            g(i,i,i)
+            g(i,i,i)
+            g(i,i,i)
+            g(i,i,i)
+            h(i,i,3,i,i)
+            h(i,i,i,2,i,3)
+
+            f()
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            g(i,i,i)
+            g(i,i,i)
+            g(i,i,i)
+            g(i,i,i)
+            h(i,i,3,i,i)
+            h(i,i,i,2,i,3)
+
+            f()
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            g(i,i,i)
+            g(i,i,i)
+            g(i,i,i)
+            g(i,i,i)
+            h(i,i,3,i,i)
+            h(i,i,i,2,i,3)
+
+    def calibrate(self):
+
+        global f,f1,g,h
+
+        # define functions
+        def f():
+            pass
+
+        def f1(x):
+            pass
+
+        def g(a,b,c):
+            return a,b,c
+
+        def h(a,b,c,d=1,e=2,f=3):
+            return d,e,f
+
+        # do calls
+        for i in xrange(self.rounds):
+            pass
+
+###
+
+class BuiltinFunctionCalls(Test):
+
+    version = 0.4
+    operations = 5*(2+5+5+5)
+    rounds = 30000
+
+    def test(self):
+
+        # localize functions
+        f0 = globals
+        f1 = hash
+        f2 = cmp
+        f3 = range
+
+        # do calls
+        for i in xrange(self.rounds):
+
+            f0()
+            f0()
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+
+            f0()
+            f0()
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+
+            f0()
+            f0()
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+
+            f0()
+            f0()
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+
+            f0()
+            f0()
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            f1(i)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f2(1,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+            f3(1,3,2)
+
+    def calibrate(self):
+
+        # localize functions
+        f0 = dir
+        f1 = hash
+        f2 = range
+        f3 = range
+
+        # do calls
+        for i in xrange(self.rounds):
+            pass
+
+###
+
+class PythonMethodCalls(Test):
+
+    version = 0.3
+    operations = 5*(6 + 5 + 4)
+    rounds = 20000
+
+    def test(self):
+
+        class c:
+
+            x = 2
+            s = 'string'
+
+            def f(self):
+
+                return self.x
+
+            def j(self,a,b):
+
+                self.y = a
+                self.t = b
+                return self.y
+
+            def k(self,a,b,c=3):
+
+                self.y = a
+                self.s = b
+                self.t = c
+
+        o = c()
+
+        for i in xrange(self.rounds):
+
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.j(i,i)
+            o.j(i,i)
+            o.j(i,2)
+            o.j(i,2)
+            o.j(2,2)
+            o.k(i,i)
+            o.k(i,2)
+            o.k(i,2,3)
+            o.k(i,i,c=4)
+
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.j(i,i)
+            o.j(i,i)
+            o.j(i,2)
+            o.j(i,2)
+            o.j(2,2)
+            o.k(i,i)
+            o.k(i,2)
+            o.k(i,2,3)
+            o.k(i,i,c=4)
+
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.j(i,i)
+            o.j(i,i)
+            o.j(i,2)
+            o.j(i,2)
+            o.j(2,2)
+            o.k(i,i)
+            o.k(i,2)
+            o.k(i,2,3)
+            o.k(i,i,c=4)
+
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.j(i,i)
+            o.j(i,i)
+            o.j(i,2)
+            o.j(i,2)
+            o.j(2,2)
+            o.k(i,i)
+            o.k(i,2)
+            o.k(i,2,3)
+            o.k(i,i,c=4)
+
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.f()
+            o.j(i,i)
+            o.j(i,i)
+            o.j(i,2)
+            o.j(i,2)
+            o.j(2,2)
+            o.k(i,i)
+            o.k(i,2)
+            o.k(i,2,3)
+            o.k(i,i,c=4)
+
+    def calibrate(self):
+
+        class c:
+
+            x = 2
+            s = 'string'
+
+            def f(self):
+
+                return self.x
+
+            def j(self,a,b):
+
+                self.y = a
+                self.t = b
+
+            def k(self,a,b,c=3):
+
+                self.y = a
+                self.s = b
+                self.t = c
+
+        o = c
+
+        for i in xrange(self.rounds):
+            pass
+
+###
+
+class Recursion(Test):
+
+    version = 0.3
+    operations = 5
+    rounds = 50000
+
+    def test(self):
+
+        global f
+
+        def f(x):
+
+            if x > 1:
+                return f(x-1)
+            return 1
+
+        for i in xrange(self.rounds):
+            f(10)
+            f(10)
+            f(10)
+            f(10)
+            f(10)
+
+    def calibrate(self):
+
+        global f
+
+        def f(x):
+
+            if x > 0:
+                return f(x-1)
+            return 1
+
+        for i in xrange(self.rounds):
+            pass
+
diff --git a/Tools/pybench/CommandLine.py b/Tools/pybench/CommandLine.py
new file mode 100644
index 0000000..fb7e07b
--- /dev/null
+++ b/Tools/pybench/CommandLine.py
@@ -0,0 +1,634 @@
+""" CommandLine - Get and parse command line options
+
+    NOTE: This still is very much work in progress !!!
+
+    Different version are likely to be incompatible.
+
+    TODO:
+
+    * Incorporate the changes made by (see Inbox)
+    * Add number range option using srange() 
+
+"""
+
+__copyright__ = """\
+Copyright (c), 1997-2006, Marc-Andre Lemburg (mal@lemburg.com)
+Copyright (c), 2000-2006, eGenix.com Software GmbH (info@egenix.com)
+See the documentation for further information on copyrights,
+or contact the author. All Rights Reserved.
+"""
+
+__version__ = '1.2'
+
+import sys, getopt, string, glob, os, re, exceptions, traceback
+
+### Helpers
+
+def _getopt_flags(options):
+
+    """ Convert the option list to a getopt flag string and long opt
+        list
+
+    """
+    s = []
+    l = []
+    for o in options:
+        if o.prefix == '-':
+            # short option
+            s.append(o.name)
+            if o.takes_argument:
+                s.append(':')
+        else:
+            # long option
+            if o.takes_argument:
+                l.append(o.name+'=')
+            else:
+                l.append(o.name)
+    return string.join(s,''),l
+
+def invisible_input(prompt='>>> '):
+
+    """ Get raw input from a terminal without echoing the characters to
+        the terminal, e.g. for password queries.
+
+    """
+    import getpass
+    entry = getpass.getpass(prompt)
+    if entry is None:
+        raise KeyboardInterrupt
+    return entry
+
+def fileopen(name, mode='wb', encoding=None):
+
+    """ Open a file using mode.
+
+        Default mode is 'wb' meaning to open the file for writing in
+        binary mode. If encoding is given, I/O to and from the file is
+        transparently encoded using the given encoding.
+
+        Files opened for writing are chmod()ed to 0600.
+
+    """
+    if name == 'stdout':
+        return sys.stdout
+    elif name == 'stderr':
+        return sys.stderr
+    elif name == 'stdin':
+        return sys.stdin
+    else:
+        if encoding is not None:
+            import codecs
+            f = codecs.open(name, mode, encoding)
+        else:
+            f = open(name, mode)
+        if 'w' in mode:
+            os.chmod(name, 0600)
+        return f
+
+def option_dict(options):
+
+    """ Return a dictionary mapping option names to Option instances.
+    """
+    d = {}
+    for option in options:
+        d[option.name] = option
+    return d
+
+# Alias
+getpasswd = invisible_input
+
+_integerRE = re.compile('\s*(-?\d+)\s*$')
+_integerRangeRE = re.compile('\s*(-?\d+)\s*-\s*(-?\d+)\s*$')
+
+def srange(s,
+
+           split=string.split,integer=_integerRE,
+           integerRange=_integerRangeRE):
+
+    """ Converts a textual representation of integer numbers and ranges
+        to a Python list.
+
+        Supported formats: 2,3,4,2-10,-1 - -3, 5 - -2
+
+        Values are appended to the created list in the order specified
+        in the string.
+
+    """
+    l = []
+    append = l.append
+    for entry in split(s,','):
+        m = integer.match(entry)
+        if m:
+            append(int(m.groups()[0]))
+            continue
+        m = integerRange.match(entry)
+        if m:
+            start,end = map(int,m.groups())
+            l[len(l):] = range(start,end+1)
+    return l
+
+def abspath(path,
+
+            expandvars=os.path.expandvars,expanduser=os.path.expanduser,
+            join=os.path.join,getcwd=os.getcwd):
+
+    """ Return the corresponding absolute path for path.
+
+        path is expanded in the usual shell ways before
+        joining it with the current working directory.
+
+    """
+    try:
+        path = expandvars(path)
+    except AttributeError:
+        pass
+    try:
+        path = expanduser(path)
+    except AttributeError:
+        pass
+    return join(getcwd(), path)
+
+### Option classes
+
+class Option:
+
+    """ Option base class. Takes no argument.
+
+    """
+    default = None
+    helptext = ''
+    prefix = '-'
+    takes_argument = 0
+    has_default = 0
+    tab = 15
+
+    def __init__(self,name,help=None):
+
+        if not name[:1] == '-':
+            raise TypeError,'option names must start with "-"'
+        if name[1:2] == '-':
+            self.prefix = '--'
+            self.name = name[2:]
+        else:
+            self.name = name[1:]
+        if help:
+            self.help = help
+
+    def __str__(self):
+
+        o = self
+        name = o.prefix + o.name
+        if o.takes_argument:
+            name = name + ' arg'
+        if len(name) > self.tab:
+            name = name + '\n' + ' ' * (self.tab + 1 + len(o.prefix))
+        else:
+            name = '%-*s ' % (self.tab, name)
+        description = o.help
+        if o.has_default:
+            description = description + ' (%s)' % o.default
+        return '%s %s' % (name, description)
+
+class ArgumentOption(Option):
+
+    """ Option that takes an argument.
+
+        An optional default argument can be given.
+        
+    """
+    def __init__(self,name,help=None,default=None):
+
+        # Basemethod
+        Option.__init__(self,name,help)
+
+        if default is not None:
+            self.default = default
+            self.has_default = 1
+        self.takes_argument = 1
+
+class SwitchOption(Option):
+
+    """ Options that can be on or off. Has an optional default value.
+
+    """
+    def __init__(self,name,help=None,default=None):
+
+        # Basemethod
+        Option.__init__(self,name,help)
+
+        if default is not None:
+            self.default = default
+            self.has_default = 1
+
+### Application baseclass
+
+class Application:
+
+    """ Command line application interface with builtin argument
+        parsing.
+
+    """
+    # Options the program accepts (Option instances)
+    options = []
+
+    # Standard settings; these are appended to options in __init__
+    preset_options = [SwitchOption('-v',
+                                   'generate verbose output'),
+                      SwitchOption('-h',
+                                   'show this help text'),
+                      SwitchOption('--help',
+                                   'show this help text'),
+                      SwitchOption('--debug',
+                                   'enable debugging'),
+                      SwitchOption('--copyright',
+                                   'show copyright'),
+                      SwitchOption('--examples',
+                                   'show examples of usage')]
+
+    # The help layout looks like this:
+    # [header]   - defaults to ''
+    #
+    # [synopsis] - formatted as '<self.name> %s' % self.synopsis
+    #
+    # options:
+    # [options]  - formatted from self.options
+    #
+    # [version]  - formatted as 'Version:\n %s' % self.version, if given
+    #
+    # [about]    - defaults to ''
+    #
+    # Note: all fields that do not behave as template are formatted
+    #       using the instances dictionary as substitution namespace,
+    #       e.g. %(name)s will be replaced by the applications name.
+    #
+
+    # Header (default to program name)
+    header = ''
+
+    # Name (defaults to program name)
+    name = ''
+
+    # Synopsis (%(name)s is replaced by the program name)
+    synopsis = '%(name)s [option] files...'
+
+    # Version (optional)
+    version = ''
+
+    # General information printed after the possible options (optional)
+    about = ''
+
+    # Examples of usage to show when the --examples option is given (optional)
+    examples = ''
+
+    # Copyright to show
+    copyright = __copyright__
+
+    # Apply file globbing ?
+    globbing = 1
+
+    # Generate debug output ?
+    debug = 0
+
+    # Generate verbose output ?
+    verbose = 0
+
+    # Internal errors to catch
+    InternalError = exceptions.Exception
+
+    # Instance variables:
+    values = None       # Dictionary of passed options (or default values)
+                        # indexed by the options name, e.g. '-h'
+    files = None        # List of passed filenames
+    optionlist = None	# List of passed options
+
+    def __init__(self,argv=None):
+
+        # Setup application specs
+        if argv is None:
+            argv = sys.argv
+        self.filename = os.path.split(argv[0])[1]
+        if not self.name:
+            self.name = os.path.split(self.filename)[1]
+        else:
+            self.name = self.name
+        if not self.header:
+            self.header = self.name
+        else:
+            self.header = self.header
+
+        # Init .arguments list
+        self.arguments = argv[1:]
+        
+        # Setup Option mapping
+        self.option_map = option_dict(self.options)
+        
+        # Append preset options
+        for option in self.preset_options:
+            if not self.option_map.has_key(option.name):
+                self.add_option(option)
+                
+        # Init .files list
+        self.files = []
+
+        # Start Application
+        try:
+            # Process startup
+            rc = self.startup()
+            if rc is not None:
+                raise SystemExit,rc
+            
+            # Parse command line
+            rc = self.parse()
+            if rc is not None:
+                raise SystemExit,rc
+            
+            # Start application
+            rc = self.main()
+            if rc is None:
+                rc = 0
+
+        except SystemExit,rc:
+            pass
+
+        except KeyboardInterrupt:
+            print
+            print '* User Break'
+            print
+            rc = 1
+
+        except self.InternalError:
+            print
+            print '* Internal Error'
+            if self.debug:
+                print
+                traceback.print_exc(20, sys.stdout)
+            elif self.verbose:
+                print '  %s: %s' % sys.exc_info()[:2]
+            print
+            rc = 1
+
+        raise SystemExit,rc
+
+    def add_option(self, option):
+
+        """ Add a new Option instance to the Application dynamically.
+
+            Note that this has to be done *before* .parse() is being
+            executed.
+        
+        """
+        self.options.append(option)
+        self.option_map[option.name] = option
+
+    def startup(self):
+
+        """ Set user defined instance variables.
+
+            If this method returns anything other than None, the
+            process is terminated with the return value as exit code.
+
+        """
+        return None
+
+    def exit(self, rc=0):
+
+        """ Exit the program.
+
+            rc is used as exit code and passed back to the calling
+            program. It defaults to 0 which usually means: OK.
+
+        """
+        raise SystemExit, rc
+
+    def parse(self):
+
+        """ Parse the command line and fill in self.values and self.files.
+
+            After having parsed the options, the remaining command line
+            arguments are interpreted as files and passed to .handle_files()
+            for processing.
+
+            As final step the option handlers are called in the order
+            of the options given on the command line.
+
+        """
+        # Parse arguments
+        self.values = values = {}
+        for o in self.options:
+            if o.has_default:
+                values[o.prefix+o.name] = o.default
+            else:
+                values[o.prefix+o.name] = 0
+        flags,lflags = _getopt_flags(self.options)
+        try:
+            optlist,files = getopt.getopt(self.arguments,flags,lflags)
+            if self.globbing:
+                l = []
+                for f in files:
+                    gf = glob.glob(f)
+                    if not gf:
+                        l.append(f)
+                    else:
+                        l[len(l):] = gf
+                files = l
+            self.optionlist = optlist
+            self.files = files + self.files
+        except getopt.error,why:
+            self.help(why)
+            sys.exit(1)
+
+        # Call file handler
+        rc = self.handle_files(self.files)
+        if rc is not None:
+            sys.exit(rc)
+
+        # Call option handlers
+        for optionname, value in optlist:
+
+            # Try to convert value to integer
+            try:
+                value = string.atoi(value)
+            except ValueError:
+                pass
+
+            # Find handler and call it (or count the number of option
+            # instances on the command line)
+            handlername = 'handle' + string.replace(optionname, '-', '_')
+            try:
+                handler = getattr(self, handlername)
+            except AttributeError:
+                if value == '':
+                    # count the number of occurances
+                    if values.has_key(optionname):
+                        values[optionname] = values[optionname] + 1
+                    else:
+                        values[optionname] = 1
+                else:
+                    values[optionname] = value
+            else:
+                rc = handler(value)
+                if rc is not None:
+                    raise SystemExit, rc
+
+        # Apply final file check (for backward compatibility)
+        rc = self.check_files(self.files)
+        if rc is not None:
+            sys.exit(rc)
+
+    def check_files(self,filelist):
+
+        """ Apply some user defined checks on the files given in filelist.
+
+            This may modify filelist in place. A typical application
+            is checking that at least n files are given.
+            
+            If this method returns anything other than None, the
+            process is terminated with the return value as exit code.
+            
+        """
+        return None
+
+    def help(self,note=''):
+
+        self.print_header()
+        if self.synopsis:
+            print 'Synopsis:'
+            # To remain backward compatible:
+            try:
+                synopsis = self.synopsis % self.name
+            except (NameError, KeyError, TypeError):
+                synopsis = self.synopsis % self.__dict__
+            print ' ' + synopsis
+        print
+        self.print_options()
+        if self.version:
+            print 'Version:'
+            print ' %s' % self.version
+            print
+        if self.about:
+            print string.strip(self.about % self.__dict__)
+            print
+        if note:
+            print '-'*72
+            print 'Note:',note
+            print
+
+    def notice(self,note):
+
+        print '-'*72
+        print 'Note:',note
+        print '-'*72
+        print
+
+    def print_header(self):
+
+        print '-'*72
+        print self.header % self.__dict__
+        print '-'*72
+        print
+
+    def print_options(self):
+
+        options = self.options
+        print 'Options and default settings:'
+        if not options:
+            print '  None'
+            return
+        long = filter(lambda x: x.prefix == '--', options)
+        short = filter(lambda x: x.prefix == '-', options)
+        items = short + long
+        for o in options:
+            print ' ',o
+        print
+
+    #
+    # Example handlers:
+    #
+    # If a handler returns anything other than None, processing stops
+    # and the return value is passed to sys.exit() as argument.
+    #
+
+    # File handler
+    def handle_files(self,files):
+
+        """ This may process the files list in place.
+        """
+        return None
+        
+    # Short option handler
+    def handle_h(self,arg):
+
+        self.help()
+        return 0
+    
+    def handle_v(self, value):
+
+        """ Turn on verbose output.
+        """
+        self.verbose = 1
+        
+    # Handlers for long options have two underscores in their name
+    def handle__help(self,arg):
+
+        self.help()
+        return 0
+
+    def handle__debug(self,arg):
+
+        self.debug = 1
+        # We don't want to catch internal errors:
+        self.InternalError = None
+
+    def handle__copyright(self,arg):
+
+        self.print_header()
+        print string.strip(self.copyright % self.__dict__)
+        print
+        return 0
+
+    def handle__examples(self,arg):
+
+        self.print_header()
+        if self.examples:
+            print 'Examples:'
+            print
+            print string.strip(self.examples % self.__dict__)
+            print
+        else:
+            print 'No examples available.'
+            print
+        return 0
+
+    def main(self):
+
+        """ Override this method as program entry point.
+
+            The return value is passed to sys.exit() as argument.  If
+            it is None, 0 is assumed (meaning OK). Unhandled
+            exceptions are reported with exit status code 1 (see
+            __init__ for further details).
+            
+        """
+        return None
+
+# Alias
+CommandLine = Application
+
+def _test():
+
+    class MyApplication(Application):
+        header = 'Test Application'
+        version = __version__
+        options = [Option('-v','verbose')]
+        
+        def handle_v(self,arg):
+            print 'VERBOSE, Yeah !'
+
+    cmd = MyApplication()
+    if not cmd.values['-h']:
+        cmd.help()
+    print 'files:',cmd.files
+    print 'Bye...'
+
+if __name__ == '__main__':
+    _test()
diff --git a/Tools/pybench/Constructs.py b/Tools/pybench/Constructs.py
new file mode 100644
index 0000000..aba888f
--- /dev/null
+++ b/Tools/pybench/Constructs.py
@@ -0,0 +1,565 @@
+from pybench import Test
+
+class IfThenElse(Test):
+
+    version = 0.31
+    operations = 30*3 # hard to say...
+    rounds = 150000
+
+    def test(self):
+
+        a,b,c = 1,2,3
+        for i in xrange(self.rounds):
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+            if a == 1:
+                if b == 2:
+                    if c != 3:
+                        c = 3
+                        b = 3
+                    else:
+                        c = 2
+                elif b == 3:
+                    b = 2
+                    a = 2
+            elif a == 2:
+                a = 3
+            else:
+                a = 1
+
+    def calibrate(self):
+
+        a,b,c = 1,2,3
+        for i in xrange(self.rounds):
+            pass
+
+class NestedForLoops(Test):
+
+    version = 0.3
+    operations = 1000*10*5
+    rounds = 150
+
+    def test(self):
+
+        l1 = range(1000)
+        l2 = range(10)
+        l3 = range(5)
+        for i in xrange(self.rounds):
+            for i in l1:
+                for j in l2:
+                    for k in l3:
+                        pass
+
+    def calibrate(self):
+
+        l1 = range(1000)
+        l2 = range(10)
+        l3 = range(5)
+        for i in xrange(self.rounds):
+            pass
+
+class ForLoops(Test):
+
+    version = 0.1
+    operations = 5 * 5
+    rounds = 8000
+
+    def test(self):
+
+        l1 = range(100)
+        for i in xrange(self.rounds):
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+            for i in l1:
+                pass
+
+    def calibrate(self):
+
+        l1 = range(1000)
+        for i in xrange(self.rounds):
+            pass
+
diff --git a/Tools/pybench/Dict.py b/Tools/pybench/Dict.py
new file mode 100644
index 0000000..207d88f
--- /dev/null
+++ b/Tools/pybench/Dict.py
@@ -0,0 +1,503 @@
+from pybench import Test
+
+class DictCreation(Test):
+
+    version = 0.3
+    operations = 5*(5 + 5)
+    rounds = 60000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            d1 = {}
+            d2 = {}
+            d3 = {}
+            d4 = {}
+            d5 = {}
+
+            d1 = {1:2,3:4,5:6}
+            d2 = {2:3,4:5,6:7}
+            d3 = {3:4,5:6,7:8}
+            d4 = {4:5,6:7,8:9}
+            d5 = {6:7,8:9,10:11}
+
+            d1 = {}
+            d2 = {}
+            d3 = {}
+            d4 = {}
+            d5 = {}
+
+            d1 = {1:2,3:4,5:6}
+            d2 = {2:3,4:5,6:7}
+            d3 = {3:4,5:6,7:8}
+            d4 = {4:5,6:7,8:9}
+            d5 = {6:7,8:9,10:11}
+
+            d1 = {}
+            d2 = {}
+            d3 = {}
+            d4 = {}
+            d5 = {}
+
+            d1 = {1:2,3:4,5:6}
+            d2 = {2:3,4:5,6:7}
+            d3 = {3:4,5:6,7:8}
+            d4 = {4:5,6:7,8:9}
+            d5 = {6:7,8:9,10:11}
+
+            d1 = {}
+            d2 = {}
+            d3 = {}
+            d4 = {}
+            d5 = {}
+
+            d1 = {1:2,3:4,5:6}
+            d2 = {2:3,4:5,6:7}
+            d3 = {3:4,5:6,7:8}
+            d4 = {4:5,6:7,8:9}
+            d5 = {6:7,8:9,10:11}
+
+            d1 = {}
+            d2 = {}
+            d3 = {}
+            d4 = {}
+            d5 = {}
+
+            d1 = {1:2,3:4,5:6}
+            d2 = {2:3,4:5,6:7}
+            d3 = {3:4,5:6,7:8}
+            d4 = {4:5,6:7,8:9}
+            d5 = {6:7,8:9,10:11}
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+
+class DictWithStringKeys(Test):
+
+    version = 0.1
+    operations = 5*(6 + 6)
+    rounds = 200000
+
+    def test(self):
+
+        d = {}
+
+        for i in xrange(self.rounds):
+
+            d['abc'] = 1
+            d['def'] = 2
+            d['ghi'] = 3
+            d['jkl'] = 4
+            d['mno'] = 5
+            d['pqr'] = 6
+              
+            d['abc']
+            d['def']
+            d['ghi']
+            d['jkl']
+            d['mno']
+            d['pqr']
+              
+            d['abc'] = 1
+            d['def'] = 2
+            d['ghi'] = 3
+            d['jkl'] = 4
+            d['mno'] = 5
+            d['pqr'] = 6
+              
+            d['abc']
+            d['def']
+            d['ghi']
+            d['jkl']
+            d['mno']
+            d['pqr']
+              
+            d['abc'] = 1
+            d['def'] = 2
+            d['ghi'] = 3
+            d['jkl'] = 4
+            d['mno'] = 5
+            d['pqr'] = 6
+              
+            d['abc']
+            d['def']
+            d['ghi']
+            d['jkl']
+            d['mno']
+            d['pqr']
+              
+            d['abc'] = 1
+            d['def'] = 2
+            d['ghi'] = 3
+            d['jkl'] = 4
+            d['mno'] = 5
+            d['pqr'] = 6
+              
+            d['abc']
+            d['def']
+            d['ghi']
+            d['jkl']
+            d['mno']
+            d['pqr']
+              
+            d['abc'] = 1
+            d['def'] = 2
+            d['ghi'] = 3
+            d['jkl'] = 4
+            d['mno'] = 5
+            d['pqr'] = 6
+              
+            d['abc']
+            d['def']
+            d['ghi']
+            d['jkl']
+            d['mno']
+            d['pqr']
+              
+    def calibrate(self):
+
+        d = {}
+
+        for i in xrange(self.rounds):
+            pass
+
+class DictWithFloatKeys(Test):
+
+    version = 0.1
+    operations = 5*(6 + 6)
+    rounds = 200000
+
+    def test(self):
+
+        d = {}
+
+        for i in xrange(self.rounds):
+
+            d[1.234] = 1
+            d[2.345] = 2
+            d[3.456] = 3
+            d[4.567] = 4
+            d[5.678] = 5
+            d[6.789] = 6
+              
+            d[1.234]
+            d[2.345]
+            d[3.456]
+            d[4.567]
+            d[5.678]
+            d[6.789]
+              
+            d[1.234] = 1
+            d[2.345] = 2
+            d[3.456] = 3
+            d[4.567] = 4
+            d[5.678] = 5
+            d[6.789] = 6
+              
+            d[1.234]
+            d[2.345]
+            d[3.456]
+            d[4.567]
+            d[5.678]
+            d[6.789]
+              
+            d[1.234] = 1
+            d[2.345] = 2
+            d[3.456] = 3
+            d[4.567] = 4
+            d[5.678] = 5
+            d[6.789] = 6
+              
+            d[1.234]
+            d[2.345]
+            d[3.456]
+            d[4.567]
+            d[5.678]
+            d[6.789]
+              
+            d[1.234] = 1
+            d[2.345] = 2
+            d[3.456] = 3
+            d[4.567] = 4
+            d[5.678] = 5
+            d[6.789] = 6
+              
+            d[1.234]
+            d[2.345]
+            d[3.456]
+            d[4.567]
+            d[5.678]
+            d[6.789]
+              
+            d[1.234] = 1
+            d[2.345] = 2
+            d[3.456] = 3
+            d[4.567] = 4
+            d[5.678] = 5
+            d[6.789] = 6
+              
+            d[1.234]
+            d[2.345]
+            d[3.456]
+            d[4.567]
+            d[5.678]
+            d[6.789]
+              
+    def calibrate(self):
+
+        d = {}
+
+        for i in xrange(self.rounds):
+            pass
+
+class DictWithIntegerKeys(Test):
+
+    version = 0.1
+    operations = 5*(6 + 6)
+    rounds = 200000
+
+    def test(self):
+
+        d = {}
+
+        for i in xrange(self.rounds):
+
+            d[1] = 1
+            d[2] = 2
+            d[3] = 3
+            d[4] = 4
+            d[5] = 5
+            d[6] = 6
+              
+            d[1]
+            d[2]
+            d[3]
+            d[4]
+            d[5]
+            d[6]
+              
+            d[1] = 1
+            d[2] = 2
+            d[3] = 3
+            d[4] = 4
+            d[5] = 5
+            d[6] = 6
+              
+            d[1]
+            d[2]
+            d[3]
+            d[4]
+            d[5]
+            d[6]
+              
+            d[1] = 1
+            d[2] = 2
+            d[3] = 3
+            d[4] = 4
+            d[5] = 5
+            d[6] = 6
+              
+            d[1]
+            d[2]
+            d[3]
+            d[4]
+            d[5]
+            d[6]
+              
+            d[1] = 1
+            d[2] = 2
+            d[3] = 3
+            d[4] = 4
+            d[5] = 5
+            d[6] = 6
+              
+            d[1]
+            d[2]
+            d[3]
+            d[4]
+            d[5]
+            d[6]
+              
+            d[1] = 1
+            d[2] = 2
+            d[3] = 3
+            d[4] = 4
+            d[5] = 5
+            d[6] = 6
+              
+            d[1]
+            d[2]
+            d[3]
+            d[4]
+            d[5]
+            d[6]
+              
+    def calibrate(self):
+
+        d = {}
+
+        for i in xrange(self.rounds):
+            pass
+
+class SimpleDictManipulation(Test):
+
+    version = 0.3
+    operations = 5*(6 + 6 + 6 + 6)
+    rounds = 50000
+
+    def test(self):
+
+        d = {}
+
+        for i in xrange(self.rounds):
+
+            d[0] = 3
+            d[1] = 4
+            d[2] = 5
+            d[3] = 3
+            d[4] = 4
+            d[5] = 5
+            
+            x = d[0]
+            x = d[1]
+            x = d[2]
+            x = d[3]
+            x = d[4]
+            x = d[5]
+
+            d.has_key(0)
+            d.has_key(2)
+            d.has_key(4)
+            d.has_key(6)
+            d.has_key(8)
+            d.has_key(10)
+
+            del d[0]
+            del d[1]
+            del d[2]
+            del d[3]
+            del d[4]
+            del d[5]
+
+            d[0] = 3
+            d[1] = 4
+            d[2] = 5
+            d[3] = 3
+            d[4] = 4
+            d[5] = 5
+            
+            x = d[0]
+            x = d[1]
+            x = d[2]
+            x = d[3]
+            x = d[4]
+            x = d[5]
+
+            d.has_key(0)
+            d.has_key(2)
+            d.has_key(4)
+            d.has_key(6)
+            d.has_key(8)
+            d.has_key(10)
+
+            del d[0]
+            del d[1]
+            del d[2]
+            del d[3]
+            del d[4]
+            del d[5]
+
+            d[0] = 3
+            d[1] = 4
+            d[2] = 5
+            d[3] = 3
+            d[4] = 4
+            d[5] = 5
+            
+            x = d[0]
+            x = d[1]
+            x = d[2]
+            x = d[3]
+            x = d[4]
+            x = d[5]
+
+            d.has_key(0)
+            d.has_key(2)
+            d.has_key(4)
+            d.has_key(6)
+            d.has_key(8)
+            d.has_key(10)
+
+            del d[0]
+            del d[1]
+            del d[2]
+            del d[3]
+            del d[4]
+            del d[5]
+
+            d[0] = 3
+            d[1] = 4
+            d[2] = 5
+            d[3] = 3
+            d[4] = 4
+            d[5] = 5
+            
+            x = d[0]
+            x = d[1]
+            x = d[2]
+            x = d[3]
+            x = d[4]
+            x = d[5]
+
+            d.has_key(0)
+            d.has_key(2)
+            d.has_key(4)
+            d.has_key(6)
+            d.has_key(8)
+            d.has_key(10)
+
+            del d[0]
+            del d[1]
+            del d[2]
+            del d[3]
+            del d[4]
+            del d[5]
+
+            d[0] = 3
+            d[1] = 4
+            d[2] = 5
+            d[3] = 3
+            d[4] = 4
+            d[5] = 5
+            
+            x = d[0]
+            x = d[1]
+            x = d[2]
+            x = d[3]
+            x = d[4]
+            x = d[5]
+
+            d.has_key(0)
+            d.has_key(2)
+            d.has_key(4)
+            d.has_key(6)
+            d.has_key(8)
+            d.has_key(10)
+
+            del d[0]
+            del d[1]
+            del d[2]
+            del d[3]
+            del d[4]
+            del d[5]
+
+    def calibrate(self):
+
+        d = {}
+
+        for i in xrange(self.rounds):
+            pass
+
diff --git a/Tools/pybench/Exceptions.py b/Tools/pybench/Exceptions.py
new file mode 100644
index 0000000..295c83a
--- /dev/null
+++ b/Tools/pybench/Exceptions.py
@@ -0,0 +1,681 @@
+from pybench import Test
+
+class TryRaiseExcept(Test):
+
+    version = 0.1
+    operations = 2 + 3
+    rounds = 60000
+
+    def test(self):
+
+        error = ValueError
+
+        for i in xrange(self.rounds):
+            try:
+                raise error
+            except:
+                pass
+            try:
+                raise error
+            except:
+                pass
+            try:
+                raise error,"something"
+            except:
+                pass
+            try:
+                raise error,"something"
+            except:
+                pass
+            try:
+                raise error,"something"
+            except:
+                pass
+
+    def calibrate(self):
+
+        error = ValueError
+
+        for i in xrange(self.rounds):
+            pass
+            
+
+class TryExcept(Test):
+
+    version = 0.1
+    operations = 15 * 10
+    rounds = 200000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+            try:
+                pass
+            except:
+                pass
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+            
+
diff --git a/Tools/pybench/Imports.py b/Tools/pybench/Imports.py
new file mode 100644
index 0000000..eb458b4
--- /dev/null
+++ b/Tools/pybench/Imports.py
@@ -0,0 +1,139 @@
+from pybench import Test
+
+# First imports:
+import os
+import package.submodule
+
+class SecondImport(Test):
+
+    version = 0.1
+    operations = 5 * 5
+    rounds = 20000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+            import os
+            import os
+            import os
+            import os
+            import os
+
+            import os
+            import os
+            import os
+            import os
+            import os
+
+            import os
+            import os
+            import os
+            import os
+            import os
+
+            import os
+            import os
+            import os
+            import os
+            import os
+
+            import os
+            import os
+            import os
+            import os
+            import os
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+            
+
+class SecondPackageImport(Test):
+
+    version = 0.1
+    operations = 5 * 5
+    rounds = 20000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+            import package
+            import package
+            import package
+            import package
+            import package
+
+            import package
+            import package
+            import package
+            import package
+            import package
+
+            import package
+            import package
+            import package
+            import package
+            import package
+
+            import package
+            import package
+            import package
+            import package
+            import package
+
+            import package
+            import package
+            import package
+            import package
+            import package
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+            
+class SecondSubmoduleImport(Test):
+
+    version = 0.1
+    operations = 5 * 5
+    rounds = 20000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+            import package.submodule
+            import package.submodule
+            import package.submodule
+            import package.submodule
+            import package.submodule
+
+            import package.submodule
+            import package.submodule
+            import package.submodule
+            import package.submodule
+            import package.submodule
+
+            import package.submodule
+            import package.submodule
+            import package.submodule
+            import package.submodule
+            import package.submodule
+
+            import package.submodule
+            import package.submodule
+            import package.submodule
+            import package.submodule
+            import package.submodule
+
+            import package.submodule
+            import package.submodule
+            import package.submodule
+            import package.submodule
+            import package.submodule
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+            
diff --git a/Tools/pybench/Instances.py b/Tools/pybench/Instances.py
new file mode 100644
index 0000000..7663e23
--- /dev/null
+++ b/Tools/pybench/Instances.py
@@ -0,0 +1,68 @@
+from pybench import Test
+
+class CreateInstances(Test):
+
+    version = 0.2
+    operations = 3 + 7 + 4
+    rounds = 60000
+
+    def test(self):
+
+        class c:
+            pass
+
+        class d:
+            def __init__(self,a,b,c):
+                self.a = a
+                self.b = b
+                self.c = c
+
+        class e:
+            def __init__(self,a,b,c=4):
+                self.a = a
+                self.b = b
+                self.c = c
+                self.d = a
+                self.e = b
+                self.f = c
+
+        for i in xrange(self.rounds):
+            o = c()
+            o1 = c()
+            o2 = c()
+            p = d(i,i,3)
+            p1 = d(i,i,3)
+            p2 = d(i,3,3)
+            p3 = d(3,i,3)
+            p4 = d(i,i,i)
+            p5 = d(3,i,3)
+            p6 = d(i,i,i)
+            q = e(i,i,3)
+            q1 = e(i,i,3)
+            q2 = e(i,i,3)
+            q3 = e(i,i)
+
+    def calibrate(self):
+
+        class c:
+            pass
+
+        class d:
+            def __init__(self,a,b,c):
+                self.a = a
+                self.b = b
+                self.c = c
+
+        class e:
+            def __init__(self,a,b,c=4):
+                self.a = a
+                self.b = b
+                self.c = c
+                self.d = a
+                self.e = b
+                self.f = c
+
+        for i in xrange(self.rounds):
+            pass
+
+
diff --git a/Tools/pybench/LICENSE b/Tools/pybench/LICENSE
new file mode 100644
index 0000000..17c6a6b
--- /dev/null
+++ b/Tools/pybench/LICENSE
@@ -0,0 +1,25 @@
+pybench License
+---------------
+
+This copyright notice and license applies to all files in the pybench
+directory of the pybench distribution.
+
+Copyright (c), 1997-2006, Marc-Andre Lemburg (mal@lemburg.com)
+Copyright (c), 2000-2006, eGenix.com Software GmbH (info@egenix.com)
+
+                   All Rights Reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee or royalty is hereby
+granted, provided that the above copyright notice appear in all copies
+and that both that copyright notice and this permission notice appear
+in supporting documentation or portions thereof, including
+modifications, that you make.
+
+THE AUTHOR MARC-ANDRE LEMBURG DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
+INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
+FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
+WITH THE USE OR PERFORMANCE OF THIS SOFTWARE !
diff --git a/Tools/pybench/Lists.py b/Tools/pybench/Lists.py
new file mode 100644
index 0000000..a06b44c
--- /dev/null
+++ b/Tools/pybench/Lists.py
@@ -0,0 +1,292 @@
+from pybench import Test
+
+class SimpleListManipulation(Test):
+
+    version = 0.3
+    operations = 5* (6 + 6 + 6)
+    rounds = 60000
+
+    def test(self):
+
+        l = []
+
+        for i in xrange(self.rounds):
+
+            l.append(2)
+            l.append(3)
+            l.append(4)
+            l.append(2)
+            l.append(3)
+            l.append(4)
+
+            l[0] = 3
+            l[1] = 4
+            l[2] = 5
+            l[3] = 3
+            l[4] = 4
+            l[5] = 5
+            
+            x = l[0]
+            x = l[1]
+            x = l[2]
+            x = l[3]
+            x = l[4]
+            x = l[5]
+
+            l.append(2)
+            l.append(3)
+            l.append(4)
+            l.append(2)
+            l.append(3)
+            l.append(4)
+
+            l[0] = 3
+            l[1] = 4
+            l[2] = 5
+            l[3] = 3
+            l[4] = 4
+            l[5] = 5
+            
+            x = l[0]
+            x = l[1]
+            x = l[2]
+            x = l[3]
+            x = l[4]
+            x = l[5]
+
+            l.append(2)
+            l.append(3)
+            l.append(4)
+            l.append(2)
+            l.append(3)
+            l.append(4)
+
+            l[0] = 3
+            l[1] = 4
+            l[2] = 5
+            l[3] = 3
+            l[4] = 4
+            l[5] = 5
+            
+            x = l[0]
+            x = l[1]
+            x = l[2]
+            x = l[3]
+            x = l[4]
+            x = l[5]
+
+            l.append(2)
+            l.append(3)
+            l.append(4)
+            l.append(2)
+            l.append(3)
+            l.append(4)
+
+            l[0] = 3
+            l[1] = 4
+            l[2] = 5
+            l[3] = 3
+            l[4] = 4
+            l[5] = 5
+            
+            x = l[0]
+            x = l[1]
+            x = l[2]
+            x = l[3]
+            x = l[4]
+            x = l[5]
+
+            l.append(2)
+            l.append(3)
+            l.append(4)
+            l.append(2)
+            l.append(3)
+            l.append(4)
+
+            l[0] = 3
+            l[1] = 4
+            l[2] = 5
+            l[3] = 3
+            l[4] = 4
+            l[5] = 5
+            
+            x = l[0]
+            x = l[1]
+            x = l[2]
+            x = l[3]
+            x = l[4]
+            x = l[5]
+
+            if len(l) > 10000:
+                # cut down the size
+                del l[:]
+
+    def calibrate(self):
+
+        l = []
+
+        for i in xrange(self.rounds):
+            pass
+
+class ListSlicing(Test):
+
+    version = 0.4
+    operations = 25*(3+1+2+1)
+    rounds = 400
+
+    def test(self):
+
+        n = range(100)
+        r = range(25)
+
+        for i in xrange(self.rounds):
+
+            l = range(100)
+
+            for j in r:
+
+                m = l[50:]
+                m = l[:25]
+                m = l[50:55]
+                l[:3] = n
+                m = l[:-1]
+                m = l[1:]
+                l[-1:] = n
+
+    def calibrate(self):
+
+        n = range(100)
+        r = range(25)
+
+        for i in xrange(self.rounds):
+
+            l = range(100)
+
+            for j in r:
+                pass
+
+class SmallLists(Test):
+
+    version = 0.3
+    operations = 5*(1+ 6 + 6 + 3 + 1)
+    rounds = 60000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            l = []
+
+            l.append(2)
+            l.append(3)
+            l.append(4)
+            l.append(2)
+            l.append(3)
+            l.append(4)
+
+            l[0] = 3
+            l[1] = 4
+            l[2] = 5
+            l[3] = 3
+            l[4] = 4
+            l[5] = 5
+            
+            l[:3] = [1,2,3]
+            m = l[:-1]
+            m = l[1:]
+            
+            l[-1:] = [4,5,6]
+
+            l = []
+
+            l.append(2)
+            l.append(3)
+            l.append(4)
+            l.append(2)
+            l.append(3)
+            l.append(4)
+
+            l[0] = 3
+            l[1] = 4
+            l[2] = 5
+            l[3] = 3
+            l[4] = 4
+            l[5] = 5
+            
+            l[:3] = [1,2,3]
+            m = l[:-1]
+            m = l[1:]
+            
+            l[-1:] = [4,5,6]
+
+            l = []
+
+            l.append(2)
+            l.append(3)
+            l.append(4)
+            l.append(2)
+            l.append(3)
+            l.append(4)
+
+            l[0] = 3
+            l[1] = 4
+            l[2] = 5
+            l[3] = 3
+            l[4] = 4
+            l[5] = 5
+            
+            l[:3] = [1,2,3]
+            m = l[:-1]
+            m = l[1:]
+            
+            l[-1:] = [4,5,6]
+
+            l = []
+
+            l.append(2)
+            l.append(3)
+            l.append(4)
+            l.append(2)
+            l.append(3)
+            l.append(4)
+
+            l[0] = 3
+            l[1] = 4
+            l[2] = 5
+            l[3] = 3
+            l[4] = 4
+            l[5] = 5
+            
+            l[:3] = [1,2,3]
+            m = l[:-1]
+            m = l[1:]
+            
+            l[-1:] = [4,5,6]
+
+            l = []
+
+            l.append(2)
+            l.append(3)
+            l.append(4)
+            l.append(2)
+            l.append(3)
+            l.append(4)
+
+            l[0] = 3
+            l[1] = 4
+            l[2] = 5
+            l[3] = 3
+            l[4] = 4
+            l[5] = 5
+            
+            l[:3] = [1,2,3]
+            m = l[:-1]
+            m = l[1:]
+            
+            l[-1:] = [4,5,6]
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            l = []
+
diff --git a/Tools/pybench/Lookups.py b/Tools/pybench/Lookups.py
new file mode 100644
index 0000000..fbbc0ed
--- /dev/null
+++ b/Tools/pybench/Lookups.py
@@ -0,0 +1,946 @@
+from pybench import Test
+
+class SpecialClassAttribute(Test):
+
+    version = 0.3
+    operations = 5*(12 + 12)
+    rounds = 100000
+
+    def test(self):
+
+        class c:
+            pass
+
+        for i in xrange(self.rounds):
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            c.__a = 2
+            c.__b = 3
+            c.__c = 4
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+            x = c.__a
+            x = c.__b
+            x = c.__c
+
+    def calibrate(self):
+
+        class c:
+            pass
+
+        for i in xrange(self.rounds):
+            pass
+
+class NormalClassAttribute(Test):
+
+    version = 0.3
+    operations = 5*(12 + 12)
+    rounds = 100000
+
+    def test(self):
+
+        class c:
+            pass
+
+        for i in xrange(self.rounds):
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+            c.a = 2
+            c.b = 3
+            c.c = 4
+
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+            x = c.a
+            x = c.b
+            x = c.c
+
+    def calibrate(self):
+
+        class c:
+            pass
+
+        for i in xrange(self.rounds):
+            pass
+
+class SpecialInstanceAttribute(Test):
+
+    version = 0.3
+    operations = 5*(12 + 12)
+    rounds = 100000
+
+    def test(self):
+
+        class c:
+            pass
+        o = c()
+
+        for i in xrange(self.rounds):
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+            o.__a__ = 2
+            o.__b__ = 3
+            o.__c__ = 4
+
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+            x = o.__a__
+            x = o.__b__
+            x = o.__c__
+
+    def calibrate(self):
+
+        class c:
+            pass
+        o = c()
+
+        for i in xrange(self.rounds):
+            pass
+
+class NormalInstanceAttribute(Test):
+
+    version = 0.3
+    operations = 5*(12 + 12)
+    rounds = 100000
+
+    def test(self):
+
+        class c:
+            pass
+        o = c()
+
+        for i in xrange(self.rounds):
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+            o.a = 2
+            o.b = 3
+            o.c = 4
+
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+            x = o.a
+            x = o.b
+            x = o.c
+
+    def calibrate(self):
+
+        class c:
+            pass
+        o = c()
+
+        for i in xrange(self.rounds):
+            pass
+
+class BuiltinMethodLookup(Test):
+
+    version = 0.3
+    operations = 5*(3*5 + 3*5)
+    rounds = 70000
+
+    def test(self):
+
+        l = []
+        d = {}
+
+        for i in xrange(self.rounds):
+
+            l.append
+            l.append
+            l.append
+            l.append
+            l.append
+
+            l.insert
+            l.insert
+            l.insert
+            l.insert
+            l.insert
+
+            l.sort
+            l.sort
+            l.sort
+            l.sort
+            l.sort
+
+            d.has_key
+            d.has_key
+            d.has_key
+            d.has_key
+            d.has_key
+
+            d.items
+            d.items
+            d.items
+            d.items
+            d.items
+
+            d.get
+            d.get
+            d.get
+            d.get
+            d.get
+
+            l.append
+            l.append
+            l.append
+            l.append
+            l.append
+
+            l.insert
+            l.insert
+            l.insert
+            l.insert
+            l.insert
+
+            l.sort
+            l.sort
+            l.sort
+            l.sort
+            l.sort
+
+            d.has_key
+            d.has_key
+            d.has_key
+            d.has_key
+            d.has_key
+
+            d.items
+            d.items
+            d.items
+            d.items
+            d.items
+
+            d.get
+            d.get
+            d.get
+            d.get
+            d.get
+
+            l.append
+            l.append
+            l.append
+            l.append
+            l.append
+
+            l.insert
+            l.insert
+            l.insert
+            l.insert
+            l.insert
+
+            l.sort
+            l.sort
+            l.sort
+            l.sort
+            l.sort
+
+            d.has_key
+            d.has_key
+            d.has_key
+            d.has_key
+            d.has_key
+
+            d.items
+            d.items
+            d.items
+            d.items
+            d.items
+
+            d.get
+            d.get
+            d.get
+            d.get
+            d.get
+
+            l.append
+            l.append
+            l.append
+            l.append
+            l.append
+
+            l.insert
+            l.insert
+            l.insert
+            l.insert
+            l.insert
+
+            l.sort
+            l.sort
+            l.sort
+            l.sort
+            l.sort
+
+            d.has_key
+            d.has_key
+            d.has_key
+            d.has_key
+            d.has_key
+
+            d.items
+            d.items
+            d.items
+            d.items
+            d.items
+
+            d.get
+            d.get
+            d.get
+            d.get
+            d.get
+
+            l.append
+            l.append
+            l.append
+            l.append
+            l.append
+
+            l.insert
+            l.insert
+            l.insert
+            l.insert
+            l.insert
+
+            l.sort
+            l.sort
+            l.sort
+            l.sort
+            l.sort
+
+            d.has_key
+            d.has_key
+            d.has_key
+            d.has_key
+            d.has_key
+
+            d.items
+            d.items
+            d.items
+            d.items
+            d.items
+
+            d.get
+            d.get
+            d.get
+            d.get
+            d.get
+
+    def calibrate(self):
+
+        l = []
+        d = {}
+
+        for i in xrange(self.rounds):
+            pass
+
diff --git a/Tools/pybench/Numbers.py b/Tools/pybench/Numbers.py
new file mode 100644
index 0000000..75cf2ed
--- /dev/null
+++ b/Tools/pybench/Numbers.py
@@ -0,0 +1,784 @@
+from pybench import Test
+
+class CompareIntegers(Test):
+
+    version = 0.1
+    operations = 30 * 5
+    rounds = 120000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+            
+            2 < 3
+            2 > 3
+            2 == 3
+            2 > 3
+            2 < 3
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+
+
+class CompareFloats(Test):
+
+    version = 0.1
+    operations = 30 * 5
+    rounds = 60000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+            
+            2.1 < 3.31
+            2.1 > 3.31
+            2.1 == 3.31
+            2.1 > 3.31
+            2.1 < 3.31
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+
+
+class CompareFloatsIntegers(Test):
+
+    version = 0.1
+    operations = 30 * 5
+    rounds = 60000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+            
+            2.1 < 4
+            2.1 > 4
+            2.1 == 4
+            2.1 > 4
+            2.1 < 4
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+
+
+class CompareLongs(Test):
+
+    version = 0.1
+    operations = 30 * 5
+    rounds = 60000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+            
+            1234567890L < 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L == 3456789012345L
+            1234567890L > 3456789012345L
+            1234567890L < 3456789012345L
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
diff --git a/Tools/pybench/README b/Tools/pybench/README
new file mode 100644
index 0000000..634e41b
--- /dev/null
+++ b/Tools/pybench/README
@@ -0,0 +1,372 @@
+________________________________________________________________________
+
+PYBENCH - A Python Benchmark Suite
+________________________________________________________________________
+
+     Extendable suite of of low-level benchmarks for measuring
+          the performance of the Python implementation 
+                 (interpreter, compiler or VM).
+
+pybench is a collection of tests that provides a standardized way to
+measure the performance of Python implementations. It takes a very
+close look at different aspects of Python programs and let's you
+decide which factors are more important to you than others, rather
+than wrapping everything up in one number, like the other performance
+tests do (e.g. pystone which is included in the Python Standard
+Library).
+
+pybench has been used in the past by several Python developers to
+track down performance bottlenecks or to demonstrate the impact of
+optimizations and new features in Python.
+
+The command line interface for pybench is the file pybench.py. Run
+this script with option '--help' to get a listing of the possible
+options. Without options, pybench will simply execute the benchmark
+and then print out a report to stdout.
+
+
+Micro-Manual
+------------
+
+Run 'pybench.py -h' to see the help screen.
+Run 'pybench.py' to just let the benchmark suite do it's thing and
+'pybench.py -f <file>' to have it store the results in a file too.
+
+This is the current output of pybench.py --help:
+
+Synopsis:
+ pybench.py [option] files...
+
+Options and default settings:
+  -n arg           number of rounds (10)
+  -f arg           save benchmark to file arg ()
+  -c arg           compare benchmark with the one in file arg ()
+  -s arg           show benchmark in file arg, then exit ()
+  -S               show statistics of benchmarks (0)
+  -w arg           set warp factor to arg (20)
+  -d               hide noise in compares (0)
+  --no-gc          disable garbage collection (0)
+  -v               generate verbose output
+  -h               show this help text
+  --help           show this help text
+  --debug          enable debugging
+  --copyright      show copyright
+  --examples       show examples of usage
+
+Version:
+ 1.3
+
+The normal operation is to run the suite and display the
+results. Use -f to save them for later reuse or comparisms.
+
+Examples:
+
+python1.5 pybench.py -w 100 -f p15
+python1.4 pybench.py -w 100 -f p14
+python pybench.py -s p15 -c p14
+
+
+License
+-------
+
+See LICENSE file.
+
+
+Sample output
+-------------
+
+PYBENCH 1.3
+
+Machine Details:
+   Platform ID:  Linux-2.6.8-24.19-default-x86_64-with-SuSE-9.2-x86-64
+   Executable:   /home/lemburg/projects/Python/Installation/bin/python
+   Python:       2.5a1.0
+   Compiler:     GCC 3.3.4 (pre 3.3.5 20040809)
+   Build:        Apr  9 2006 01:50:57 (#trunk)
+
+Searching for tests...
+   BuiltinFunctionCalls
+   BuiltinMethodLookup
+   CompareFloats
+   CompareFloatsIntegers
+   CompareIntegers
+   CompareInternedStrings
+   CompareLongs
+   CompareStrings
+   CompareUnicode
+   ConcatStrings
+   ConcatUnicode
+   CreateInstances
+   CreateStringsWithConcat
+   CreateUnicodeWithConcat
+   DictCreation
+   DictWithFloatKeys
+   DictWithIntegerKeys
+   DictWithStringKeys
+   ForLoops
+   IfThenElse
+   ListSlicing
+   NestedForLoops
+   NormalClassAttribute
+   NormalInstanceAttribute
+   PythonFunctionCalls
+   PythonMethodCalls
+   Recursion
+   SecondImport
+   SecondPackageImport
+   SecondSubmoduleImport
+   SimpleComplexArithmetic
+   SimpleDictManipulation
+   SimpleFloatArithmetic
+   SimpleIntFloatArithmetic
+   SimpleIntegerArithmetic
+   SimpleListManipulation
+   SimpleLongArithmetic
+   SmallLists
+   SmallTuples
+   SpecialClassAttribute
+   SpecialInstanceAttribute
+   StringMappings
+   StringPredicates
+   StringSlicing
+   TryExcept
+   TryRaiseExcept
+   TupleSlicing
+   UnicodeMappings
+   UnicodePredicates
+   UnicodeProperties
+   UnicodeSlicing
+
+Running 10 round(s) of the suite:
+
+...
+
+ Round 10                         real   abs    overhead
+          BuiltinFunctionCalls:   0.030r 0.030a 0.000o
+           BuiltinMethodLookup:   0.059r 0.060a 0.001o
+                 CompareFloats:   0.050r 0.050a 0.000o
+         CompareFloatsIntegers:   0.050r 0.050a 0.000o
+               CompareIntegers:   0.070r 0.070a 0.000o
+        CompareInternedStrings:   0.039r 0.040a 0.001o
+                  CompareLongs:   0.050r 0.050a 0.000o
+                CompareStrings:   0.060r 0.060a 0.000o
+                CompareUnicode:   0.060r 0.060a 0.000o
+                 ConcatStrings:   0.040r 0.040a 0.000o
+                 ConcatUnicode:   0.050r 0.050a 0.000o
+               CreateInstances:   0.050r 0.050a 0.000o
+       CreateStringsWithConcat:   0.029r 0.030a 0.001o
+       CreateUnicodeWithConcat:   0.060r 0.060a 0.000o
+                  DictCreation:   0.040r 0.040a 0.000o
+             DictWithFloatKeys:   0.089r 0.090a 0.000o
+           DictWithIntegerKeys:   0.059r 0.060a 0.001o
+            DictWithStringKeys:   0.070r 0.070a 0.001o
+                      ForLoops:   0.050r 0.050a 0.000o
+                    IfThenElse:   0.070r 0.070a 0.000o
+                   ListSlicing:   0.030r 0.030a 0.000o
+                NestedForLoops:   0.030r 0.030a 0.000o
+          NormalClassAttribute:   0.060r 0.060a 0.000o
+       NormalInstanceAttribute:   0.060r 0.060a 0.000o
+           PythonFunctionCalls:   0.060r 0.060a 0.000o
+             PythonMethodCalls:   0.050r 0.050a 0.000o
+                     Recursion:   0.050r 0.050a 0.000o
+                  SecondImport:   0.030r 0.030a 0.000o
+           SecondPackageImport:   0.030r 0.030a 0.000o
+         SecondSubmoduleImport:   0.040r 0.040a 0.000o
+       SimpleComplexArithmetic:   0.030r 0.030a 0.000o
+        SimpleDictManipulation:   0.040r 0.040a 0.000o
+         SimpleFloatArithmetic:   0.050r 0.050a 0.001o
+      SimpleIntFloatArithmetic:   0.060r 0.060a 0.000o
+       SimpleIntegerArithmetic:   0.060r 0.060a 0.000o
+        SimpleListManipulation:   0.030r 0.030a 0.000o
+          SimpleLongArithmetic:   0.030r 0.030a 0.000o
+                    SmallLists:   0.050r 0.050a 0.000o
+                   SmallTuples:   0.050r 0.050a 0.000o
+         SpecialClassAttribute:   0.060r 0.060a 0.000o
+      SpecialInstanceAttribute:   0.079r 0.080a 0.001o
+                StringMappings:   0.060r 0.060a 0.000o
+              StringPredicates:   0.049r 0.050a 0.001o
+                 StringSlicing:   0.039r 0.040a 0.000o
+                     TryExcept:   0.079r 0.080a 0.001o
+                TryRaiseExcept:   0.059r 0.060a 0.001o
+                  TupleSlicing:   0.050r 0.050a 0.000o
+               UnicodeMappings:   0.070r 0.070a 0.001o
+             UnicodePredicates:   0.059r 0.060a 0.001o
+             UnicodeProperties:   0.059r 0.060a 0.001o
+                UnicodeSlicing:   0.050r 0.050a 0.000o
+                                 ----------------------
+            Average round time:      2.937 seconds
+
+
+Tests:                              per run    per oper.   overhead
+------------------------------------------------------------------------
+          BuiltinFunctionCalls:      29.85 ms    0.23 us    0.00 ms
+           BuiltinMethodLookup:      66.85 ms    0.13 us    0.50 ms
+                 CompareFloats:      43.00 ms    0.10 us    0.00 ms
+         CompareFloatsIntegers:      51.80 ms    0.12 us    0.00 ms
+               CompareIntegers:      70.70 ms    0.08 us    0.50 ms
+        CompareInternedStrings:      41.40 ms    0.08 us    0.50 ms
+                  CompareLongs:      47.90 ms    0.11 us    0.00 ms
+                CompareStrings:      58.50 ms    0.12 us    0.50 ms
+                CompareUnicode:      56.55 ms    0.15 us    0.50 ms
+                 ConcatStrings:      44.75 ms    0.30 us    0.00 ms
+                 ConcatUnicode:      54.55 ms    0.36 us    0.50 ms
+               CreateInstances:      50.95 ms    1.21 us    0.00 ms
+       CreateStringsWithConcat:      28.85 ms    0.14 us    0.50 ms
+       CreateUnicodeWithConcat:      53.75 ms    0.27 us    0.00 ms
+                  DictCreation:      41.90 ms    0.28 us    0.00 ms
+             DictWithFloatKeys:      88.50 ms    0.15 us    0.50 ms
+           DictWithIntegerKeys:      62.55 ms    0.10 us    0.50 ms
+            DictWithStringKeys:      60.50 ms    0.10 us    0.50 ms
+                      ForLoops:      46.90 ms    4.69 us    0.00 ms
+                    IfThenElse:      60.55 ms    0.09 us    0.00 ms
+                   ListSlicing:      29.90 ms    8.54 us    0.00 ms
+                NestedForLoops:      33.95 ms    0.10 us    0.00 ms
+          NormalClassAttribute:      62.75 ms    0.10 us    0.50 ms
+       NormalInstanceAttribute:      61.80 ms    0.10 us    0.50 ms
+           PythonFunctionCalls:      60.00 ms    0.36 us    0.00 ms
+             PythonMethodCalls:      50.00 ms    0.67 us    0.00 ms
+                     Recursion:      46.85 ms    3.75 us    0.00 ms
+                  SecondImport:      35.00 ms    1.40 us    0.00 ms
+           SecondPackageImport:      32.00 ms    1.28 us    0.00 ms
+         SecondSubmoduleImport:      38.00 ms    1.52 us    0.00 ms
+       SimpleComplexArithmetic:      26.85 ms    0.12 us    0.00 ms
+        SimpleDictManipulation:      40.85 ms    0.14 us    0.00 ms
+         SimpleFloatArithmetic:      48.70 ms    0.09 us    0.50 ms
+      SimpleIntFloatArithmetic:      57.70 ms    0.09 us    0.00 ms
+       SimpleIntegerArithmetic:      58.75 ms    0.09 us    0.50 ms
+        SimpleListManipulation:      34.80 ms    0.13 us    0.00 ms
+          SimpleLongArithmetic:      30.95 ms    0.19 us    0.50 ms
+                    SmallLists:      47.60 ms    0.19 us    0.00 ms
+                   SmallTuples:      48.80 ms    0.20 us    0.50 ms
+         SpecialClassAttribute:      61.70 ms    0.10 us    0.00 ms
+      SpecialInstanceAttribute:      76.70 ms    0.13 us    0.50 ms
+                StringMappings:      58.70 ms    0.47 us    0.00 ms
+              StringPredicates:      50.00 ms    0.18 us    1.00 ms
+                 StringSlicing:      39.65 ms    0.23 us    0.50 ms
+                     TryExcept:      84.45 ms    0.06 us    0.50 ms
+                TryRaiseExcept:      61.75 ms    4.12 us    0.50 ms
+                  TupleSlicing:      48.95 ms    0.47 us    0.00 ms
+               UnicodeMappings:      71.50 ms    3.97 us    0.50 ms
+             UnicodePredicates:      52.75 ms    0.23 us    1.00 ms
+             UnicodeProperties:      61.90 ms    0.31 us    1.00 ms
+                UnicodeSlicing:      53.75 ms    0.31 us    0.50 ms
+------------------------------------------------------------------------
+            Average round time:    2937.00 ms
+
+________________________________________________________________________
+
+Writing New Tests
+________________________________________________________________________
+
+pybench tests are simple modules defining one or more pybench.Test
+subclasses.
+
+Writing a test essentially boils down to providing two methods:
+.test() which runs .rounds number of .operations test operations each
+and .calibrate() which does the same except that it doesn't actually
+execute the operations.
+
+
+Here's an example:
+------------------
+
+from pybench import Test
+
+class IntegerCounting(Test):
+
+    # Version number of the test as float (x.yy); this is important
+    # for comparisons of benchmark runs - tests with unequal version
+    # number will not get compared.
+    version = 1.0
+    
+    # The number of abstract operations done in each round of the
+    # test. An operation is the basic unit of what you want to
+    # measure. The benchmark will output the amount of run-time per
+    # operation. Note that in order to raise the measured timings
+    # significantly above noise level, it is often required to repeat
+    # sets of operations more than once per test round. The measured
+    # overhead per test round should be less than 1 second.
+    operations = 20
+
+    # Number of rounds to execute per test run. This should be
+    # adjusted to a figure that results in a test run-time of between
+    # 20-50 seconds.
+    rounds = 100000
+
+    def test(self):
+
+	""" Run the test.
+
+	    The test needs to run self.rounds executing
+	    self.operations number of operations each.
+
+        """
+        # Init the test
+        a = 1
+
+        # Run test rounds
+	#
+        # NOTE: Use xrange() for all test loops unless you want to face
+	# a 20MB process !
+	#
+        for i in xrange(self.rounds):
+
+            # Repeat the operations per round to raise the run-time
+            # per operation significantly above the noise level of the
+            # for-loop overhead. 
+
+	    # Execute 20 operations (a += 1):
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+            a += 1
+
+    def calibrate(self):
+
+	""" Calibrate the test.
+
+	    This method should execute everything that is needed to
+	    setup and run the test - except for the actual operations
+	    that you intend to measure. pybench uses this method to
+            measure the test implementation overhead.
+
+        """
+        # Init the test
+        a = 1
+
+        # Run test rounds (without actually doing any operation)
+        for i in xrange(self.rounds):
+
+	    # Skip the actual execution of the operations, since we
+	    # only want to measure the test's administration overhead.
+            pass
+
+Registering a new test module
+-----------------------------
+
+To register a test module with pybench, the classes need to be
+imported into the pybench.Setup module. pybench will then scan all the
+symbols defined in that module for subclasses of pybench.Test and
+automatically add them to the benchmark suite.
+
+
+Have fun,
+--
+Marc-Andre Lemburg
+mal@lemburg.com
diff --git a/Tools/pybench/Setup.py b/Tools/pybench/Setup.py
new file mode 100644
index 0000000..906a2a9
--- /dev/null
+++ b/Tools/pybench/Setup.py
@@ -0,0 +1,35 @@
+#!python
+
+# Setup file for pybench
+#
+# This file has to import all tests to be run; it is executed as
+# Python source file, so you can do all kinds of manipulations here
+# rather than having to edit the tests themselves.
+#
+# Note: Please keep this module compatible to Python 1.5.2.
+#
+# Tests may include features in later Python versions, but these
+# should then be embedded in try-except clauses in this configuration
+# module.
+
+# Defaults
+Number_of_rounds = 10
+Warp_factor = 20
+
+# Import tests
+from Arithmetic import *
+from Calls import *
+from Constructs import *
+from Lookups import *
+from Instances import *
+from Lists import *
+from Tuples import *
+from Dict import *
+from Exceptions import *
+from Imports import *
+from Strings import *
+from Numbers import *
+try:
+    from Unicode import *
+except (ImportError, SyntaxError):
+    pass
diff --git a/Tools/pybench/Strings.py b/Tools/pybench/Strings.py
new file mode 100644
index 0000000..5ab458e
--- /dev/null
+++ b/Tools/pybench/Strings.py
@@ -0,0 +1,564 @@
+from pybench import Test
+from string import join
+
+class ConcatStrings(Test):
+
+    version = 0.1
+    operations = 10 * 5
+    rounds = 60000
+
+    def test(self):
+
+        # Make sure the strings are *not* interned
+        s = join(map(str,range(100)))
+        t = join(map(str,range(1,101)))
+
+        for i in xrange(self.rounds):
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+    def calibrate(self):
+
+        s = join(map(str,range(100)))
+        t = join(map(str,range(1,101)))
+
+        for i in xrange(self.rounds):
+            pass
+            
+
+class CompareStrings(Test):
+
+    version = 0.2
+    operations = 10 * 5
+    rounds = 200000
+
+    def test(self):
+
+        # Make sure the strings are *not* interned
+        s = join(map(str,range(10)))
+        t = join(map(str,range(10))) + "abc"
+
+        for i in xrange(self.rounds):
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+    def calibrate(self):
+
+        s = join(map(str,range(10)))
+        t = join(map(str,range(10))) + "abc"
+
+        for i in xrange(self.rounds):
+            pass
+            
+
+class CompareInternedStrings(Test):
+
+    version = 0.1
+    operations = 10 * 5
+    rounds = 200000
+
+    def test(self):
+
+        # Make sure the strings *are* interned
+        s = intern(join(map(str,range(10))))
+        t = s
+
+        for i in xrange(self.rounds):
+            t == s
+            t == s
+            t >= s
+            t > s
+            t < s
+
+            t == s
+            t == s
+            t >= s
+            t > s
+            t < s
+
+            t == s
+            t == s
+            t >= s
+            t > s
+            t < s
+
+            t == s
+            t == s
+            t >= s
+            t > s
+            t < s
+
+            t == s
+            t == s
+            t >= s
+            t > s
+            t < s
+
+            t == s
+            t == s
+            t >= s
+            t > s
+            t < s
+
+            t == s
+            t == s
+            t >= s
+            t > s
+            t < s
+
+            t == s
+            t == s
+            t >= s
+            t > s
+            t < s
+
+            t == s
+            t == s
+            t >= s
+            t > s
+            t < s
+
+            t == s
+            t == s
+            t >= s
+            t > s
+            t < s
+
+    def calibrate(self):
+
+        s = intern(join(map(str,range(10))))
+        t = s
+
+        for i in xrange(self.rounds):
+            pass
+            
+
+class CreateStringsWithConcat(Test):
+
+    version = 0.1
+    operations = 10 * 5
+    rounds = 80000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+            s = 'om'
+            s = s + 'xbx'
+            s = s + 'xcx'
+            s = s + 'xdx'
+            s = s + 'xex'
+
+            s = s + 'xax'
+            s = s + 'xbx'
+            s = s + 'xcx'
+            s = s + 'xdx'
+            s = s + 'xex'
+
+            s = s + 'xax'
+            s = s + 'xbx'
+            s = s + 'xcx'
+            s = s + 'xdx'
+            s = s + 'xex'
+
+            s = s + 'xax'
+            s = s + 'xbx'
+            s = s + 'xcx'
+            s = s + 'xdx'
+            s = s + 'xex'
+
+            s = s + 'xax'
+            s = s + 'xbx'
+            s = s + 'xcx'
+            s = s + 'xdx'
+            s = s + 'xex'
+
+            s = s + 'xax'
+            s = s + 'xbx'
+            s = s + 'xcx'
+            s = s + 'xdx'
+            s = s + 'xex'
+
+            s = s + 'xax'
+            s = s + 'xbx'
+            s = s + 'xcx'
+            s = s + 'xdx'
+            s = s + 'xex'
+
+            s = s + 'xax'
+            s = s + 'xbx'
+            s = s + 'xcx'
+            s = s + 'xdx'
+            s = s + 'xex'
+
+            s = s + 'xax'
+            s = s + 'xbx'
+            s = s + 'xcx'
+            s = s + 'xdx'
+            s = s + 'xex'
+
+            s = s + 'xax'
+            s = s + 'xbx'
+            s = s + 'xcx'
+            s = s + 'xdx'
+            s = s + 'xex'
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+            
+
+class StringSlicing(Test):
+
+    version = 0.1
+    operations = 5 * 7
+    rounds = 100000
+
+    def test(self):
+
+        s = join(map(str,range(100)))
+
+        for i in xrange(self.rounds):
+
+                s[50:]
+                s[:25]
+                s[50:55]
+                s[-1:]
+                s[:1]
+                s[2:]
+                s[11:-11]
+
+                s[50:]
+                s[:25]
+                s[50:55]
+                s[-1:]
+                s[:1]
+                s[2:]
+                s[11:-11]
+
+                s[50:]
+                s[:25]
+                s[50:55]
+                s[-1:]
+                s[:1]
+                s[2:]
+                s[11:-11]
+
+                s[50:]
+                s[:25]
+                s[50:55]
+                s[-1:]
+                s[:1]
+                s[2:]
+                s[11:-11]
+
+                s[50:]
+                s[:25]
+                s[50:55]
+                s[-1:]
+                s[:1]
+                s[2:]
+                s[11:-11]
+
+    def calibrate(self):
+
+        s = join(map(str,range(100)))
+
+        for i in xrange(self.rounds):
+            pass
+
+### String methods
+
+if hasattr('', 'lower'):
+
+    class StringMappings(Test):
+
+        version = 0.1
+        operations = 3 * (5 + 4 + 2 + 1)
+        rounds = 70000
+
+        def test(self):
+
+            s = join(map(chr,range(20)),'')
+            t = join(map(chr,range(50)),'')
+            u = join(map(chr,range(100)),'')
+            v = join(map(chr,range(256)),'')
+
+            for i in xrange(self.rounds):
+
+                s.lower()
+                s.lower()
+                s.lower()
+                s.lower()
+                s.lower()
+
+                s.upper()
+                s.upper()
+                s.upper()
+                s.upper()
+                s.upper()
+
+                s.title()
+                s.title()
+                s.title()
+                s.title()
+                s.title()
+
+                t.lower()
+                t.lower()
+                t.lower()
+                t.lower()
+
+                t.upper()
+                t.upper()
+                t.upper()
+                t.upper()
+
+                t.title()
+                t.title()
+                t.title()
+                t.title()
+
+                u.lower()
+                u.lower()
+
+                u.upper()
+                u.upper()
+
+                u.title()
+                u.title()
+
+                v.lower()
+
+                v.upper()
+
+                v.title()
+
+        def calibrate(self):
+
+            s = join(map(chr,range(20)),'')
+            t = join(map(chr,range(50)),'')
+            u = join(map(chr,range(100)),'')
+            v = join(map(chr,range(256)),'')
+
+            for i in xrange(self.rounds):
+                pass
+
+    class StringPredicates(Test):
+
+        version = 0.1
+        operations = 10 * 7
+        rounds = 80000
+
+        def test(self):
+
+            data = ('abc', '123', '   ', '\xe4\xf6\xfc', '\xdf'*10)
+            len_data = len(data)
+
+            for i in xrange(self.rounds):
+                s = data[i % len_data]
+
+                s.isalnum()
+                s.isalpha()
+                s.isdigit()
+                s.islower()
+                s.isspace()
+                s.istitle()
+                s.isupper()
+
+                s.isalnum()
+                s.isalpha()
+                s.isdigit()
+                s.islower()
+                s.isspace()
+                s.istitle()
+                s.isupper()
+
+                s.isalnum()
+                s.isalpha()
+                s.isdigit()
+                s.islower()
+                s.isspace()
+                s.istitle()
+                s.isupper()
+
+                s.isalnum()
+                s.isalpha()
+                s.isdigit()
+                s.islower()
+                s.isspace()
+                s.istitle()
+                s.isupper()
+
+                s.isalnum()
+                s.isalpha()
+                s.isdigit()
+                s.islower()
+                s.isspace()
+                s.istitle()
+                s.isupper()
+
+                s.isalnum()
+                s.isalpha()
+                s.isdigit()
+                s.islower()
+                s.isspace()
+                s.istitle()
+                s.isupper()
+
+                s.isalnum()
+                s.isalpha()
+                s.isdigit()
+                s.islower()
+                s.isspace()
+                s.istitle()
+                s.isupper()
+
+                s.isalnum()
+                s.isalpha()
+                s.isdigit()
+                s.islower()
+                s.isspace()
+                s.istitle()
+                s.isupper()
+
+                s.isalnum()
+                s.isalpha()
+                s.isdigit()
+                s.islower()
+                s.isspace()
+                s.istitle()
+                s.isupper()
+
+                s.isalnum()
+                s.isalpha()
+                s.isdigit()
+                s.islower()
+                s.isspace()
+                s.istitle()
+                s.isupper()
+
+        def calibrate(self):
+
+            data = ('abc', '123', '   ', '\u1234\u2345\u3456', '\uFFFF'*10)
+            data = ('abc', '123', '   ', '\xe4\xf6\xfc', '\xdf'*10)
+            len_data = len(data)
+
+            for i in xrange(self.rounds):
+                s = data[i % len_data]
+
+
diff --git a/Tools/pybench/Tuples.py b/Tools/pybench/Tuples.py
new file mode 100644
index 0000000..7854def
--- /dev/null
+++ b/Tools/pybench/Tuples.py
@@ -0,0 +1,365 @@
+from pybench import Test
+
+class TupleSlicing(Test):
+
+    version = 0.31
+    operations = 3 * 25 * 10 * 7
+    rounds = 400
+
+    def test(self):
+
+        r = range(25)
+
+        for i in xrange(self.rounds):
+
+            t = tuple(range(100))
+
+            for j in r:
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+                m = t[50:]
+                m = t[:25]
+                m = t[50:55]
+                m = t[:-1]
+                m = t[1:]
+                m = t[-10:]
+                m = t[:10]
+
+    def calibrate(self):
+
+        r = range(25)
+
+        for i in xrange(self.rounds):
+
+            t = tuple(range(100))
+
+            for j in r:
+                
+                pass
+
+class SmallTuples(Test):
+
+    version = 0.3
+    operations = 5*(1 + 3 + 6 + 2)
+    rounds = 80000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+
+            t = (1,2,3,4,5,6)
+
+            a,b,c,d,e,f = t
+            a,b,c,d,e,f = t
+            a,b,c,d,e,f = t
+
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+
+            l = list(t)
+            t = tuple(l)
+
+            t = (1,2,3,4,5,6)
+
+            a,b,c,d,e,f = t
+            a,b,c,d,e,f = t
+            a,b,c,d,e,f = t
+
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+
+            l = list(t)
+            t = tuple(l)
+
+            t = (1,2,3,4,5,6)
+
+            a,b,c,d,e,f = t
+            a,b,c,d,e,f = t
+            a,b,c,d,e,f = t
+
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+
+            l = list(t)
+            t = tuple(l)
+
+            t = (1,2,3,4,5,6)
+
+            a,b,c,d,e,f = t
+            a,b,c,d,e,f = t
+            a,b,c,d,e,f = t
+
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+
+            l = list(t)
+            t = tuple(l)
+
+            t = (1,2,3,4,5,6)
+
+            a,b,c,d,e,f = t
+            a,b,c,d,e,f = t
+            a,b,c,d,e,f = t
+
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+            a,b,c = t[:3]
+
+            l = list(t)
+            t = tuple(l)
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+
diff --git a/Tools/pybench/Unicode.py b/Tools/pybench/Unicode.py
new file mode 100644
index 0000000..855fcf2
--- /dev/null
+++ b/Tools/pybench/Unicode.py
@@ -0,0 +1,542 @@
+try:
+    unicode
+except NameError:
+    raise ImportError
+
+from pybench import Test
+from string import join
+
+class ConcatUnicode(Test):
+
+    version = 0.1
+    operations = 10 * 5
+    rounds = 60000
+
+    def test(self):
+
+        # Make sure the strings are *not* interned
+        s = unicode(join(map(str,range(100))))
+        t = unicode(join(map(str,range(1,101))))
+
+        for i in xrange(self.rounds):
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+            t + s
+            t + s
+            t + s
+            t + s
+            t + s
+
+    def calibrate(self):
+
+        s = unicode(join(map(str,range(100))))
+        t = unicode(join(map(str,range(1,101))))
+
+        for i in xrange(self.rounds):
+            pass
+            
+
+class CompareUnicode(Test):
+
+    version = 0.1
+    operations = 10 * 5
+    rounds = 150000
+
+    def test(self):
+
+        # Make sure the strings are *not* interned
+        s = unicode(join(map(str,range(10))))
+        t = unicode(join(map(str,range(10))) + "abc")
+
+        for i in xrange(self.rounds):
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+            t < s
+            t > s
+            t == s
+            t > s
+            t < s
+
+    def calibrate(self):
+
+        s = unicode(join(map(str,range(10))))
+        t = unicode(join(map(str,range(10))) + "abc")
+
+        for i in xrange(self.rounds):
+            pass
+            
+
+class CreateUnicodeWithConcat(Test):
+
+    version = 0.1
+    operations = 10 * 5
+    rounds = 80000
+
+    def test(self):
+
+        for i in xrange(self.rounds):
+            s = u'om'
+            s = s + u'xbx'
+            s = s + u'xcx'
+            s = s + u'xdx'
+            s = s + u'xex'
+
+            s = s + u'xax'
+            s = s + u'xbx'
+            s = s + u'xcx'
+            s = s + u'xdx'
+            s = s + u'xex'
+
+            s = s + u'xax'
+            s = s + u'xbx'
+            s = s + u'xcx'
+            s = s + u'xdx'
+            s = s + u'xex'
+
+            s = s + u'xax'
+            s = s + u'xbx'
+            s = s + u'xcx'
+            s = s + u'xdx'
+            s = s + u'xex'
+
+            s = s + u'xax'
+            s = s + u'xbx'
+            s = s + u'xcx'
+            s = s + u'xdx'
+            s = s + u'xex'
+
+            s = s + u'xax'
+            s = s + u'xbx'
+            s = s + u'xcx'
+            s = s + u'xdx'
+            s = s + u'xex'
+
+            s = s + u'xax'
+            s = s + u'xbx'
+            s = s + u'xcx'
+            s = s + u'xdx'
+            s = s + u'xex'
+
+            s = s + u'xax'
+            s = s + u'xbx'
+            s = s + u'xcx'
+            s = s + u'xdx'
+            s = s + u'xex'
+
+            s = s + u'xax'
+            s = s + u'xbx'
+            s = s + u'xcx'
+            s = s + u'xdx'
+            s = s + u'xex'
+
+            s = s + u'xax'
+            s = s + u'xbx'
+            s = s + u'xcx'
+            s = s + u'xdx'
+            s = s + u'xex'
+
+    def calibrate(self):
+
+        for i in xrange(self.rounds):
+            pass
+            
+
+class UnicodeSlicing(Test):
+
+    version = 0.1
+    operations = 5 * 7
+    rounds = 100000
+
+    def test(self):
+
+        s = unicode(join(map(str,range(100))))
+
+        for i in xrange(self.rounds):
+
+            s[50:]
+            s[:25]
+            s[50:55]
+            s[-1:]
+            s[:1]
+            s[2:]
+            s[11:-11]
+
+            s[50:]
+            s[:25]
+            s[50:55]
+            s[-1:]
+            s[:1]
+            s[2:]
+            s[11:-11]
+
+            s[50:]
+            s[:25]
+            s[50:55]
+            s[-1:]
+            s[:1]
+            s[2:]
+            s[11:-11]
+
+            s[50:]
+            s[:25]
+            s[50:55]
+            s[-1:]
+            s[:1]
+            s[2:]
+            s[11:-11]
+
+            s[50:]
+            s[:25]
+            s[50:55]
+            s[-1:]
+            s[:1]
+            s[2:]
+            s[11:-11]
+
+    def calibrate(self):
+
+        s = unicode(join(map(str,range(100))))
+
+        for i in xrange(self.rounds):
+            pass
+        
+### String methods
+
+class UnicodeMappings(Test):
+
+    version = 0.1
+    operations = 3 * (5 + 4 + 2 + 1)
+    rounds = 10000
+
+    def test(self):
+
+        s = join(map(unichr,range(20)),'')
+        t = join(map(unichr,range(100)),'')
+        u = join(map(unichr,range(500)),'')
+        v = join(map(unichr,range(1000)),'')
+        
+        for i in xrange(self.rounds):
+
+            s.lower()
+            s.lower()
+            s.lower()
+            s.lower()
+            s.lower()
+
+            s.upper()
+            s.upper()
+            s.upper()
+            s.upper()
+            s.upper()
+
+            s.title()
+            s.title()
+            s.title()
+            s.title()
+            s.title()
+
+            t.lower()
+            t.lower()
+            t.lower()
+            t.lower()
+
+            t.upper()
+            t.upper()
+            t.upper()
+            t.upper()
+
+            t.title()
+            t.title()
+            t.title()
+            t.title()
+
+            u.lower()
+            u.lower()
+
+            u.upper()
+            u.upper()
+
+            u.title()
+            u.title()
+
+            v.lower()
+
+            v.upper()
+
+            v.title()
+
+    def calibrate(self):
+
+        s = join(map(unichr,range(20)),'')
+        t = join(map(unichr,range(100)),'')
+        u = join(map(unichr,range(500)),'')
+        v = join(map(unichr,range(1000)),'')
+        
+        for i in xrange(self.rounds):
+            pass
+
+class UnicodePredicates(Test):
+
+    version = 0.1
+    operations = 5 * 9
+    rounds = 100000
+
+    def test(self):
+
+        data = (u'abc', u'123', u'   ', u'\u1234\u2345\u3456', u'\uFFFF'*10)
+        len_data = len(data)
+        
+        for i in xrange(self.rounds):
+            s = data[i % len_data]
+
+            s.isalnum()
+            s.isalpha()
+            s.isdecimal()
+            s.isdigit()
+            s.islower()
+            s.isnumeric()
+            s.isspace()
+            s.istitle()
+            s.isupper()
+
+            s.isalnum()
+            s.isalpha()
+            s.isdecimal()
+            s.isdigit()
+            s.islower()
+            s.isnumeric()
+            s.isspace()
+            s.istitle()
+            s.isupper()
+
+            s.isalnum()
+            s.isalpha()
+            s.isdecimal()
+            s.isdigit()
+            s.islower()
+            s.isnumeric()
+            s.isspace()
+            s.istitle()
+            s.isupper()
+
+            s.isalnum()
+            s.isalpha()
+            s.isdecimal()
+            s.isdigit()
+            s.islower()
+            s.isnumeric()
+            s.isspace()
+            s.istitle()
+            s.isupper()
+
+            s.isalnum()
+            s.isalpha()
+            s.isdecimal()
+            s.isdigit()
+            s.islower()
+            s.isnumeric()
+            s.isspace()
+            s.istitle()
+            s.isupper()
+
+    def calibrate(self):
+
+        data = (u'abc', u'123', u'   ', u'\u1234\u2345\u3456', u'\uFFFF'*10)
+        len_data = len(data)
+        
+        for i in xrange(self.rounds):
+            s = data[i % len_data]
+
+try:
+    import unicodedata
+except ImportError:
+    pass
+else:
+    class UnicodeProperties(Test):
+
+        version = 0.1
+        operations = 5 * 8
+        rounds = 100000
+
+        def test(self):
+
+            data = (u'a', u'1', u' ', u'\u1234', u'\uFFFF')
+            len_data = len(data)
+            digit = unicodedata.digit
+            numeric = unicodedata.numeric
+            decimal = unicodedata.decimal
+            category = unicodedata.category
+            bidirectional = unicodedata.bidirectional
+            decomposition = unicodedata.decomposition
+            mirrored = unicodedata.mirrored
+            combining = unicodedata.combining
+
+            for i in xrange(self.rounds):
+
+                c = data[i % len_data]
+
+                digit(c, None)
+                numeric(c, None)
+                decimal(c, None)
+                category(c)
+                bidirectional(c)
+                decomposition(c)
+                mirrored(c)
+                combining(c)
+
+                digit(c, None)
+                numeric(c, None)
+                decimal(c, None)
+                category(c)
+                bidirectional(c)
+                decomposition(c)
+                mirrored(c)
+                combining(c)
+
+                digit(c, None)
+                numeric(c, None)
+                decimal(c, None)
+                category(c)
+                bidirectional(c)
+                decomposition(c)
+                mirrored(c)
+                combining(c)
+
+                digit(c, None)
+                numeric(c, None)
+                decimal(c, None)
+                category(c)
+                bidirectional(c)
+                decomposition(c)
+                mirrored(c)
+                combining(c)
+
+                digit(c, None)
+                numeric(c, None)
+                decimal(c, None)
+                category(c)
+                bidirectional(c)
+                decomposition(c)
+                mirrored(c)
+                combining(c)
+
+        def calibrate(self):
+
+            data = (u'a', u'1', u' ', u'\u1234', u'\uFFFF')
+            len_data = len(data)
+            digit = unicodedata.digit
+            numeric = unicodedata.numeric
+            decimal = unicodedata.decimal
+            category = unicodedata.category
+            bidirectional = unicodedata.bidirectional
+            decomposition = unicodedata.decomposition
+            mirrored = unicodedata.mirrored
+            combining = unicodedata.combining
+
+            for i in xrange(self.rounds):
+
+                c = data[i % len_data]
diff --git a/Tools/pybench/package/__init__.py b/Tools/pybench/package/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Tools/pybench/package/__init__.py
diff --git a/Tools/pybench/package/submodule.py b/Tools/pybench/package/submodule.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/Tools/pybench/package/submodule.py
diff --git a/Tools/pybench/pybench.py b/Tools/pybench/pybench.py
new file mode 100755
index 0000000..6f10bd1
--- /dev/null
+++ b/Tools/pybench/pybench.py
@@ -0,0 +1,461 @@
+#!/usr/local/bin/python -O
+
+""" A Python Benchmark Suite
+
+"""
+#
+# Note: Please keep this module compatible to Python 1.5.2.
+#
+# Tests may include features in later Python versions, but these
+# should then be embedded in try-except clauses in the configuration
+# module Setup.py.
+#
+
+# pybench Copyright
+__copyright__ = """\
+Copyright (c), 1997-2006, Marc-Andre Lemburg (mal@lemburg.com)
+Copyright (c), 2000-2006, eGenix.com Software GmbH (info@egenix.com)
+
+                   All Rights Reserved.
+
+Permission to use, copy, modify, and distribute this software and its
+documentation for any purpose and without fee or royalty is hereby
+granted, provided that the above copyright notice appear in all copies
+and that both that copyright notice and this permission notice appear
+in supporting documentation or portions thereof, including
+modifications, that you make.
+
+THE AUTHOR MARC-ANDRE LEMBURG DISCLAIMS ALL WARRANTIES WITH REGARD TO
+THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND
+FITNESS, IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL,
+INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING
+FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT,
+NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION
+WITH THE USE OR PERFORMANCE OF THIS SOFTWARE !
+"""
+
+# Version number
+__version__ = '1.3'
+
+#
+# NOTE: Use xrange for all test loops unless you want to face 
+#       a 20MB process !
+#
+# All tests should have rounds set to values so that a run()
+# takes between 20-50 seconds. This is to get fairly good
+# clock() values. You can use option -w to speedup the tests
+# by a fixed integer factor (the "warp factor").
+#
+
+import sys,time,operator
+from CommandLine import *
+
+try:
+    import cPickle
+    pickle = cPickle
+except ImportError:
+    import pickle
+
+### Test baseclass
+
+class Test:
+
+    """ All test must have this class as baseclass. It provides
+        the necessary interface to the benchmark machinery.
+
+        The tests must set .rounds to a value high enough to let the
+        test run between 20-50 seconds. This is needed because
+        clock()-timing only gives rather inaccurate values (on Linux,
+        for example, it is accurate to a few hundreths of a
+        second). If you don't want to wait that long, use a warp
+        factor larger than 1.
+
+        It is also important to set the .operations variable to a
+        value representing the number of "virtual operations" done per
+        call of .run().
+
+        If you change a test in some way, don't forget to increase
+        it's version number.
+
+    """
+
+    ### Instance variables that each test should override
+
+    # Version number of the test as float (x.yy); this is important
+    # for comparisons of benchmark runs - tests with unequal version
+    # number will not get compared.
+    version = 1.0
+    
+    # The number of abstract operations done in each round of the
+    # test. An operation is the basic unit of what you want to
+    # measure. The benchmark will output the amount of run-time per
+    # operation. Note that in order to raise the measured timings
+    # significantly above noise level, it is often required to repeat
+    # sets of operations more than once per test round. The measured
+    # overhead per test round should be less than 1 second.
+    operations = 1
+
+    # Number of rounds to execute per test run. This should be
+    # adjusted to a figure that results in a test run-time of between
+    # 20-50 seconds.
+    rounds = 100000
+
+    ### Internal variables
+
+    # Mark this class as implementing a test
+    is_a_test = 1
+
+    # Misc. internal variables
+    last_timing = (0,0,0) # last timing (real,run,calibration)
+    warp = 1            # warp factor this test uses
+    cruns = 20          # number of calibration runs
+    overhead = None     # list of calibration timings
+
+    def __init__(self,warp=1):
+
+        if warp > 1:
+            self.rounds = self.rounds / warp
+            self.warp = warp
+        self.times = []
+        self.overhead = []
+        # We want these to be in the instance dict, so that pickle
+        # saves them
+        self.version = self.version
+        self.operations = self.operations
+        self.rounds = self.rounds
+
+    def run(self):
+
+        """ Run the test in two phases: first calibrate, then
+            do the actual test. Be careful to keep the calibration
+            timing low w/r to the test timing.
+            
+        """
+        test = self.test
+        calibrate = self.calibrate
+        clock = time.clock
+        cruns = self.cruns
+        # first calibrate
+        offset = 0.0
+        for i in range(cruns):
+            t = clock()
+            calibrate()
+            t = clock() - t
+            offset = offset + t
+        offset = offset / cruns
+        # now the real thing
+        t = clock() 
+        test()
+        t = clock() - t
+        self.last_timing = (t-offset,t,offset)
+        self.times.append(t-offset)
+
+    def calibrate(self):
+
+	""" Calibrate the test.
+
+	    This method should execute everything that is needed to
+	    setup and run the test - except for the actual operations
+	    that you intend to measure. pybench uses this method to
+	    measure the test implementation overhead.
+
+        """
+        return
+
+    def test(self):
+
+	""" Run the test.
+
+	    The test needs to run self.rounds executing
+	    self.operations number of operations each.
+
+        """
+        # do some tests
+        return
+    
+    def stat(self):
+
+        """ Returns two value: average time per run and average per
+            operation.
+            
+        """
+        runs = len(self.times)
+        if runs == 0:
+            return 0,0
+        totaltime = reduce(operator.add,self.times,0.0)
+        avg = totaltime / float(runs)
+        op_avg = totaltime / float(runs * self.rounds * self.operations)
+        if self.overhead:
+            totaloverhead = reduce(operator.add,self.overhead,0.0)
+            ov_avg = totaloverhead / float(runs)
+        else:
+            # use self.last_timing - not too accurate
+            ov_avg = self.last_timing[2]
+        return avg,op_avg,ov_avg
+
+### Load Setup
+
+# This has to be done after the definition of the Test class, since
+# the Setup module will import subclasses using this class.
+
+import Setup
+
+### Benchmark base class
+
+class Benchmark:
+
+    name = '?'                  # Name of the benchmark
+    rounds = 1                  # Number of rounds to run
+    warp = 1                    # Warp factor
+    roundtime = 0               # Average round time
+    version = None              # Benchmark version number (see __init__)
+                                # as float x.yy
+    starttime = None		# Benchmark start time
+
+    def __init__(self):
+
+        self.tests = {}
+        self.version = 0.31
+
+    def load_tests(self,setupmod,warp=1):
+
+        self.warp = warp
+        tests = self.tests
+        print 'Searching for tests...'
+        setupmod.__dict__.values()
+        for c in setupmod.__dict__.values():
+            if hasattr(c,'is_a_test') and c.__name__ != 'Test':
+                tests[c.__name__] = c(warp)
+        l = tests.keys()
+        l.sort()
+        for t in l:
+            print '  ',t
+        print
+
+    def run(self):
+
+        tests = self.tests.items()
+        tests.sort()
+        clock = time.clock
+        print 'Running %i round(s) of the suite: ' % self.rounds
+        print
+        self.starttime = time.time()
+        roundtime = clock()
+        for i in range(self.rounds):
+            print ' Round %-25i  real   abs    overhead' % (i+1)
+            for j in range(len(tests)):
+                name,t = tests[j]
+                print '%30s:' % name,
+                t.run()
+                print '  %.3fr %.3fa %.3fo' % t.last_timing
+            print '                                 ----------------------'
+            print '            Average round time:      %.3f seconds' % \
+                  ((clock() - roundtime)/(i+1))
+            print
+        self.roundtime = (clock() - roundtime) / self.rounds
+        print
+    
+    def print_stat(self, compare_to=None, hidenoise=0):
+
+        if not compare_to:
+            print '%-30s      per run    per oper.   overhead' % 'Tests:'
+            print '-'*72
+            tests = self.tests.items()
+            tests.sort()
+            for name,t in tests:
+                avg,op_avg,ov_avg = t.stat()
+                print '%30s: %10.2f ms %7.2f us %7.2f ms' % \
+                      (name,avg*1000.0,op_avg*1000000.0,ov_avg*1000.0)
+            print '-'*72
+            print '%30s: %10.2f ms' % \
+                  ('Average round time',self.roundtime * 1000.0)
+
+        else:
+            print '%-30s      per run    per oper.    diff *)' % \
+                  'Tests:'
+            print '-'*72
+            tests = self.tests.items()
+            tests.sort()
+            compatible = 1
+            for name,t in tests:
+                avg,op_avg,ov_avg = t.stat()
+                try:
+                    other = compare_to.tests[name]
+                except KeyError:
+                    other = None
+                if other and other.version == t.version and \
+                   other.operations == t.operations:
+                    avg1,op_avg1,ov_avg1 = other.stat()
+                    qop_avg = (op_avg/op_avg1-1.0)*100.0
+                    if hidenoise and abs(qop_avg) < 10:
+                        qop_avg = ''
+                    else:
+                        qop_avg = '%+7.2f%%' % qop_avg
+                else:
+                    qavg,qop_avg = 'n/a', 'n/a'
+                    compatible = 0
+                print '%30s: %10.2f ms %7.2f us  %8s' % \
+                      (name,avg*1000.0,op_avg*1000000.0,qop_avg)
+            print '-'*72
+            if compatible and compare_to.roundtime > 0 and \
+               compare_to.version == self.version:
+                print '%30s: %10.2f ms             %+7.2f%%' % \
+                      ('Average round time',self.roundtime * 1000.0,
+                       ((self.roundtime*self.warp)/
+                        (compare_to.roundtime*compare_to.warp)-1.0)*100.0)
+            else:
+                print '%30s: %10.2f ms                  n/a' % \
+                      ('Average round time',self.roundtime * 1000.0)
+            print
+            print '*) measured against: %s (rounds=%i, warp=%i)' % \
+                  (compare_to.name,compare_to.rounds,compare_to.warp)
+        print
+
+def print_machine():
+
+    import platform
+    print 'Machine Details:'
+    print '   Platform ID:  %s' % platform.platform()
+    print '   Executable:   %s' % sys.executable
+    # There's a bug in Python 2.2b1+...
+    if sys.version[:6] == '2.2b1+':
+        return
+    print '   Python:       %s' % platform.python_version()
+    print '   Compiler:     %s' % platform.python_compiler()
+    buildno, builddate = platform.python_build()
+    print '   Build:        %s (#%s)' % (builddate, buildno)
+
+class PyBenchCmdline(Application):
+
+    header = ("PYBENCH - a benchmark test suite for Python "
+              "interpreters/compilers.")
+
+    version = __version__
+
+    options = [ArgumentOption('-n','number of rounds',Setup.Number_of_rounds),
+               ArgumentOption('-f','save benchmark to file arg',''),
+               ArgumentOption('-c','compare benchmark with the one in file arg',''),
+               ArgumentOption('-s','show benchmark in file arg, then exit',''),
+               SwitchOption('-S','show statistics of benchmarks',0),
+               ArgumentOption('-w','set warp factor to arg',Setup.Warp_factor),
+               SwitchOption('-d','hide noise in compares', 0),
+               SwitchOption('--no-gc','disable garbage collection', 0),
+               ]
+
+    about = """\
+The normal operation is to run the suite and display the
+results. Use -f to save them for later reuse or comparisms.
+
+Examples:
+
+python1.5 pybench.py -w 100 -f p15
+python1.4 pybench.py -w 100 -f p14
+python pybench.py -s p15 -c p14
+"""
+    copyright = __copyright__
+
+    def handle_S(self, value):
+
+        """ Display one line stats for each benchmark file given on the
+            command line.
+
+        """
+        for benchmark in self.files:
+            try:
+                f = open(benchmark, 'rb')
+                bench = pickle.load(f)
+                f.close()
+            except IOError:
+                print '* Error opening/reading file %s' % repr(benchmark)
+            else:
+                print '%s,%-.2f,ms' % (benchmark, bench.roundtime*1000.0)
+        return 0
+
+    def main(self):
+
+        rounds = self.values['-n']
+        reportfile = self.values['-f']
+        show_bench = self.values['-s']
+        compare_to = self.values['-c']
+        hidenoise = self.values['-d']
+        warp = self.values['-w']
+        nogc = self.values['--no-gc']
+        
+        # Switch off GC
+        if nogc:
+            try:
+                import gc
+            except ImportError:
+                nogc = 0
+            else:
+                if self.values['--no-gc']:
+                    gc.disable()
+
+        print 'PYBENCH',__version__
+        print
+
+        if not compare_to:
+            print_machine()
+            print
+
+        if compare_to:
+            try:
+                f = open(compare_to,'rb')
+                bench = pickle.load(f)
+                bench.name = compare_to
+                f.close()
+                compare_to = bench
+            except IOError:
+                print '* Error opening/reading file',compare_to
+                compare_to = None    
+
+        if show_bench:
+            try:
+                f = open(show_bench,'rb')
+                bench = pickle.load(f)
+                bench.name = show_bench
+                f.close()
+                print 'Benchmark: %s (rounds=%i, warp=%i)' % \
+                      (bench.name,bench.rounds,bench.warp)
+                print
+                bench.print_stat(compare_to, hidenoise)
+            except IOError:
+                print '* Error opening/reading file',show_bench
+                print
+            return
+
+        if reportfile:
+            if nogc:
+                print 'Benchmark: %s (rounds=%i, warp=%i, no GC)' % \
+                      (reportfile,rounds,warp)
+            else:
+                print 'Benchmark: %s (rounds=%i, warp=%i)' % \
+                      (reportfile,rounds,warp)
+            print
+
+        # Create benchmark object
+        bench = Benchmark()
+        bench.rounds = rounds
+        bench.load_tests(Setup,warp)
+        try:
+            bench.run()
+        except KeyboardInterrupt:
+            print
+            print '*** KeyboardInterrupt -- Aborting'
+            print
+            return
+        bench.print_stat(compare_to)
+        # ring bell
+        sys.stderr.write('\007')
+
+        if reportfile:
+            try:
+                f = open(reportfile,'wb')
+                bench.name = reportfile
+                pickle.dump(bench,f)
+                f.close()
+            except IOError:
+                print '* Error opening/writing reportfile'
+
+if __name__ == '__main__':
+    PyBenchCmdline()
diff --git a/Tools/scripts/byext.py b/Tools/scripts/byext.py
index 93759bc..09610b0 100644
--- a/Tools/scripts/byext.py
+++ b/Tools/scripts/byext.py
@@ -17,7 +17,7 @@
             elif os.path.isfile(arg):
                 self.statfile(arg)
             else:
-                sys.stderr.write("Can't find %s\n" % file)
+                sys.stderr.write("Can't find %s\n" % arg)
                 self.addstats("<???>", "unknown", 1)
 
     def statdir(self, dir):
@@ -25,8 +25,8 @@
         try:
             names = os.listdir(dir)
         except os.error, err:
-            sys.stderr.write("Can't list %s: %s\n" % (file, err))
-            self.addstats(ext, "unlistable", 1)
+            sys.stderr.write("Can't list %s: %s\n" % (dir, err))
+            self.addstats("<dir>", "unlistable", 1)
             return
         names.sort()
         for name in names:
@@ -42,9 +42,9 @@
             else:
                 self.statfile(full)
 
-    def statfile(self, file):
-        head, ext = os.path.splitext(file)
-        head, base = os.path.split(file)
+    def statfile(self, filename):
+        head, ext = os.path.splitext(filename)
+        head, base = os.path.split(filename)
         if ext == base:
             ext = "" # E.g. .cvsignore is deemed not to have an extension
         ext = os.path.normcase(ext)
@@ -52,9 +52,9 @@
             ext = "<none>"
         self.addstats(ext, "files", 1)
         try:
-            f = open(file, "rb")
+            f = open(filename, "rb")
         except IOError, err:
-            sys.stderr.write("Can't open %s: %s\n" % (file, err))
+            sys.stderr.write("Can't open %s: %s\n" % (filename, err))
             self.addstats(ext, "unopenable", 1)
             return
         data = f.read()
diff --git a/Tools/scripts/classfix.py b/Tools/scripts/classfix.py
index cdf006a..d30700f 100755
--- a/Tools/scripts/classfix.py
+++ b/Tools/scripts/classfix.py
@@ -30,7 +30,7 @@
 # into a program for a different change to Python programs...
 
 import sys
-import regex
+import re
 import os
 from stat import *
 
@@ -53,7 +53,7 @@
             if fix(arg): bad = 1
     sys.exit(bad)
 
-ispythonprog = regex.compile('^[a-zA-Z0-9_]+\.py$')
+ispythonprog = re.compile('^[a-zA-Z0-9_]+\.py$')
 def ispython(name):
     return ispythonprog.match(name) >= 0
 
@@ -148,12 +148,12 @@
 
 # This expression doesn't catch *all* class definition headers,
 # but it's pretty darn close.
-classexpr = '^\([ \t]*class +[a-zA-Z0-9_]+\) *( *) *\(\(=.*\)?\):'
-classprog = regex.compile(classexpr)
+classexpr = '^([ \t]*class +[a-zA-Z0-9_]+) *( *) *((=.*)?):'
+classprog = re.compile(classexpr)
 
 # Expressions for finding base class expressions.
-baseexpr = '^ *\(.*\) *( *) *$'
-baseprog = regex.compile(baseexpr)
+baseexpr = '^ *(.*) *( *) *$'
+baseprog = re.compile(baseexpr)
 
 def fixline(line):
     if classprog.match(line) < 0: # No 'class' keyword -- no change
diff --git a/Tools/scripts/fixcid.py b/Tools/scripts/fixcid.py
index 42aa835..433a425 100755
--- a/Tools/scripts/fixcid.py
+++ b/Tools/scripts/fixcid.py
@@ -35,7 +35,7 @@
 # files.
 
 import sys
-import regex
+import re
 import os
 from stat import *
 import getopt
@@ -90,7 +90,7 @@
 # Change this regular expression to select a different set of files
 Wanted = '^[a-zA-Z0-9_]+\.[ch]$'
 def wanted(name):
-    return regex.match(Wanted, name) >= 0
+    return re.match(Wanted, name) >= 0
 
 def recursedown(dirname):
     dbg('recursedown(%r)\n' % (dirname,))
@@ -212,12 +212,12 @@
 # Anything else is an operator -- don't list this explicitly because of '/*'
 
 OutsideComment = (Identifier, Number, String, Char, CommentStart)
-OutsideCommentPattern = '\(' + '\|'.join(OutsideComment) + '\)'
-OutsideCommentProgram = regex.compile(OutsideCommentPattern)
+OutsideCommentPattern = '(' + '|'.join(OutsideComment) + ')'
+OutsideCommentProgram = re.compile(OutsideCommentPattern)
 
 InsideComment = (Identifier, Number, CommentEnd)
-InsideCommentPattern = '\(' + '\|'.join(InsideComment) + '\)'
-InsideCommentProgram = regex.compile(InsideCommentPattern)
+InsideCommentPattern = '(' + '|'.join(InsideComment) + ')'
+InsideCommentProgram = re.compile(InsideCommentPattern)
 
 def initfixline():
     global Program
diff --git a/Tools/scripts/ifdef.py b/Tools/scripts/ifdef.py
index 7e7b5cc..2ed7a66 100755
--- a/Tools/scripts/ifdef.py
+++ b/Tools/scripts/ifdef.py
@@ -27,7 +27,6 @@
 # preprocessor commands.
 
 import sys
-import regex
 import getopt
 
 defs = []
diff --git a/Tools/scripts/methfix.py b/Tools/scripts/methfix.py
index a872ab7..b81871f 100755
--- a/Tools/scripts/methfix.py
+++ b/Tools/scripts/methfix.py
@@ -27,7 +27,7 @@
 # into a program for a different change to Python programs...
 
 import sys
-import regex
+import re
 import os
 from stat import *
 
@@ -50,7 +50,7 @@
             if fix(arg): bad = 1
     sys.exit(bad)
 
-ispythonprog = regex.compile('^[a-zA-Z0-9_]+\.py$')
+ispythonprog = re.compile('^[a-zA-Z0-9_]+\.py$')
 def ispython(name):
     return ispythonprog.match(name) >= 0
 
@@ -101,7 +101,7 @@
         if lineno == 1 and g is None and line[:2] == '#!':
             # Check for non-Python scripts
             words = line[2:].split()
-            if words and regex.search('[pP]ython', words[0]) < 0:
+            if words and re.search('[pP]ython', words[0]) < 0:
                 msg = filename + ': ' + words[0]
                 msg = msg + ' script; not fixed\n'
                 err(msg)
@@ -158,8 +158,8 @@
     return 0
 
 
-fixpat = '^[ \t]+def +[a-zA-Z0-9_]+ *( *self *, *\(( *\(.*\) *)\) *) *:'
-fixprog = regex.compile(fixpat)
+fixpat = '^[ \t]+def +[a-zA-Z0-9_]+ *( *self *, *(( *(.*) *)) *) *:'
+fixprog = re.compile(fixpat)
 
 def fixline(line):
     if fixprog.match(line) >= 0:
diff --git a/Tools/scripts/objgraph.py b/Tools/scripts/objgraph.py
index 01060f9..f74c2b6 100755
--- a/Tools/scripts/objgraph.py
+++ b/Tools/scripts/objgraph.py
@@ -22,7 +22,7 @@
 import sys
 import os
 import getopt
-import regex
+import re
 
 # Types of symbols.
 #
@@ -32,7 +32,7 @@
 
 # Regular expression to parse "nm -o" output.
 #
-matcher = regex.compile('\(.*\):\t?........ \(.\) \(.*\)$')
+matcher = re.compile('(.*):\t?........ (.) (.*)$')
 
 # Store "item" in "dict" under "key".
 # The dictionary maps keys to lists of items.
diff --git a/Tools/scripts/pathfix.py b/Tools/scripts/pathfix.py
index 5cb5add..7f6f191 100755
--- a/Tools/scripts/pathfix.py
+++ b/Tools/scripts/pathfix.py
@@ -20,7 +20,7 @@
 # into a program for a different change to Python programs...
 
 import sys
-import regex
+import re
 import os
 from stat import *
 import getopt
@@ -59,7 +59,7 @@
             if fix(arg): bad = 1
     sys.exit(bad)
 
-ispythonprog = regex.compile('^[a-zA-Z0-9_]+\.py$')
+ispythonprog = re.compile('^[a-zA-Z0-9_]+\.py$')
 def ispython(name):
     return ispythonprog.match(name) >= 0
 
diff --git a/Tools/scripts/pdeps.py b/Tools/scripts/pdeps.py
index e835f84..da63e35 100755
--- a/Tools/scripts/pdeps.py
+++ b/Tools/scripts/pdeps.py
@@ -21,7 +21,7 @@
 
 
 import sys
-import regex
+import re
 import os
 
 
@@ -57,8 +57,8 @@
 
 # Compiled regular expressions to search for import statements
 #
-m_import = regex.compile('^[ \t]*from[ \t]+\([^ \t]+\)[ \t]+')
-m_from = regex.compile('^[ \t]*import[ \t]+\([^#]+\)')
+m_import = re.compile('^[ \t]*from[ \t]+([^ \t]+)[ \t]+')
+m_from = re.compile('^[ \t]*import[ \t]+([^#]+)')
 
 
 # Collect data from one file
diff --git a/Tools/unicode/Makefile b/Tools/unicode/Makefile
index f266d4d..fbd3557 100644
--- a/Tools/unicode/Makefile
+++ b/Tools/unicode/Makefile
@@ -15,7 +15,7 @@
 
 all:	distclean mappings codecs
 
-codecs:	misc windows iso apple ebcdic custom-mappings
+codecs:	misc windows iso apple ebcdic custom-mappings cjk
 
 ### Mappings
 
@@ -72,6 +72,9 @@
 	$(PYTHON) gencodec.py MAPPINGS/VENDORS/MICSFT/EBCDIC/ build/
 	$(RM) -f build/readme.*
 
+cjk:	build/
+	$(PYTHON) gencjkcodecs.py build/
+
 ### Cleanup
 
 clean:
diff --git a/Tools/unicode/gencjkcodecs.py b/Tools/unicode/gencjkcodecs.py
new file mode 100644
index 0000000..975c19c
--- /dev/null
+++ b/Tools/unicode/gencjkcodecs.py
@@ -0,0 +1,68 @@
+import os, string
+
+codecs = {
+    'cn': ('gb2312', 'gbk', 'gb18030', 'hz'),
+    'tw': ('big5', 'cp950'),
+    'hk': ('big5hkscs',),
+    'jp': ('cp932', 'shift_jis', 'euc_jp', 'euc_jisx0213', 'shift_jisx0213',
+           'euc_jis_2004', 'shift_jis_2004'),
+    'kr': ('cp949', 'euc_kr', 'johab'),
+    'iso2022': ('iso2022_jp', 'iso2022_jp_1', 'iso2022_jp_2',
+                'iso2022_jp_2004', 'iso2022_jp_3', 'iso2022_jp_ext',
+                'iso2022_kr'),
+}
+
+TEMPLATE = string.Template("""\
+#
+# $encoding.py: Python Unicode Codec for $ENCODING
+#
+# Written by Hye-Shik Chang <perky@FreeBSD.org>
+#
+
+import _codecs_$owner, codecs
+import _multibytecodec as mbc
+
+codec = _codecs_$owner.getcodec('$encoding')
+
+class Codec(codecs.Codec):
+    encode = codec.encode
+    decode = codec.decode
+
+class IncrementalEncoder(mbc.MultibyteIncrementalEncoder,
+                         codecs.IncrementalEncoder):
+    codec = codec
+
+class IncrementalDecoder(mbc.MultibyteIncrementalDecoder,
+                         codecs.IncrementalDecoder):
+    codec = codec
+
+class StreamReader(Codec, mbc.MultibyteStreamReader, codecs.StreamReader):
+    codec = codec
+
+class StreamWriter(Codec, mbc.MultibyteStreamWriter, codecs.StreamWriter):
+    codec = codec
+
+def getregentry():
+    return codecs.CodecInfo(
+        name='$encoding',
+        encode=Codec().encode,
+        decode=Codec().decode,
+        incrementalencoder=IncrementalEncoder,
+        incrementaldecoder=IncrementalDecoder,
+        streamreader=StreamReader,
+        streamwriter=StreamWriter,
+    )
+""")
+
+def gencodecs(prefix):
+    for loc, encodings in codecs.iteritems():
+        for enc in encodings:
+            code = TEMPLATE.substitute(ENCODING=enc.upper(),
+                                       encoding=enc.lower(),
+                                       owner=loc)
+            codecpath = os.path.join(prefix, enc + '.py')
+            open(codecpath, 'w').write(code)
+
+if __name__ == '__main__':
+    import sys
+    gencodecs(sys.argv[1])
diff --git a/Tools/unicode/gencodec.py b/Tools/unicode/gencodec.py
index bb1c9da..3cfef20 100644
--- a/Tools/unicode/gencodec.py
+++ b/Tools/unicode/gencodec.py
@@ -348,7 +348,7 @@
     l.extend(encoding_map_code)
 
     # Final new-line
-    l.append('\n')
+    l.append('')
 
     return '\n'.join(l).expandtabs()
 
diff --git a/configure b/configure
index 3e322e6..0250d9e 100755
--- a/configure
+++ b/configure
@@ -1,5 +1,5 @@
 #! /bin/sh
-# From configure.in Revision: 42563 .
+# From configure.in Revision: 43459 .
 # Guess values for system-dependent variables and create Makefiles.
 # Generated by GNU Autoconf 2.59 for python 3.0.
 #
@@ -312,7 +312,7 @@
 # include <unistd.h>
 #endif"
 
-ac_subst_vars='SHELL PATH_SEPARATOR PACKAGE_NAME PACKAGE_TARNAME PACKAGE_VERSION PACKAGE_STRING PACKAGE_BUGREPORT exec_prefix prefix program_transform_name bindir sbindir libexecdir datadir sysconfdir sharedstatedir localstatedir libdir includedir oldincludedir infodir mandir build_alias host_alias target_alias DEFS ECHO_C ECHO_N ECHO_T LIBS VERSION SOVERSION CONFIG_ARGS PYTHONFRAMEWORK PYTHONFRAMEWORKDIR PYTHONFRAMEWORKPREFIX PYTHONFRAMEWORKINSTALLDIR MACHDEP SGI_ABI EXTRAPLATDIR EXTRAMACHDEPPATH CONFIGURE_MACOSX_DEPLOYMENT_TARGET CXX MAINOBJ EXEEXT CC CFLAGS LDFLAGS CPPFLAGS ac_ct_CC OBJEXT CPP EGREP BUILDEXEEXT LIBRARY LDLIBRARY DLLLIBRARY BLDLIBRARY LDLIBRARYDIR INSTSONAME RUNSHARED LINKCC RANLIB ac_ct_RANLIB AR SVNVERSION INSTALL_PROGRAM INSTALL_SCRIPT INSTALL_DATA LN OPT BASECFLAGS OTHER_LIBTOOL_OPT LIBTOOL_CRUFT SO LDSHARED BLDSHARED CCSHARED LINKFORSHARED CFLAGSFORSHARED SHLIBS USE_SIGNAL_MODULE SIGNAL_OBJS USE_THREAD_MODULE LDLAST THREADOBJ DLINCLDIR DYNLOADFILE MACHDEP_OBJS TRUE LIBOBJS HAVE_GETHOSTBYNAME_R_6_ARG HAVE_GETHOSTBYNAME_R_5_ARG HAVE_GETHOSTBYNAME_R_3_ARG HAVE_GETHOSTBYNAME_R HAVE_GETHOSTBYNAME LIBM LIBC UNICODE_OBJS THREADHEADERS SRCDIRS LTLIBOBJS'
+ac_subst_vars='SHELL PATH_SEPARATOR PACKAGE_NAME PACKAGE_TARNAME PACKAGE_VERSION PACKAGE_STRING PACKAGE_BUGREPORT exec_prefix prefix program_transform_name bindir sbindir libexecdir datadir sysconfdir sharedstatedir localstatedir libdir includedir oldincludedir infodir mandir build_alias host_alias target_alias DEFS ECHO_C ECHO_N ECHO_T LIBS VERSION SOVERSION CONFIG_ARGS PYTHONFRAMEWORK PYTHONFRAMEWORKDIR PYTHONFRAMEWORKPREFIX PYTHONFRAMEWORKINSTALLDIR MACHDEP SGI_ABI EXTRAPLATDIR EXTRAMACHDEPPATH CONFIGURE_MACOSX_DEPLOYMENT_TARGET CC CFLAGS LDFLAGS CPPFLAGS ac_ct_CC EXEEXT OBJEXT CXX MAINCC CPP EGREP BUILDEXEEXT LIBRARY LDLIBRARY DLLLIBRARY BLDLIBRARY LDLIBRARYDIR INSTSONAME RUNSHARED LINKCC RANLIB ac_ct_RANLIB AR SVNVERSION INSTALL_PROGRAM INSTALL_SCRIPT INSTALL_DATA LN OPT BASECFLAGS OTHER_LIBTOOL_OPT LIBTOOL_CRUFT SO LDSHARED BLDSHARED CCSHARED LINKFORSHARED CFLAGSFORSHARED SHLIBS USE_SIGNAL_MODULE SIGNAL_OBJS USE_THREAD_MODULE LDLAST THREADOBJ DLINCLDIR DYNLOADFILE MACHDEP_OBJS TRUE LIBOBJS HAVE_GETHOSTBYNAME_R_6_ARG HAVE_GETHOSTBYNAME_R_5_ARG HAVE_GETHOSTBYNAME_R_3_ARG HAVE_GETHOSTBYNAME_R HAVE_GETHOSTBYNAME LIBM LIBC UNICODE_OBJS THREADHEADERS SRCDIRS LTLIBOBJS'
 ac_subst_files=''
 
 # Initialize some variables set by options.
@@ -859,10 +859,13 @@
   --with-PACKAGE[=ARG]    use PACKAGE [ARG=yes]
   --without-PACKAGE       do not use PACKAGE (same as --with-PACKAGE=no)
   --without-gcc           never use gcc
-  --with-cxx=<compiler>   enable C++ support
+  --with-cxx-main=<compiler>
+                          compile main() and link python executable with C++
+                          compiler
   --with-suffix=.exe      set executable suffix
   --with-pydebug          build with Py_DEBUG defined
   --with-libs='lib1 ...'  link against additional libs
+  --with-system-ffi       build _ctypes module using an installed ffi library
   --with-signal-module    disable/enable signal module
   --with-dec-threads      use DEC Alpha/OSF1 thread-safe libraries
   --with(out)-threads[=DIRECTORY]
@@ -979,7 +982,7 @@
     else
       echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2
     fi
-    cd $ac_popdir
+    cd "$ac_popdir"
   done
 fi
 
@@ -1513,7 +1516,9 @@
     ;;
   # On Mac OS X 10.4, defining _POSIX_C_SOURCE or _XOPEN_SOURCE
   # disables platform specific features beyond repair.
-  Darwin/8.*)
+  # On Mac OS X 10.3, defining _POSIX_C_SOURCE or _XOPEN_SOURCE
+  # has no effect, don't bother defineing them
+  Darwin/[78].*)
     define_xopen_source=no
     ;;
 
@@ -1676,258 +1681,6 @@
 echo "$as_me:$LINENO: result: $without_gcc" >&5
 echo "${ECHO_T}$without_gcc" >&6
 
-
-
-MAINOBJ=python.o
-echo "$as_me:$LINENO: checking for --with-cxx=<compiler>" >&5
-echo $ECHO_N "checking for --with-cxx=<compiler>... $ECHO_C" >&6
-
-# Check whether --with-cxx or --without-cxx was given.
-if test "${with_cxx+set}" = set; then
-  withval="$with_cxx"
-
-	check_cxx=no
-	case $withval in
-	no)	CXX=
-		with_cxx=no;;
-	*)	CXX=$withval
-		MAINOBJ=ccpython.o
-		with_cxx=$withval;;
-	esac
-else
-
-	with_cxx=no
-	check_cxx=yes
-
-fi;
-echo "$as_me:$LINENO: result: $with_cxx" >&5
-echo "${ECHO_T}$with_cxx" >&6
-
-if test "$with_cxx" = "yes"
-then
-	{ { echo "$as_me:$LINENO: error: must supply a compiler when using --with-cxx" >&5
-echo "$as_me: error: must supply a compiler when using --with-cxx" >&2;}
-   { (exit 1); exit 1; }; }
-fi
-
-
-
-
-if test "$check_cxx" = "yes"
-then
-	for ac_prog in $CCC c++ g++ gcc CC cxx cc++ cl
-do
-  # Extract the first word of "$ac_prog", so it can be a program name with args.
-set dummy $ac_prog; ac_word=$2
-echo "$as_me:$LINENO: checking for $ac_word" >&5
-echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6
-if test "${ac_cv_prog_CXX+set}" = set; then
-  echo $ECHO_N "(cached) $ECHO_C" >&6
-else
-  if test -n "$CXX"; then
-  ac_cv_prog_CXX="$CXX" # Let the user override the test.
-else
-as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
-for as_dir in $PATH
-do
-  IFS=$as_save_IFS
-  test -z "$as_dir" && as_dir=.
-  for ac_exec_ext in '' $ac_executable_extensions; do
-  if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
-    ac_cv_prog_CXX="$ac_prog"
-    echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5
-    break 2
-  fi
-done
-done
-
-fi
-fi
-CXX=$ac_cv_prog_CXX
-if test -n "$CXX"; then
-  echo "$as_me:$LINENO: result: $CXX" >&5
-echo "${ECHO_T}$CXX" >&6
-else
-  echo "$as_me:$LINENO: result: no" >&5
-echo "${ECHO_T}no" >&6
-fi
-
-  test -n "$CXX" && break
-done
-test -n "$CXX" || CXX="notfound"
-
-	if test "$CXX" = "notfound"
-	then
-		CXX=
-	else
-		ac_ext=cc
-ac_cpp='$CXXCPP $CPPFLAGS'
-ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_cxx_compiler_gnu
-       cat >conftest.$ac_ext <<_ACEOF
-/* confdefs.h.  */
-_ACEOF
-cat confdefs.h >>conftest.$ac_ext
-cat >>conftest.$ac_ext <<_ACEOF
-/* end confdefs.h.  */
-
-int
-main ()
-{
-
-  ;
-  return 0;
-}
-_ACEOF
-ac_clean_files_save=$ac_clean_files
-ac_clean_files="$ac_clean_files a.out a.exe b.out"
-# Try to create an executable without -o first, disregard a.out.
-# It will help us diagnose broken compilers, and finding out an intuition
-# of exeext.
-echo "$as_me:$LINENO: checking for C++ compiler default output file name" >&5
-echo $ECHO_N "checking for C++ compiler default output file name... $ECHO_C" >&6
-ac_link_default=`echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'`
-if { (eval echo "$as_me:$LINENO: \"$ac_link_default\"") >&5
-  (eval $ac_link_default) 2>&5
-  ac_status=$?
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); }; then
-  # Find the output, starting from the most likely.  This scheme is
-# not robust to junk in `.', hence go to wildcards (a.*) only as a last
-# resort.
-
-# Be careful to initialize this variable, since it used to be cached.
-# Otherwise an old cache value of `no' led to `EXEEXT = no' in a Makefile.
-ac_cv_exeext=
-# b.out is created by i960 compilers.
-for ac_file in a_out.exe a.exe conftest.exe a.out conftest a.* conftest.* b.out
-do
-  test -f "$ac_file" || continue
-  case $ac_file in
-    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.o | *.obj )
-	;;
-    conftest.$ac_ext )
-	# This is the source file.
-	;;
-    [ab].out )
-	# We found the default executable, but exeext='' is most
-	# certainly right.
-	break;;
-    *.* )
-	ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
-	# FIXME: I believe we export ac_cv_exeext for Libtool,
-	# but it would be cool to find out if it's true.  Does anybody
-	# maintain Libtool? --akim.
-	export ac_cv_exeext
-	break;;
-    * )
-	break;;
-  esac
-done
-else
-  echo "$as_me: failed program was:" >&5
-sed 's/^/| /' conftest.$ac_ext >&5
-
-{ { echo "$as_me:$LINENO: error: C++ compiler cannot create executables
-See \`config.log' for more details." >&5
-echo "$as_me: error: C++ compiler cannot create executables
-See \`config.log' for more details." >&2;}
-   { (exit 77); exit 77; }; }
-fi
-
-ac_exeext=$ac_cv_exeext
-echo "$as_me:$LINENO: result: $ac_file" >&5
-echo "${ECHO_T}$ac_file" >&6
-
-# Check the compiler produces executables we can run.  If not, either
-# the compiler is broken, or we cross compile.
-echo "$as_me:$LINENO: checking whether the C++ compiler works" >&5
-echo $ECHO_N "checking whether the C++ compiler works... $ECHO_C" >&6
-# FIXME: These cross compiler hacks should be removed for Autoconf 3.0
-# If not cross compiling, check that we can run a simple program.
-if test "$cross_compiling" != yes; then
-  if { ac_try='./$ac_file'
-  { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
-  (eval $ac_try) 2>&5
-  ac_status=$?
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); }; }; then
-    cross_compiling=no
-  else
-    if test "$cross_compiling" = maybe; then
-	cross_compiling=yes
-    else
-	{ { echo "$as_me:$LINENO: error: cannot run C++ compiled programs.
-If you meant to cross compile, use \`--host'.
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run C++ compiled programs.
-If you meant to cross compile, use \`--host'.
-See \`config.log' for more details." >&2;}
-   { (exit 1); exit 1; }; }
-    fi
-  fi
-fi
-echo "$as_me:$LINENO: result: yes" >&5
-echo "${ECHO_T}yes" >&6
-
-rm -f a.out a.exe conftest$ac_cv_exeext b.out
-ac_clean_files=$ac_clean_files_save
-# Check the compiler produces executables we can run.  If not, either
-# the compiler is broken, or we cross compile.
-echo "$as_me:$LINENO: checking whether we are cross compiling" >&5
-echo $ECHO_N "checking whether we are cross compiling... $ECHO_C" >&6
-echo "$as_me:$LINENO: result: $cross_compiling" >&5
-echo "${ECHO_T}$cross_compiling" >&6
-
-echo "$as_me:$LINENO: checking for suffix of executables" >&5
-echo $ECHO_N "checking for suffix of executables... $ECHO_C" >&6
-if { (eval echo "$as_me:$LINENO: \"$ac_link\"") >&5
-  (eval $ac_link) 2>&5
-  ac_status=$?
-  echo "$as_me:$LINENO: \$? = $ac_status" >&5
-  (exit $ac_status); }; then
-  # If both `conftest.exe' and `conftest' are `present' (well, observable)
-# catch `conftest.exe'.  For instance with Cygwin, `ls conftest' will
-# work properly (i.e., refer to `conftest.exe'), while it won't with
-# `rm'.
-for ac_file in conftest.exe conftest conftest.*; do
-  test -f "$ac_file" || continue
-  case $ac_file in
-    *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.o | *.obj ) ;;
-    *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'`
-	  export ac_cv_exeext
-	  break;;
-    * ) break;;
-  esac
-done
-else
-  { { echo "$as_me:$LINENO: error: cannot compute suffix of executables: cannot compile and link
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot compute suffix of executables: cannot compile and link
-See \`config.log' for more details." >&2;}
-   { (exit 1); exit 1; }; }
-fi
-
-rm -f conftest$ac_cv_exeext
-echo "$as_me:$LINENO: result: $ac_cv_exeext" >&5
-echo "${ECHO_T}$ac_cv_exeext" >&6
-
-rm -f conftest.$ac_ext
-EXEEXT=$ac_cv_exeext
-ac_exeext=$EXEEXT
-
-       ac_ext=c
-ac_cpp='$CPP $CPPFLAGS'
-ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
-ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
-ac_compiler_gnu=$ac_cv_c_compiler_gnu
-
-
-
-	fi
-fi
-
 # If the user switches compilers, we can't believe the cache
 if test ! -z "$ac_cv_prog_CC" -a ! -z "$CC" -a "$CC" != "$ac_cv_prog_CC"
 then
@@ -2506,8 +2259,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -2565,8 +2317,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -2682,8 +2433,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -2737,8 +2487,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -2783,8 +2532,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -2828,8 +2576,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -2869,6 +2616,190 @@
 ac_compiler_gnu=$ac_cv_c_compiler_gnu
 
 
+
+
+echo "$as_me:$LINENO: checking for --with-cxx-main=<compiler>" >&5
+echo $ECHO_N "checking for --with-cxx-main=<compiler>... $ECHO_C" >&6
+
+# Check whether --with-cxx_main or --without-cxx_main was given.
+if test "${with_cxx_main+set}" = set; then
+  withval="$with_cxx_main"
+
+
+	case $withval in
+	no)	with_cxx_main=no
+		MAINCC='$(CC)';;
+	yes)	with_cxx_main=yes
+		MAINCC='$(CXX)';;
+	*)	with_cxx_main=yes
+		MAINCC=$withval
+		if test -z "$CXX"
+		then
+			CXX=$withval
+		fi;;
+	esac
+else
+
+	with_cxx_main=no
+	MAINCC='$(CC)'
+
+fi;
+echo "$as_me:$LINENO: result: $with_cxx_main" >&5
+echo "${ECHO_T}$with_cxx_main" >&6
+
+preset_cxx="$CXX"
+if test -z "$CXX"
+then
+        case "$CC" in
+        gcc)    # Extract the first word of "g++", so it can be a program name with args.
+set dummy g++; ac_word=$2
+echo "$as_me:$LINENO: checking for $ac_word" >&5
+echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6
+if test "${ac_cv_path_CXX+set}" = set; then
+  echo $ECHO_N "(cached) $ECHO_C" >&6
+else
+  case $CXX in
+  [\\/]* | ?:[\\/]*)
+  ac_cv_path_CXX="$CXX" # Let the user override the test with a path.
+  ;;
+  *)
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in notfound
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  for ac_exec_ext in '' $ac_executable_extensions; do
+  if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+    ac_cv_path_CXX="$as_dir/$ac_word$ac_exec_ext"
+    echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+done
+
+  test -z "$ac_cv_path_CXX" && ac_cv_path_CXX="g++"
+  ;;
+esac
+fi
+CXX=$ac_cv_path_CXX
+
+if test -n "$CXX"; then
+  echo "$as_me:$LINENO: result: $CXX" >&5
+echo "${ECHO_T}$CXX" >&6
+else
+  echo "$as_me:$LINENO: result: no" >&5
+echo "${ECHO_T}no" >&6
+fi
+ ;;
+        cc)     # Extract the first word of "c++", so it can be a program name with args.
+set dummy c++; ac_word=$2
+echo "$as_me:$LINENO: checking for $ac_word" >&5
+echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6
+if test "${ac_cv_path_CXX+set}" = set; then
+  echo $ECHO_N "(cached) $ECHO_C" >&6
+else
+  case $CXX in
+  [\\/]* | ?:[\\/]*)
+  ac_cv_path_CXX="$CXX" # Let the user override the test with a path.
+  ;;
+  *)
+  as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in notfound
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  for ac_exec_ext in '' $ac_executable_extensions; do
+  if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+    ac_cv_path_CXX="$as_dir/$ac_word$ac_exec_ext"
+    echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+done
+
+  test -z "$ac_cv_path_CXX" && ac_cv_path_CXX="c++"
+  ;;
+esac
+fi
+CXX=$ac_cv_path_CXX
+
+if test -n "$CXX"; then
+  echo "$as_me:$LINENO: result: $CXX" >&5
+echo "${ECHO_T}$CXX" >&6
+else
+  echo "$as_me:$LINENO: result: no" >&5
+echo "${ECHO_T}no" >&6
+fi
+ ;;
+        esac
+	if test "$CXX" = "notfound"
+	then
+		CXX=""
+	fi
+fi
+if test -z "$CXX"
+then
+	for ac_prog in $CCC c++ g++ gcc CC cxx cc++ cl
+do
+  # Extract the first word of "$ac_prog", so it can be a program name with args.
+set dummy $ac_prog; ac_word=$2
+echo "$as_me:$LINENO: checking for $ac_word" >&5
+echo $ECHO_N "checking for $ac_word... $ECHO_C" >&6
+if test "${ac_cv_prog_CXX+set}" = set; then
+  echo $ECHO_N "(cached) $ECHO_C" >&6
+else
+  if test -n "$CXX"; then
+  ac_cv_prog_CXX="$CXX" # Let the user override the test.
+else
+as_save_IFS=$IFS; IFS=$PATH_SEPARATOR
+for as_dir in $PATH
+do
+  IFS=$as_save_IFS
+  test -z "$as_dir" && as_dir=.
+  for ac_exec_ext in '' $ac_executable_extensions; do
+  if $as_executable_p "$as_dir/$ac_word$ac_exec_ext"; then
+    ac_cv_prog_CXX="$ac_prog"
+    echo "$as_me:$LINENO: found $as_dir/$ac_word$ac_exec_ext" >&5
+    break 2
+  fi
+done
+done
+
+fi
+fi
+CXX=$ac_cv_prog_CXX
+if test -n "$CXX"; then
+  echo "$as_me:$LINENO: result: $CXX" >&5
+echo "${ECHO_T}$CXX" >&6
+else
+  echo "$as_me:$LINENO: result: no" >&5
+echo "${ECHO_T}no" >&6
+fi
+
+  test -n "$CXX" && break
+done
+test -n "$CXX" || CXX="notfound"
+
+	if test "$CXX" = "notfound"
+	then
+		CXX=""
+	fi
+fi
+if test "$preset_cxx" != "$CXX"
+then
+        { echo "$as_me:$LINENO: WARNING:
+
+  By default, distutils will build C++ extension modules with \"$CXX\".
+  If this is not intended, then set CXX on the configure command line.
+  " >&5
+echo "$as_me: WARNING:
+
+  By default, distutils will build C++ extension modules with \"$CXX\".
+  If this is not intended, then set CXX on the configure command line.
+  " >&2;}
+fi
+
+
 # checks for UNIX variants that set C preprocessor variables
 
 ac_ext=c
@@ -3271,22 +3202,7 @@
 echo $ECHO_N "checking LINKCC... $ECHO_C" >&6
 if test -z "$LINKCC"
 then
-        if test -z "$CXX"; then
-              LINKCC="\$(PURIFY) \$(CC)"
-        else
-              echo 'extern "C" void foo();int main(){foo();}' > conftest_a.cc
-              $CXX -c conftest_a.cc # 2>&5
-              echo 'void foo(){}' > conftest_b.$ac_ext
-              $CC -c conftest_b.$ac_ext # 2>&5
-              if $CC -o conftest$ac_exeext conftest_a.$ac_objext conftest_b.$ac_objext 2>&5 \
-                 && test -s conftest$ac_exeext && ./conftest$ac_exeext
-              then
-                 LINKCC="\$(PURIFY) \$(CC)"
-              else
-                 LINKCC="\$(PURIFY) \$(CXX)"
-              fi
-              rm -fr conftest*
-        fi
+	LINKCC='$(PURIFY) $(MAINCC)'
 	case $ac_sys_system in
 	AIX*)
 	   exp_extra="\"\""
@@ -3600,6 +3516,7 @@
 done
 done
 
+  test -z "$ac_cv_prog_SVNVERSION" && ac_cv_prog_SVNVERSION="not-found"
 fi
 fi
 SVNVERSION=$ac_cv_prog_SVNVERSION
@@ -3788,18 +3705,21 @@
 then
     case $GCC in
     yes)
+        if test "$CC" != 'g++' ; then
+	    STRICT_PROTO="-Wstrict-prototypes"
+	fi
 	case $ac_cv_prog_cc_g in
 	yes)
 	    if test "$Py_DEBUG" = 'true' ; then
 		# Optimization messes up debuggers, so turn it off for
 		# debug builds.
-		OPT="-g -Wall -Wstrict-prototypes"
+		OPT="-g -Wall $STRICT_PROTO"
 	    else
-		OPT="-g -O3 -Wall -Wstrict-prototypes"
+		OPT="-g -O3 -Wall $STRICT_PROTO"
 	    fi
 	    ;;
 	*)
-	    OPT="-O3 -Wall -Wstrict-prototypes"
+	    OPT="-O3 -Wall $STRICT_PROTO"
 	    ;;
 	esac
 	case $ac_sys_system in
@@ -3882,6 +3802,9 @@
 	Darwin*)
 	    BASECFLAGS="$BASECFLAGS -Wno-long-double -no-cpp-precomp -mno-fused-madd"
 	    ;;
+	OSF*)
+	    BASECFLAGS="$BASECFLAGS -mieee"
+	    ;;
     esac
     ;;
 
@@ -4366,8 +4289,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -4537,8 +4459,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -4652,8 +4573,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -4808,8 +4728,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -4881,8 +4800,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -4936,8 +4854,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5008,8 +4925,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5063,8 +4979,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5127,8 +5042,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5185,8 +5099,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5333,8 +5246,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5486,8 +5398,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5556,8 +5467,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5647,8 +5557,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5698,8 +5607,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5776,8 +5684,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5858,8 +5765,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5924,8 +5830,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -5990,8 +5895,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6063,8 +5967,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6125,8 +6028,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6229,8 +6131,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6296,8 +6197,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6359,8 +6259,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6400,8 +6299,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6457,8 +6355,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6498,8 +6395,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6563,8 +6459,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6595,10 +6490,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -6710,8 +6603,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6773,8 +6665,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6814,8 +6705,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6871,8 +6761,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6912,8 +6801,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -6977,8 +6865,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7009,10 +6896,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -7124,8 +7009,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7187,8 +7071,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7228,8 +7111,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7285,8 +7167,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7326,8 +7207,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7391,8 +7271,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7423,10 +7302,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -7538,8 +7415,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7601,8 +7477,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7642,8 +7517,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7699,8 +7573,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7740,8 +7613,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7805,8 +7677,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -7837,10 +7708,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -7952,8 +7821,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8015,8 +7883,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8056,8 +7923,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8113,8 +7979,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8154,8 +8019,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8219,8 +8083,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8251,10 +8114,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -8366,8 +8227,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8429,8 +8289,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8470,8 +8329,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8527,8 +8385,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8568,8 +8425,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8633,8 +8489,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8665,10 +8520,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -8780,8 +8633,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8843,8 +8695,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8884,8 +8735,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8941,8 +8791,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -8982,8 +8831,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9047,8 +8895,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9079,10 +8926,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -9194,8 +9039,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9257,8 +9101,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9298,8 +9141,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9355,8 +9197,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9396,8 +9237,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9461,8 +9301,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9493,10 +9332,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -9604,8 +9441,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9666,8 +9502,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9729,8 +9564,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9770,8 +9604,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9827,8 +9660,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9868,8 +9700,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9933,8 +9764,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -9965,10 +9795,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -10077,8 +9905,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -10139,8 +9966,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -10202,8 +10028,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -10243,8 +10068,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -10300,8 +10124,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -10341,8 +10164,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -10406,8 +10228,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -10438,10 +10259,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -10693,8 +10512,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -10813,7 +10631,7 @@
 
 
 case $ac_sys_system/$ac_sys_release in
-  Darwin/[01234567].*)
+  Darwin/[01567]\..*)
     OTHER_LIBTOOL_OPT="-prebind -seg1addr 0x10000000"
     ;;
   Darwin/*)
@@ -10823,7 +10641,7 @@
 
 
 case $ac_sys_system/$ac_sys_release in
-  Darwin/[01234567].*)
+  Darwin/[01567]\..*)
     LIBTOOL_CRUFT="-framework System -lcc_dynamic -arch_only `arch`"
     LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -install_name $(PYTHONFRAMEWORKINSTALLDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
     LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -compatibility_version $(VERSION) -current_version $(VERSION)';;
@@ -11170,8 +10988,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -11244,8 +11061,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -11320,8 +11136,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -11375,8 +11190,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -11451,8 +11265,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -11514,8 +11327,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -11586,8 +11398,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -11654,8 +11465,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -11725,8 +11535,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -11774,6 +11583,22 @@
 echo "${ECHO_T}no" >&6
 fi;
 
+# Check for use of the system libffi library
+echo "$as_me:$LINENO: checking for --with-system-ffi" >&5
+echo $ECHO_N "checking for --with-system-ffi... $ECHO_C" >&6
+
+# Check whether --with-system_ffi or --without-system_ffi was given.
+if test "${with_system_ffi+set}" = set; then
+  withval="$with_system_ffi"
+
+fi;
+
+if test -z "$with_system_ffi"
+then with_system_ffi="no"
+fi
+echo "$as_me:$LINENO: result: $with_system_ffi" >&5
+echo "${ECHO_T}$with_system_ffi" >&6
+
 # Determine if signalmodule should be used.
 
 
@@ -11979,8 +11804,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -12134,8 +11958,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -12315,8 +12138,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -12408,8 +12230,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -12472,8 +12293,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -12622,8 +12442,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -12780,8 +12599,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -12854,8 +12672,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -12928,8 +12745,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -13002,8 +12818,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -13105,8 +12920,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -13181,8 +12995,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -13384,8 +13197,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -13533,8 +13345,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -13924,8 +13735,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -13974,7 +13784,8 @@
 	;;
 	BeOS*) DYNLOADFILE="dynload_beos.o";;
 	hp*|HP*) DYNLOADFILE="dynload_hpux.o";;
-	Darwin/*) DYNLOADFILE="dynload_next.o";;
+	# Use dynload_next.c only on 10.2 and below, which don't have native dlopen()
+	Darwin/[0156]\..*) DYNLOADFILE="dynload_next.o";;
 	atheos*) DYNLOADFILE="dynload_atheos.o";;
 	*)
 	# use dynload_shlib.c and dlopen() if we have it; otherwise stub
@@ -14086,6 +13897,8 @@
 
 
 
+
+
 for ac_func in alarm bind_textdomain_codeset chown clock confstr ctermid \
  execv fork fpathconf ftime ftruncate \
  gai_strerror getgroups getlogin getloadavg getpeername getpgid getpid \
@@ -14097,7 +13910,7 @@
  setlocale setregid setreuid setsid setpgid setpgrp setuid setvbuf snprintf \
  sigaction siginterrupt sigrelse strftime \
  sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile tmpnam tmpnam_r \
- truncate uname unsetenv utimes waitpid wcscoll _getpty
+ truncate uname unsetenv utimes waitpid wait3 wait4 wcscoll _getpty
 do
 as_ac_var=`echo "ac_cv_func_$ac_func" | $as_tr_sh`
 echo "$as_me:$LINENO: checking for $ac_func" >&5
@@ -14165,8 +13978,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14227,8 +14039,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14282,8 +14093,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14337,8 +14147,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14392,8 +14201,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14447,8 +14255,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14502,8 +14309,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14567,8 +14373,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14626,8 +14431,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14685,8 +14489,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14795,8 +14598,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14863,8 +14665,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14931,8 +14732,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -14994,8 +14794,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15057,8 +14856,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15120,8 +14918,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15221,8 +15018,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15290,8 +15086,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15360,8 +15155,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15469,8 +15263,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15538,8 +15331,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15608,8 +15400,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15724,8 +15515,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15831,8 +15621,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15943,8 +15732,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -15996,8 +15784,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16094,8 +15881,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16147,8 +15933,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16245,8 +16030,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16298,8 +16082,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16365,8 +16148,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16434,8 +16216,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16672,8 +16453,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16740,8 +16520,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16803,8 +16582,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16869,8 +16647,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16916,8 +16693,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -16991,8 +16767,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17056,8 +16831,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17100,8 +16874,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17166,8 +16939,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17210,8 +16982,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17276,8 +17047,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17320,8 +17090,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17386,8 +17155,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17430,8 +17198,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17496,8 +17263,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17540,8 +17306,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17606,8 +17371,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17650,8 +17414,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17729,8 +17492,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17793,8 +17555,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17853,8 +17614,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17917,8 +17677,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -17984,8 +17743,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18090,8 +17848,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18150,8 +17907,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18206,8 +17962,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18262,8 +18017,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18329,8 +18083,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18389,8 +18142,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18448,8 +18200,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18510,8 +18261,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18610,8 +18360,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18679,8 +18428,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18748,8 +18496,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18815,8 +18562,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -18930,8 +18676,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19041,8 +18786,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19108,8 +18852,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19301,8 +19044,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19377,8 +19119,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19536,8 +19277,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19600,8 +19340,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19642,8 +19381,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19700,8 +19438,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19742,8 +19479,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19808,8 +19544,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -19840,10 +19575,8 @@
 esac
 else
   if test "$cross_compiling" = yes; then
-  { { echo "$as_me:$LINENO: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&5
-echo "$as_me: error: cannot run test program while cross compiling
-See \`config.log' for more details." >&2;}
+  { { echo "$as_me:$LINENO: error: internal error: not reached in cross-compile" >&5
+echo "$as_me: error: internal error: not reached in cross-compile" >&2;}
    { (exit 1); exit 1; }; }
 else
   cat >conftest.$ac_ext <<_ACEOF
@@ -19957,8 +19690,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -20010,7 +19742,8 @@
   #include <wchar.h>
   int main()
   {
-        exit((((wchar_t) -1) < ((wchar_t) 0)) ? 1 : 0);
+	/* Success: exit code 0 */
+        exit((((wchar_t) -1) < ((wchar_t) 0)) ? 0 : 1);
   }
 
 _ACEOF
@@ -20168,8 +19901,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -20211,8 +19943,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -20269,8 +20000,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -20459,8 +20189,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -20536,8 +20265,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -20612,8 +20340,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -20688,8 +20415,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -20820,8 +20546,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -20894,8 +20619,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -21157,8 +20881,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -21204,8 +20927,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -21279,8 +21001,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -21463,8 +21184,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -21530,8 +21250,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -21597,8 +21316,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -21664,8 +21382,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -21774,8 +21491,7 @@
   cat conftest.err >&5
   echo "$as_me:$LINENO: \$? = $ac_status" >&5
   (exit $ac_status); } &&
-	 { ac_try='test -z "$ac_c_werror_flag"
-			 || test ! -s conftest.err'
+	 { ac_try='test -z "$ac_c_werror_flag"			 || test ! -s conftest.err'
   { (eval echo "$as_me:$LINENO: \"$ac_try\"") >&5
   (eval $ac_try) 2>&5
   ac_status=$?
@@ -22455,15 +22171,15 @@
 s,@EXTRAPLATDIR@,$EXTRAPLATDIR,;t t
 s,@EXTRAMACHDEPPATH@,$EXTRAMACHDEPPATH,;t t
 s,@CONFIGURE_MACOSX_DEPLOYMENT_TARGET@,$CONFIGURE_MACOSX_DEPLOYMENT_TARGET,;t t
-s,@CXX@,$CXX,;t t
-s,@MAINOBJ@,$MAINOBJ,;t t
-s,@EXEEXT@,$EXEEXT,;t t
 s,@CC@,$CC,;t t
 s,@CFLAGS@,$CFLAGS,;t t
 s,@LDFLAGS@,$LDFLAGS,;t t
 s,@CPPFLAGS@,$CPPFLAGS,;t t
 s,@ac_ct_CC@,$ac_ct_CC,;t t
+s,@EXEEXT@,$EXEEXT,;t t
 s,@OBJEXT@,$OBJEXT,;t t
+s,@CXX@,$CXX,;t t
+s,@MAINCC@,$MAINCC,;t t
 s,@CPP@,$CPP,;t t
 s,@EGREP@,$EGREP,;t t
 s,@BUILDEXEEXT@,$BUILDEXEEXT,;t t
@@ -22681,11 +22397,6 @@
   *) ac_INSTALL=$ac_top_builddir$INSTALL ;;
   esac
 
-  if test x"$ac_file" != x-; then
-    { echo "$as_me:$LINENO: creating $ac_file" >&5
-echo "$as_me: creating $ac_file" >&6;}
-    rm -f "$ac_file"
-  fi
   # Let's still pretend it is `configure' which instantiates (i.e., don't
   # use $as_me), people would be surprised to read:
   #    /* config.h.  Generated by config.status.  */
@@ -22724,6 +22435,12 @@
 	 fi;;
       esac
     done` || { (exit 1); exit 1; }
+
+  if test x"$ac_file" != x-; then
+    { echo "$as_me:$LINENO: creating $ac_file" >&5
+echo "$as_me: creating $ac_file" >&6;}
+    rm -f "$ac_file"
+  fi
 _ACEOF
 cat >>$CONFIG_STATUS <<_ACEOF
   sed "$ac_vpsub
@@ -23007,13 +22724,13 @@
 fi
 
 
-echo "creating Setup"
+echo "creating Modules/Setup"
 if test ! -f Modules/Setup
 then
 	cp $srcdir/Modules/Setup.dist Modules/Setup
 fi
 
-echo "creating Setup.local"
+echo "creating Modules/Setup.local"
 if test ! -f Modules/Setup.local
 then
 	echo "# Edit this file for local setup changes" >Modules/Setup.local
diff --git a/configure.in b/configure.in
index f3525b1..7a3a37f 100644
--- a/configure.in
+++ b/configure.in
@@ -173,7 +173,9 @@
     ;;
   # On Mac OS X 10.4, defining _POSIX_C_SOURCE or _XOPEN_SOURCE
   # disables platform specific features beyond repair.
-  Darwin/8.*)
+  # On Mac OS X 10.3, defining _POSIX_C_SOURCE or _XOPEN_SOURCE 
+  # has no effect, don't bother defineing them
+  Darwin/@<:@78@:>@.*)
     define_xopen_source=no
     ;;
 
@@ -311,56 +313,6 @@
 	esac])
 AC_MSG_RESULT($without_gcc)
 
-AC_SUBST(CXX)
-AC_SUBST(MAINOBJ)
-MAINOBJ=python.o
-AC_MSG_CHECKING(for --with-cxx=<compiler>)
-AC_ARG_WITH(cxx,
-            AC_HELP_STRING(--with-cxx=<compiler>, enable C++ support),
-[
-	check_cxx=no
-	case $withval in
-	no)	CXX=
-		with_cxx=no;;
-	*)	CXX=$withval
-		MAINOBJ=ccpython.o
-		with_cxx=$withval;;
-	esac], [
-	with_cxx=no
-	check_cxx=yes
-])
-AC_MSG_RESULT($with_cxx)
-
-if test "$with_cxx" = "yes"
-then
-	AC_MSG_ERROR([must supply a compiler when using --with-cxx])
-fi
-
-dnl The following fragment works similar to AC_PROG_CXX.
-dnl It does not fail if CXX is not found, and it is not executed if 
-dnl --without-cxx was given.
-dnl Finally, it does not test whether CXX is g++.
-
-dnl Autoconf 2.5x does not have AC_PROG_CXX_WORKS anymore
-ifdef([AC_PROG_CXX_WORKS],[],
-      [AC_DEFUN([AC_PROG_CXX_WORKS],
-      [AC_LANG_PUSH(C++)dnl
-       _AC_COMPILER_EXEEXT
-       AC_LANG_POP()
-      ]
-)])
-
-if test "$check_cxx" = "yes" 
-then
-	AC_CHECK_PROGS(CXX, $CCC c++ g++ gcc CC cxx cc++ cl, notfound)
-	if test "$CXX" = "notfound"
-	then
-		CXX=
-	else
-		AC_PROG_CXX_WORKS
-	fi
-fi
-
 # If the user switches compilers, we can't believe the cache
 if test ! -z "$ac_cv_prog_CC" -a ! -z "$CC" -a "$CC" != "$ac_cv_prog_CC"
 then
@@ -370,6 +322,61 @@
 
 AC_PROG_CC
 
+AC_SUBST(CXX)
+AC_SUBST(MAINCC)
+AC_MSG_CHECKING(for --with-cxx-main=<compiler>)
+AC_ARG_WITH(cxx_main,
+            AC_HELP_STRING([--with-cxx-main=<compiler>],
+                           [compile main() and link python executable with C++ compiler]),
+[
+	
+	case $withval in
+	no)	with_cxx_main=no
+		MAINCC='$(CC)';;
+	yes)	with_cxx_main=yes
+		MAINCC='$(CXX)';;
+	*)	with_cxx_main=yes
+		MAINCC=$withval
+		if test -z "$CXX"
+		then
+			CXX=$withval
+		fi;;
+	esac], [
+	with_cxx_main=no
+	MAINCC='$(CC)'
+])
+AC_MSG_RESULT($with_cxx_main)
+
+preset_cxx="$CXX"
+if test -z "$CXX"
+then
+        case "$CC" in
+        gcc)    AC_PATH_PROG(CXX, [g++], [g++], [notfound]) ;;
+        cc)     AC_PATH_PROG(CXX, [c++], [c++], [notfound]) ;;
+        esac
+	if test "$CXX" = "notfound"
+	then
+		CXX=""
+	fi
+fi
+if test -z "$CXX"
+then
+	AC_CHECK_PROGS(CXX, $CCC c++ g++ gcc CC cxx cc++ cl, notfound)
+	if test "$CXX" = "notfound"
+	then
+		CXX=""
+	fi
+fi
+if test "$preset_cxx" != "$CXX"
+then
+        AC_MSG_WARN([
+
+  By default, distutils will build C++ extension modules with "$CXX".
+  If this is not intended, then set CXX on the configure command line.
+  ])
+fi
+
+
 # checks for UNIX variants that set C preprocessor variables
 AC_AIX
 
@@ -478,22 +485,7 @@
 AC_MSG_CHECKING(LINKCC)
 if test -z "$LINKCC"
 then
-        if test -z "$CXX"; then
-              LINKCC="\$(PURIFY) \$(CC)"
-        else
-              echo 'extern "C" void foo();int main(){foo();}' > conftest_a.cc
-              $CXX -c conftest_a.cc # 2>&5
-              echo 'void foo(){}' > conftest_b.$ac_ext
-              $CC -c conftest_b.$ac_ext # 2>&5
-              if $CC -o conftest$ac_exeext conftest_a.$ac_objext conftest_b.$ac_objext 2>&5 \
-                 && test -s conftest$ac_exeext && ./conftest$ac_exeext
-              then
-                 LINKCC="\$(PURIFY) \$(CC)"
-              else
-                 LINKCC="\$(PURIFY) \$(CXX)"
-              fi
-              rm -fr conftest*
-        fi
+	LINKCC='$(PURIFY) $(MAINCC)'
 	case $ac_sys_system in
 	AIX*)
 	   exp_extra="\"\""
@@ -619,7 +611,7 @@
 AC_CHECK_PROGS(AR, ar aal, ar)
 
 AC_SUBST(SVNVERSION)
-AC_CHECK_PROG(SVNVERSION, svnversion, found)
+AC_CHECK_PROG(SVNVERSION, svnversion, found, not-found)
 if test $SVNVERSION = found
 then
 	SVNVERSION="svnversion \$(srcdir)"
@@ -683,18 +675,21 @@
 then
     case $GCC in
     yes)
+        if test "$CC" != 'g++' ; then
+	    STRICT_PROTO="-Wstrict-prototypes"
+	fi
 	case $ac_cv_prog_cc_g in
 	yes)
 	    if test "$Py_DEBUG" = 'true' ; then
 		# Optimization messes up debuggers, so turn it off for
 		# debug builds.
-		OPT="-g -Wall -Wstrict-prototypes"
+		OPT="-g -Wall $STRICT_PROTO"
 	    else
-		OPT="-g -O3 -Wall -Wstrict-prototypes"
+		OPT="-g -O3 -Wall $STRICT_PROTO"
 	    fi
 	    ;;
 	*)
-	    OPT="-O3 -Wall -Wstrict-prototypes"
+	    OPT="-O3 -Wall $STRICT_PROTO"
 	    ;;
 	esac
 	case $ac_sys_system in
@@ -746,6 +741,9 @@
 	Darwin*)
 	    BASECFLAGS="$BASECFLAGS -Wno-long-double -no-cpp-precomp -mno-fused-madd"
 	    ;;
+	OSF*)
+	    BASECFLAGS="$BASECFLAGS -mieee"
+	    ;;
     esac
     ;;
 
@@ -1254,7 +1252,7 @@
 
 AC_SUBST(OTHER_LIBTOOL_OPT)
 case $ac_sys_system/$ac_sys_release in
-  Darwin/@<:@01234567@:>@.*) 
+  Darwin/@<:@01567@:>@\..*) 
     OTHER_LIBTOOL_OPT="-prebind -seg1addr 0x10000000"
     ;;
   Darwin/*)
@@ -1264,7 +1262,7 @@
 
 AC_SUBST(LIBTOOL_CRUFT)
 case $ac_sys_system/$ac_sys_release in
-  Darwin/@<:@01234567@:>@.*) 
+  Darwin/@<:@01567@:>@\..*) 
     LIBTOOL_CRUFT="-framework System -lcc_dynamic -arch_only `arch`"
     LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -install_name $(PYTHONFRAMEWORKINSTALLDIR)/Versions/$(VERSION)/$(PYTHONFRAMEWORK)'
     LIBTOOL_CRUFT=$LIBTOOL_CRUFT' -compatibility_version $(VERSION) -current_version $(VERSION)';;
@@ -1599,6 +1597,16 @@
 ],
 [AC_MSG_RESULT(no)])
 
+# Check for use of the system libffi library
+AC_MSG_CHECKING(for --with-system-ffi)
+AC_ARG_WITH(system_ffi,
+            AC_HELP_STRING(--with-system-ffi, build _ctypes module using an installed ffi library))
+
+if test -z "$with_system_ffi"
+then with_system_ffi="no"
+fi
+AC_MSG_RESULT($with_system_ffi)
+
 # Determine if signalmodule should be used.
 AC_SUBST(USE_SIGNAL_MODULE)
 AC_SUBST(SIGNAL_OBJS)
@@ -2105,7 +2113,8 @@
 	;;
 	BeOS*) DYNLOADFILE="dynload_beos.o";;
 	hp*|HP*) DYNLOADFILE="dynload_hpux.o";;
-	Darwin/*) DYNLOADFILE="dynload_next.o";;
+	# Use dynload_next.c only on 10.2 and below, which don't have native dlopen()
+	Darwin/@<:@0156@:>@\..*) DYNLOADFILE="dynload_next.o";;
 	atheos*) DYNLOADFILE="dynload_atheos.o";;
 	*)
 	# use dynload_shlib.c and dlopen() if we have it; otherwise stub
@@ -2148,7 +2157,7 @@
  setlocale setregid setreuid setsid setpgid setpgrp setuid setvbuf snprintf \
  sigaction siginterrupt sigrelse strftime \
  sysconf tcgetpgrp tcsetpgrp tempnam timegm times tmpfile tmpnam tmpnam_r \
- truncate uname unsetenv utimes waitpid wcscoll _getpty)
+ truncate uname unsetenv utimes waitpid wait3 wait4 wcscoll _getpty)
 
 # For some functions, having a definition is not sufficient, since
 # we want to take their address.
@@ -2786,7 +2795,8 @@
   #include <wchar.h>
   int main()
   {
-        exit((((wchar_t) -1) < ((wchar_t) 0)) ? 1 : 0);
+	/* Success: exit code 0 */
+        exit((((wchar_t) -1) < ((wchar_t) 0)) ? 0 : 1);
   }
   ],
   ac_cv_wchar_t_signed=yes,
@@ -3211,13 +3221,13 @@
 AC_CONFIG_FILES(Makefile.pre Modules/Setup.config)
 AC_OUTPUT
 
-echo "creating Setup"
+echo "creating Modules/Setup"
 if test ! -f Modules/Setup
 then
 	cp $srcdir/Modules/Setup.dist Modules/Setup
 fi
 
-echo "creating Setup.local"
+echo "creating Modules/Setup.local"
 if test ! -f Modules/Setup.local
 then
 	echo "# Edit this file for local setup changes" >Modules/Setup.local
diff --git a/pyconfig.h.in b/pyconfig.h.in
index 9c3ca53..8df7f9b 100644
--- a/pyconfig.h.in
+++ b/pyconfig.h.in
@@ -670,6 +670,12 @@
 /* Define to 1 if you have the <utime.h> header file. */
 #undef HAVE_UTIME_H
 
+/* Define to 1 if you have the `wait3' function. */
+#undef HAVE_WAIT3
+
+/* Define to 1 if you have the `wait4' function. */
+#undef HAVE_WAIT4
+
 /* Define to 1 if you have the `waitpid' function. */
 #undef HAVE_WAITPID
 
diff --git a/setup.py b/setup.py
index 09b79fd..61c1fec 100644
--- a/setup.py
+++ b/setup.py
@@ -180,6 +180,10 @@
 
     def build_extension(self, ext):
 
+        if ext.name == '_ctypes':
+            if not self.configure_ctypes(ext):
+                return
+
         try:
             build_ext.build_extension(self, ext)
         except (CCompilerError, DistutilsError), why:
@@ -326,8 +330,6 @@
         #
 
         # Some modules that are normally always on:
-        exts.append( Extension('regex', ['regexmodule.c', 'regexpr.c']) )
-
         exts.append( Extension('_weakref', ['_weakref.c']) )
 
         # array objects
@@ -551,8 +553,8 @@
             exts.append( Extension('_sha', ['shamodule.c']) )
             # The _md5 module implements the RSA Data Security, Inc. MD5
             # Message-Digest Algorithm, described in RFC 1321.  The
-            # necessary files md5c.c and md5.h are included here.
-            exts.append( Extension('_md5', ['md5module.c', 'md5c.c']) )
+            # necessary files md5.c and md5.h are included here.
+            exts.append( Extension('_md5', ['md5module.c', 'md5.c']) )
 
         if (openssl_ver < 0x00908000):
             # OpenSSL doesn't do these until 0.9.8 so we'll bring our own hash
@@ -569,14 +571,13 @@
 
         # Sleepycat Berkeley DB interface.  http://www.sleepycat.com
         #
-        # This requires the Sleepycat DB code. The earliest supported version
-        # of that library is 3.2, the latest supported version is 4.4.  A list
-        # of available releases can be found at
-        #
-        # http://www.sleepycat.com/update/index.html
+        # This requires the Sleepycat DB code. The supported versions
+        # are set below.  Visit http://www.sleepycat.com/ to download
+        # a release.  Most open source OSes come with one or more
+        # versions of BerkeleyDB already installed.
 
         max_db_ver = (4, 4)
-        min_db_ver = (3, 2)
+        min_db_ver = (3, 3)
         db_setup_debug = False   # verbose debug prints from this script?
 
         # construct a list of paths to look for the header file in on
@@ -691,6 +692,88 @@
             dblibs = []
             dblib_dir = None
 
+        # The sqlite interface
+        sqlite_setup_debug = True   # verbose debug prints from this script?
+
+        # We hunt for #define SQLITE_VERSION "n.n.n"
+        # We need to find >= sqlite version 3.0.8
+        sqlite_incdir = sqlite_libdir = None
+        sqlite_inc_paths = [ '/usr/include',
+                             '/usr/include/sqlite',
+                             '/usr/include/sqlite3',
+                             '/usr/local/include',
+                             '/usr/local/include/sqlite',
+                             '/usr/local/include/sqlite3',
+                           ]
+        MIN_SQLITE_VERSION_NUMBER = (3, 0, 8)
+        MIN_SQLITE_VERSION = ".".join([str(x)
+                                    for x in MIN_SQLITE_VERSION_NUMBER])
+        for d in sqlite_inc_paths + inc_dirs:
+            f = os.path.join(d, "sqlite3.h")
+            if os.path.exists(f):
+                if sqlite_setup_debug: print "sqlite: found %s"%f
+                incf = open(f).read()
+                m = re.search(
+                    r'\s*.*#\s*.*define\s.*SQLITE_VERSION\W*"(.*)"', incf)
+                if m:
+                    sqlite_version = m.group(1)
+                    sqlite_version_tuple = tuple([int(x)
+                                        for x in sqlite_version.split(".")])
+                    if sqlite_version_tuple >= MIN_SQLITE_VERSION_NUMBER:
+                        # we win!
+                        print "%s/sqlite3.h: version %s"%(d, sqlite_version)
+                        sqlite_incdir = d
+                        break
+                    else:
+                        if sqlite_setup_debug:
+                            print "%s: version %d is too old, need >= %s"%(d,
+                                        sqlite_version, MIN_SQLITE_VERSION)
+                elif sqlite_setup_debug:
+                    print "sqlite: %s had no SQLITE_VERSION"%(f,)
+
+        if sqlite_incdir:
+            sqlite_dirs_to_check = [
+                os.path.join(sqlite_incdir, '..', 'lib64'),
+                os.path.join(sqlite_incdir, '..', 'lib'),
+                os.path.join(sqlite_incdir, '..', '..', 'lib64'),
+                os.path.join(sqlite_incdir, '..', '..', 'lib'),
+            ]
+            sqlite_libfile = self.compiler.find_library_file(
+                                sqlite_dirs_to_check + lib_dirs, 'sqlite3')
+            sqlite_libdir = [os.path.abspath(os.path.dirname(sqlite_libfile))]
+
+        if sqlite_incdir and sqlite_libdir:
+            sqlite_srcs = ['_sqlite/adapters.c',
+                '_sqlite/cache.c',
+                '_sqlite/connection.c',
+                '_sqlite/converters.c',
+                '_sqlite/cursor.c',
+                '_sqlite/microprotocols.c',
+                '_sqlite/module.c',
+                '_sqlite/prepare_protocol.c',
+                '_sqlite/row.c',
+                '_sqlite/statement.c',
+                '_sqlite/util.c', ]
+
+            PYSQLITE_VERSION = "2.2.0"
+            sqlite_defines = []
+            if sys.platform != "win32":
+                sqlite_defines.append(('MODULE_NAME', '"sqlite3"'))
+            else:
+                sqlite_defines.append(('MODULE_NAME', '\\"sqlite3\\"'))
+
+            sqlite_defines.append(('PY_MAJOR_VERSION',
+                                        str(sys.version_info[0])))
+            sqlite_defines.append(('PY_MINOR_VERSION',
+                                        str(sys.version_info[1])))
+
+            exts.append(Extension('_sqlite3', sqlite_srcs,
+                                  define_macros=sqlite_defines,
+                                  include_dirs=["Modules/_sqlite",
+                                                sqlite_incdir],
+                                  library_dirs=sqlite_libdir,
+                                  runtime_library_dirs=sqlite_libdir,
+                                  libraries=["sqlite3",]))
 
         # Look for Berkeley db 1.85.   Note that it is built as a different
         # module name so it can be included even when later versions are
@@ -865,7 +948,7 @@
         # Fredrik Lundh's cElementTree module.  Note that this also
         # uses expat (via the CAPI hook in pyexpat).
 
-        if os.path.isfile('Modules/_elementtree.c'):
+        if os.path.isfile(os.path.join(srcdir, 'Modules', '_elementtree.c')):
             define_macros.append(('USE_PYEXPAT_CAPI', None))
             exts.append(Extension('_elementtree',
                                   define_macros = define_macros,
@@ -885,11 +968,11 @@
         if sys.maxint == 0x7fffffff:
             # This requires sizeof(int) == sizeof(long) == sizeof(char*)
             dl_inc = find_file('dlfcn.h', [], inc_dirs)
-            if (dl_inc is not None) and (platform not in ['atheos', 'darwin']):
+            if (dl_inc is not None) and (platform not in ['atheos']):
                 exts.append( Extension('dl', ['dlmodule.c']) )
 
         # Thomas Heller's _ctypes module
-        self.detect_ctypes()
+        self.detect_ctypes(inc_dirs, lib_dirs)
 
         # Platform-specific libraries
         if platform == 'linux2':
@@ -905,82 +988,86 @@
             exts.append( Extension('sunaudiodev', ['sunaudiodev.c']) )
 
         if platform == 'darwin' and ("--disable-toolbox-glue" not in
-            sysconfig.get_config_var("CONFIG_ARGS")):
-            # Mac OS X specific modules.
-            exts.append( Extension('_CF', ['cf/_CFmodule.c', 'cf/pycfbridge.c'],
-                        extra_link_args=['-framework', 'CoreFoundation']) )
+                sysconfig.get_config_var("CONFIG_ARGS")):
 
-            exts.append( Extension('ColorPicker', ['ColorPickermodule.c'],
-                        extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('autoGIL', ['autoGIL.c'],
-                        extra_link_args=['-framework', 'CoreFoundation']) )
-            exts.append( Extension('gestalt', ['gestaltmodule.c'],
-                        extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('MacOS', ['macosmodule.c'],
-                        extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('OSATerminology', ['OSATerminology.c'],
-                        extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('icglue', ['icgluemodule.c'],
-                        extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Res', ['res/_Resmodule.c'],
-                        extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Snd', ['snd/_Sndmodule.c'],
-                        extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('Nav', ['Nav.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_AE', ['ae/_AEmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_AH', ['ah/_AHmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_App', ['app/_Appmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_CarbonEvt', ['carbonevt/_CarbonEvtmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_CG', ['cg/_CGmodule.c'],
-                    extra_link_args=['-framework', 'ApplicationServices']) )
-            exts.append( Extension('_Cm', ['cm/_Cmmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Ctl', ['ctl/_Ctlmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Dlg', ['dlg/_Dlgmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Drag', ['drag/_Dragmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Evt', ['evt/_Evtmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_File', ['file/_Filemodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Folder', ['folder/_Foldermodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Fm', ['fm/_Fmmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Help', ['help/_Helpmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Icn', ['icn/_Icnmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_IBCarbon', ['ibcarbon/_IBCarbon.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Launch', ['launch/_Launchmodule.c'],
-                    extra_link_args=['-framework', 'ApplicationServices']) )
-            exts.append( Extension('_List', ['list/_Listmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Menu', ['menu/_Menumodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Mlte', ['mlte/_Mltemodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_OSA', ['osa/_OSAmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Qd', ['qd/_Qdmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_Qdoffs', ['qdoffs/_Qdoffsmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
+            if os.uname()[2] > '8.':
+                # We're on Mac OS X 10.4 or later, the compiler should
+                # support '-Wno-deprecated-declarations'. This will
+                # surpress deprecation warnings for the Carbon extensions,
+                # these extensions wrap the Carbon APIs and even those
+                # parts that are deprecated.
+                carbon_extra_compile_args = ['-Wno-deprecated-declarations']
+            else:
+                carbon_extra_compile_args = []
+
+            # Mac OS X specific modules.
+            def macSrcExists(name1, name2=''):
+                if not name1:
+                    return None
+                names = (name1,)
+                if name2:
+                    names = (name1, name2)
+                path = os.path.join(srcdir, 'Mac', 'Modules', *names)
+                return os.path.exists(path)
+
+            def addMacExtension(name, kwds, extra_srcs=[]):
+                dirname = ''
+                if name[0] == '_':
+                    dirname = name[1:].lower()
+                cname = name + '.c'
+                cmodulename = name + 'module.c'
+                # Check for NNN.c, NNNmodule.c, _nnn/NNN.c, _nnn/NNNmodule.c
+                if macSrcExists(cname):
+                    srcs = [cname]
+                elif macSrcExists(cmodulename):
+                    srcs = [cmodulename]
+                elif macSrcExists(dirname, cname):
+                    # XXX(nnorwitz): If all the names ended with module, we
+                    # wouldn't need this condition.  ibcarbon is the only one.
+                    srcs = [os.path.join(dirname, cname)]
+                elif macSrcExists(dirname, cmodulename):
+                    srcs = [os.path.join(dirname, cmodulename)]
+                else:
+                    raise RuntimeError("%s not found" % name)
+
+                # Here's the whole point:  add the extension with sources
+                exts.append(Extension(name, srcs + extra_srcs, **kwds))
+
+            # Core Foundation
+            core_kwds = {'extra_compile_args': carbon_extra_compile_args,
+                         'extra_link_args': ['-framework', 'CoreFoundation'],
+                        }
+            addMacExtension('_CF', core_kwds, ['cf/pycfbridge.c'])
+            addMacExtension('autoGIL', core_kwds)
+
+            # Carbon
+            carbon_kwds = {'extra_compile_args': carbon_extra_compile_args,
+                           'extra_link_args': ['-framework', 'Carbon'],
+                          }
+            CARBON_EXTS = ['ColorPicker', 'gestalt', 'MacOS', 'Nav',
+                           'OSATerminology', 'icglue',
+                           # All these are in subdirs
+                           '_AE', '_AH', '_App', '_CarbonEvt', '_Cm', '_Ctl',
+                           '_Dlg', '_Drag', '_Evt', '_File', '_Folder', '_Fm',
+                           '_Help', '_Icn', '_IBCarbon', '_List',
+                           '_Menu', '_Mlte', '_OSA', '_Res', '_Qd', '_Qdoffs',
+                           '_Scrap', '_Snd', '_TE', '_Win',
+                          ]
+            for name in CARBON_EXTS:
+                addMacExtension(name, carbon_kwds)
+
+            # Application Services & QuickTime
+            app_kwds = {'extra_compile_args': carbon_extra_compile_args,
+                        'extra_link_args': ['-framework','ApplicationServices'],
+                       }
+            addMacExtension('_Launch', app_kwds)
+            addMacExtension('_CG', app_kwds)
+
             exts.append( Extension('_Qt', ['qt/_Qtmodule.c'],
-                    extra_link_args=['-framework', 'QuickTime',
+                        extra_compile_args=carbon_extra_compile_args,
+                        extra_link_args=['-framework', 'QuickTime',
                                      '-framework', 'Carbon']) )
-            exts.append( Extension('_Scrap', ['scrap/_Scrapmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
-            exts.append( Extension('_TE', ['te/_TEmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
+
             # As there is no standardized place (yet) to put
             # user-installed Mac libraries on OSX, we search for "waste"
             # in parent directories of the Python source tree. You
@@ -992,7 +1079,6 @@
             waste_libs = find_library_file(self.compiler, "WASTE", [],
                     ["../"*n + "waste/Static Libraries" for n in (0,1,2,3,4)])
             if waste_incs != None and waste_libs != None:
-                (srcdir,) = sysconfig.get_config_vars('srcdir')
                 exts.append( Extension('waste',
                                ['waste/wastemodule.c'] + [
                                 os.path.join(srcdir, d) for d in
@@ -1005,8 +1091,6 @@
                                libraries = ['WASTE'],
                                extra_link_args = ['-framework', 'Carbon'],
                 ) )
-            exts.append( Extension('_Win', ['win/_Winmodule.c'],
-                    extra_link_args=['-framework', 'Carbon']) )
 
         self.extensions.extend(exts)
 
@@ -1183,44 +1267,55 @@
         # *** Uncomment these for TOGL extension only:
         #       -lGL -lGLU -lXext -lXmu \
 
-    def detect_ctypes(self):
-        (srcdir,) = sysconfig.get_config_vars('srcdir')
-        ffi_builddir = os.path.join(self.build_temp, 'libffi')
-        ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
-                                     '_ctypes', 'libffi'))
-        ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
+    def configure_ctypes(self, ext):
+        if not self.use_system_libffi:
+            (srcdir,) = sysconfig.get_config_vars('srcdir')
+            ffi_builddir = os.path.join(self.build_temp, 'libffi')
+            ffi_srcdir = os.path.abspath(os.path.join(srcdir, 'Modules',
+                                         '_ctypes', 'libffi'))
+            ffi_configfile = os.path.join(ffi_builddir, 'fficonfig.py')
 
-        if self.force or not os.path.exists(ffi_configfile):
-            from distutils.dir_util import mkpath
-            mkpath(ffi_builddir)
-            config_args = []
+            if self.force or not os.path.exists(ffi_configfile):
+                from distutils.dir_util import mkpath
+                mkpath(ffi_builddir)
+                config_args = []
 
-            # Pass empty CFLAGS because we'll just append the resulting CFLAGS
-            # to Python's; -g or -O2 is to be avoided.
-            cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
-                  % (ffi_builddir, ffi_srcdir, " ".join(config_args))
+                # Pass empty CFLAGS because we'll just append the resulting
+                # CFLAGS to Python's; -g or -O2 is to be avoided.
+                cmd = "cd %s && env CFLAGS='' '%s/configure' %s" \
+                      % (ffi_builddir, ffi_srcdir, " ".join(config_args))
 
-            res = os.system(cmd)
-            if res or not os.path.exists(ffi_configfile):
-                print "Failed to configure _ctypes module"
-                return
+                res = os.system(cmd)
+                if res or not os.path.exists(ffi_configfile):
+                    print "Failed to configure _ctypes module"
+                    return False
 
-        fficonfig = {}
-        execfile(ffi_configfile, globals(), fficonfig)
-        ffi_srcdir = os.path.join(fficonfig['ffi_srcdir'], 'src')
+            fficonfig = {}
+            execfile(ffi_configfile, globals(), fficonfig)
+            ffi_srcdir = os.path.join(fficonfig['ffi_srcdir'], 'src')
 
-        # Add .S (preprocessed assembly) to C compiler source extensions.
-        self.compiler.src_extensions.append('.S')
+            # Add .S (preprocessed assembly) to C compiler source extensions.
+            self.compiler.src_extensions.append('.S')
 
-        include_dirs = [os.path.join(ffi_builddir, 'include'),
-                        ffi_builddir, ffi_srcdir]
-        extra_compile_args = fficonfig['ffi_cflags'].split()
+            include_dirs = [os.path.join(ffi_builddir, 'include'),
+                            ffi_builddir, ffi_srcdir]
+            extra_compile_args = fficonfig['ffi_cflags'].split()
+
+            ext.sources.extend(fficonfig['ffi_sources'])
+            ext.include_dirs.extend(include_dirs)
+            ext.extra_compile_args.extend(extra_compile_args)
+        return True
+
+    def detect_ctypes(self, inc_dirs, lib_dirs):
+        self.use_system_libffi = False
+        include_dirs = []
+        extra_compile_args = []
         sources = ['_ctypes/_ctypes.c',
                    '_ctypes/callbacks.c',
                    '_ctypes/callproc.c',
                    '_ctypes/stgdict.c',
                    '_ctypes/cfield.c',
-                   '_ctypes/malloc_closure.c'] + fficonfig['ffi_sources']
+                   '_ctypes/malloc_closure.c']
         depends = ['_ctypes/ctypes.h']
 
         if sys.platform == 'darwin':
@@ -1232,12 +1327,40 @@
         ext = Extension('_ctypes',
                         include_dirs=include_dirs,
                         extra_compile_args=extra_compile_args,
+                        libraries=[],
                         sources=sources,
                         depends=depends)
         ext_test = Extension('_ctypes_test',
                              sources=['_ctypes/_ctypes_test.c'])
         self.extensions.extend([ext, ext_test])
 
+        if not '--with-system-ffi' in sysconfig.get_config_var("CONFIG_ARGS"):
+            return
+
+        ffi_inc = find_file('ffi.h', [], inc_dirs)
+        if ffi_inc is not None:
+            ffi_h = ffi_inc[0] + '/ffi.h'
+            fp = open(ffi_h)
+            while 1:
+                line = fp.readline()
+                if not line:
+                    ffi_inc = None
+                    break
+                if line.startswith('#define LIBFFI_H'):
+                    break
+        ffi_lib = None
+        if ffi_inc is not None:
+            for lib_name in ('ffi_convenience', 'ffi_pic', 'ffi'):
+                if (self.compiler.find_library_file(lib_dirs, lib_name)):
+                    ffi_lib = lib_name
+                    break
+
+        if ffi_inc and ffi_lib:
+            ext.include_dirs.extend(ffi_inc)
+            ext.libraries.append(ffi_lib)
+            self.use_system_libffi = True
+
+
 class PyBuildInstall(install):
     # Suppress the warning about installation into the lib_dynload
     # directory, which is not in sys.path when running Python during