Patch implementing the bz2 module.

* setup.py
  (PyBuildExt.detect_modules): Include bz2 module detection.

* Modules/bz2module.c
* Lib/test/test_bz2.py
* Doc/lib/libbz2.tex
  Include files implementing, testing, and documenting the bz2 module.

* Doc/Makefile.deps
* Doc/lib/lib.tex
  Include references to libbz2.tex.

* Misc/NEWS
  (Library): Mention distutils' C++ linkage patch and the new bz2 module.
diff --git a/Doc/Makefile.deps b/Doc/Makefile.deps
index b6c311e..f1673c1 100644
--- a/Doc/Makefile.deps
+++ b/Doc/Makefile.deps
@@ -227,6 +227,7 @@
 	lib/libcommands.tex \
 	lib/libcmath.tex \
 	lib/libgzip.tex \
+	lib/libbz2.tex \
 	lib/libzipfile.tex \
 	lib/libpprint.tex \
 	lib/libcode.tex \
diff --git a/Doc/lib/lib.tex b/Doc/lib/lib.tex
index 6b52d63..23444f6 100644
--- a/Doc/lib/lib.tex
+++ b/Doc/lib/lib.tex
@@ -169,6 +169,7 @@
 \input{libbsddb}
 \input{libzlib}
 \input{libgzip}
+\input{libbz2}
 \input{libzipfile}
 \input{libreadline}
 \input{librlcompleter}
diff --git a/Doc/lib/libbz2.tex b/Doc/lib/libbz2.tex
new file mode 100644
index 0000000..a6d9f73
--- /dev/null
+++ b/Doc/lib/libbz2.tex
@@ -0,0 +1,174 @@
+\section{\module{bz2} ---
+         Compression compatible with \program{bzip2}}
+
+\declaremodule{builtin}{bz2}
+\modulesynopsis{Interface to compression and decompression
+                routines compatible with \program{bzip2}.}
+\moduleauthor{Gustavo Niemeyer}{niemeyer@conectiva.com}
+\sectionauthor{Gustavo Niemeyer}{niemeyer@conectiva.com}
+
+\versionadded{2.3}
+
+This module provides a comprehensive interface for the bz2 compression library.
+It implements a complete file interface, one-shot (de)compression functions,
+and types for sequential (de)compression.
+
+Here is a summary of the features offered by the bz2 module:
+
+\begin{itemize}
+\item \class{BZ2File} class implements a complete file interface, including
+      \method{readline()}, \method{readlines()}, \method{xreadlines()},
+      \method{writelines()}, \method{seek()}, etc;
+\item \class{BZ2File} class implements emulated \method{seek()} support;
+\item \class{BZ2File} class implements universal newline support;
+\item \class{BZ2File} class offers an optimized line iteration using
+      the readahead algorithm borrowed from file objects;
+\item \class{BZ2File} class inherits from the builtin file type
+      (\code{isinstance(BZ2File(), file) == 1});
+\item Sequential (de)compression supported by \class{BZ2Compressor} and
+      \class{BZ2Decompressor} classes;
+\item One-shot (de)compression supported by \function{compress()} and
+      \function{decompress()} functions;
+\item Thread safety through an individual locking mechanism;
+\item Complete inline documentation.
+\end{itemize}
+
+
+\subsection{(De)compression of files}
+
+Handling of compressed files is offered by the \class{BZ2File} class.
+
+\begin{classdesc}{BZ2File}{filename \optional{, mode='r'\optional{,
+                           buffering=0\optional{, compresslevel=9}}}}
+Open a bz2 file. Mode can be either \code{'r'} or \code{'w'}, for reading
+(default) or writing. When opened for writing, the file will be created if
+it doesn't exist, and truncated otherwise. If the buffering argument is given,
+\code{0} means unbuffered, and larger numbers specify the buffer size. If
+\var{compresslevel} is given, it must be a number between \code{1} and \code{9}.
+Add a \code{'U'} to mode to open the file for input with universal newline
+support. Any line ending in the input file will be seen as a
+\code{'\textbackslash n'}
+in Python.  Also, a file so opened gains the attribute \member{newlines};
+the value for this attribute is one of \code{None} (no newline read yet),
+\code{'\textbackslash r'}, \code{'\textbackslash n'},
+\code{'\textbackslash r\textbackslash n'} or a tuple containing all the
+newline types seen. Universal newlines are available only when reading.
+\end{classdesc}
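+
+For example, opening a compressed file with universal newline support and
+then inspecting the \member{newlines} attribute might look like this (the
+file name is merely illustrative):
+
+\begin{verbatim}
+import bz2
+
+# Open an existing bz2 file for reading with universal newlines.
+f = bz2.BZ2File("data.txt.bz2", "rU")   # illustrative file name
+text = f.read()     # every line ending shows up as '\n'
+print f.newlines    # None, '\r', '\n', '\r\n' or a tuple of those
+f.close()
+\end{verbatim}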
+
+\begin{methoddesc}[BZ2File]{close}{}
+Close the file. Sets data attribute \member{closed} to true. A closed file
+cannot be used for further I/O operations. \method{close()} may be called
+more than once without error.
+\end{methoddesc}
+
+\begin{methoddesc}[BZ2File]{read}{\optional{size}}
+Read at most \var{size} uncompressed bytes, returned as a string. If the
+\var{size} argument is negative or omitted, read until EOF is reached.
+\end{methoddesc}
+
+\begin{methoddesc}[BZ2File]{readline}{\optional{size}}
+Return the next line from the file, as a string, retaining newline.
+A non-negative \var{size} argument limits the maximum number of bytes to
+return (an incomplete line may be returned then). Return an empty
+string at EOF.
+\end{methoddesc}
+
+\begin{methoddesc}[BZ2File]{readlines}{\optional{size}}
+Return a list of lines read. The optional \var{size} argument, if given,
+is an approximate bound on the total number of bytes in the lines returned.
+\end{methoddesc}
+
+\begin{methoddesc}[BZ2File]{xreadlines}{}
+For backward compatibility. \class{BZ2File} objects now include the
+performance optimizations previously implemented in the \module{xreadlines}
+module.
+\end{methoddesc}
+
+\begin{methoddesc}[BZ2File]{\_\_iter\_\_}{}
+Iterate through the lines of the file. Iteration optimization is implemented
+using the same readahead algorithm available in \class{file} objects.
+\end{methoddesc}
+
+\begin{methoddesc}[BZ2File]{seek}{offset \optional{, whence}}
+Move to new file position. Argument \var{offset} is a byte count. Optional
+argument \var{whence} defaults to \code{0} (offset from start of file,
+offset should be \code{>= 0}); other values are \code{1} (move relative to
+current position, positive or negative), and \code{2} (move relative to end
+of file, usually negative, although many platforms allow seeking beyond
+the end of a file).
+
+Note that seeking of bz2 files is emulated, and depending on the parameters
+the operation may be extremely slow.
+\end{methoddesc}
+
+\begin{methoddesc}[BZ2File]{tell}{}
+Return the current file position, an integer (may be a long integer).
+\end{methoddesc}
+
+\begin{methoddesc}[BZ2File]{write}{data}
+Write string \var{data} to file. Note that due to buffering, \method{close()}
+may be needed before the file on disk reflects the data written.
+\end{methoddesc}
+
+\begin{methoddesc}[BZ2File]{writelines}{sequence_of_strings}
+Write the sequence of strings to the file. Note that newlines are not added.
+The sequence can be any iterable object producing strings. This is equivalent
+to calling write() for each string.
+\end{methoddesc}
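+
+As a small sketch of the file interface, writing a string and reading it
+back line by line might look like this (the file name is illustrative):
+
+\begin{verbatim}
+import bz2
+
+# Write some text to a compressed file.
+f = bz2.BZ2File("example.bz2", "w")     # illustrative file name
+f.write("Hello, bz2!\n")
+f.close()
+
+# Read it back, iterating over the lines.
+f = bz2.BZ2File("example.bz2")
+for line in f:
+    print line,
+f.close()
+\end{verbatim}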
+
+
+\subsection{Sequential (de)compression}
+
+Sequential compression and decompression is done using the classes
+\class{BZ2Compressor} and \class{BZ2Decompressor}.
+
+\begin{classdesc}{BZ2Compressor}{\optional{compresslevel=9}}
+Create a new compressor object. This object may be used to compress
+data sequentially. If you want to compress data in one shot, use the
+\function{compress()} function instead. The \var{compresslevel} parameter,
+if given, must be a number between \code{1} and \code{9}.
+\end{classdesc}
+
+\begin{methoddesc}[BZ2Compressor]{compress}{data}
+Provide more data to the compressor object. It will return chunks of compressed
+data whenever possible. When you've finished providing data to compress, call
+the \method{flush()} method to finish the compression process, and return what
+is left in internal buffers.
+\end{methoddesc}
+
+\begin{methoddesc}[BZ2Compressor]{flush}{}
+Finish the compression process and return what is left in internal buffers. You
+must not use the compressor object after calling this method.
+\end{methoddesc}
+
+\begin{classdesc}{BZ2Decompressor}{}
+Create a new decompressor object. This object may be used to decompress
+data sequentially. If you want to decompress data in one shot, use the
+\function{decompress()} function instead.
+\end{classdesc}
+
+\begin{methoddesc}[BZ2Decompressor]{decompress}{data}
+Provide more data to the decompressor object. It will return chunks of
+decompressed data whenever possible. If you try to decompress data after the
+end of stream is found, \exception{EOFError} will be raised. Any data
+found after the end of stream is ignored and saved in the
+\member{unused\_data} attribute.
+\end{methoddesc}
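+
+A minimal sketch of sequential usage, feeding the compressor a couple of
+illustrative strings and decompressing the resulting stream, might look
+like this:
+
+\begin{verbatim}
+import bz2
+
+# Compress data in chunks, then flush the internal buffers.
+bz2c = bz2.BZ2Compressor()
+data = bz2c.compress("first chunk, ")
+data += bz2c.compress("second chunk")
+data += bz2c.flush()
+
+# Decompress the complete stream sequentially.
+bz2d = bz2.BZ2Decompressor()
+assert bz2d.decompress(data) == "first chunk, second chunk"
+\end{verbatim}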
+
+
+\subsection{One-shot (de)compression}
+
+One-shot compression and decompression are provided through the
+\function{compress()} and \function{decompress()} functions.
+
+\begin{funcdesc}{compress}{data\optional{, compresslevel=9}}
+Compress \var{data} in one shot. If you want to compress data sequentially,
+use an instance of \class{BZ2Compressor} instead. The \var{compresslevel}
+parameter, if given, must be a number between \code{1} and \code{9}.
+\end{funcdesc}
+
+\begin{funcdesc}{decompress}{data}
+Decompress \var{data} in one shot. If you want to decompress data
+sequentially, use an instance of \class{BZ2Decompressor} instead.
+\end{funcdesc}
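+
+A one-shot round trip might look like this (the string is illustrative):
+
+\begin{verbatim}
+import bz2
+
+data = bz2.compress("some data worth compressing")
+assert bz2.decompress(data) == "some data worth compressing"
+\end{verbatim}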
+
diff --git a/Lib/test/test_bz2.py b/Lib/test/test_bz2.py
new file mode 100644
index 0000000..5c93887
--- /dev/null
+++ b/Lib/test/test_bz2.py
@@ -0,0 +1,290 @@
+#!/usr/bin/python
+import unittest
+from cStringIO import StringIO
+import os
+import popen2
+import tempfile
+from bz2 import *
+from test import test_support
+
+class BaseTest(unittest.TestCase):
+	"Base for other testcases."
+	TEXT = 'root:x:0:0:root:/root:/bin/bash\nbin:x:1:1:bin:/bin:\ndaemon:x:2:2:daemon:/sbin:\nadm:x:3:4:adm:/var/adm:\nlp:x:4:7:lp:/var/spool/lpd:\nsync:x:5:0:sync:/sbin:/bin/sync\nshutdown:x:6:0:shutdown:/sbin:/sbin/shutdown\nhalt:x:7:0:halt:/sbin:/sbin/halt\nmail:x:8:12:mail:/var/spool/mail:\nnews:x:9:13:news:/var/spool/news:\nuucp:x:10:14:uucp:/var/spool/uucp:\noperator:x:11:0:operator:/root:\ngames:x:12:100:games:/usr/games:\ngopher:x:13:30:gopher:/usr/lib/gopher-data:\nftp:x:14:50:FTP User:/var/ftp:/bin/bash\nnobody:x:65534:65534:Nobody:/home:\npostfix:x:100:101:postfix:/var/spool/postfix:\nniemeyer:x:500:500::/home/niemeyer:/bin/bash\npostgres:x:101:102:PostgreSQL Server:/var/lib/pgsql:/bin/bash\nmysql:x:102:103:MySQL server:/var/lib/mysql:/bin/bash\nwww:x:103:104::/var/www:/bin/false\n'
+	DATA = 'BZh91AY&SY.\xc8N\x18\x00\x01>_\x80\x00\x10@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe00\x01\x99\xaa\x00\xc0\x03F\x86\x8c#&\x83F\x9a\x03\x06\xa6\xd0\xa6\x93M\x0fQ\xa7\xa8\x06\x804hh\x12$\x11\xa4i4\xf14S\xd2<Q\xb5\x0fH\xd3\xd4\xdd\xd5\x87\xbb\xf8\x94\r\x8f\xafI\x12\xe1\xc9\xf8/E\x00pu\x89\x12]\xc9\xbbDL\nQ\x0e\t1\x12\xdf\xa0\xc0\x97\xac2O9\x89\x13\x94\x0e\x1c7\x0ed\x95I\x0c\xaaJ\xa4\x18L\x10\x05#\x9c\xaf\xba\xbc/\x97\x8a#C\xc8\xe1\x8cW\xf9\xe2\xd0\xd6M\xa7\x8bXa<e\x84t\xcbL\xb3\xa7\xd9\xcd\xd1\xcb\x84.\xaf\xb3\xab\xab\xad`n}\xa0lh\tE,\x8eZ\x15\x17VH>\x88\xe5\xcd9gd6\x0b\n\xe9\x9b\xd5\x8a\x99\xf7\x08.K\x8ev\xfb\xf7xw\xbb\xdf\xa1\x92\xf1\xdd|/";\xa2\xba\x9f\xd5\xb1#A\xb6\xf6\xb3o\xc9\xc5y\\\xebO\xe7\x85\x9a\xbc\xb6f8\x952\xd5\xd7"%\x89>V,\xf7\xa6z\xe2\x9f\xa3\xdf\x11\x11"\xd6E)I\xa9\x13^\xca\xf3r\xd0\x03U\x922\xf26\xec\xb6\xed\x8b\xc3U\x13\x9d\xc5\x170\xa4\xfa^\x92\xacDF\x8a\x97\xd6\x19\xfe\xdd\xb8\xbd\x1a\x9a\x19\xa3\x80ankR\x8b\xe5\xd83]\xa9\xc6\x08\x82f\xf6\xb9"6l$\xb8j@\xc0\x8a\xb0l1..\xbak\x83ls\x15\xbc\xf4\xc1\x13\xbe\xf8E\xb8\x9d\r\xa8\x9dk\x84\xd3n\xfa\xacQ\x07\xb1%y\xaav\xb4\x08\xe0z\x1b\x16\xf5\x04\xe9\xcc\xb9\x08z\x1en7.G\xfc]\xc9\x14\xe1B@\xbb!8`'
+	DATA_CRLF = 'BZh91AY&SY\xaez\xbbN\x00\x01H\xdf\x80\x00\x12@\x02\xff\xf0\x01\x07n\x00?\xe7\xff\xe0@\x01\xbc\xc6`\x86*\x8d=M\xa9\x9a\x86\xd0L@\x0fI\xa6!\xa1\x13\xc8\x88jdi\x8d@\x03@\x1a\x1a\x0c\x0c\x83 \x00\xc4h2\x19\x01\x82D\x84e\t\xe8\x99\x89\x19\x1ah\x00\r\x1a\x11\xaf\x9b\x0fG\xf5(\x1b\x1f?\t\x12\xcf\xb5\xfc\x95E\x00ps\x89\x12^\xa4\xdd\xa2&\x05(\x87\x04\x98\x89u\xe40%\xb6\x19\'\x8c\xc4\x89\xca\x07\x0e\x1b!\x91UIFU%C\x994!DI\xd2\xfa\xf0\xf1N8W\xde\x13A\xf5\x9cr%?\x9f3;I45A\xd1\x8bT\xb1<l\xba\xcb_\xc00xY\x17r\x17\x88\x08\x08@\xa0\ry@\x10\x04$)`\xf2\xce\x89z\xb0s\xec\x9b.iW\x9d\x81\xb5-+t\x9f\x1a\'\x97dB\xf5x\xb5\xbe.[.\xd7\x0e\x81\xe7\x08\x1cN`\x88\x10\xca\x87\xc3!"\x80\x92R\xa1/\xd1\xc0\xe6mf\xac\xbd\x99\xcca\xb3\x8780>\xa4\xc7\x8d\x1a\\"\xad\xa1\xabyBg\x15\xb9l\x88\x88\x91k"\x94\xa4\xd4\x89\xae*\xa6\x0b\x10\x0c\xd6\xd4m\xe86\xec\xb5j\x8a\x86j\';\xca.\x01I\xf2\xaaJ\xe8\x88\x8cU+t3\xfb\x0c\n\xa33\x13r2\r\x16\xe0\xb3(\xbf\x1d\x83r\xe7M\xf0D\x1365\xd8\x88\xd3\xa4\x92\xcb2\x06\x04\\\xc1\xb0\xea//\xbek&\xd8\xe6+t\xe5\xa1\x13\xada\x16\xder5"w]\xa2i\xb7[\x97R \xe2IT\xcd;Z\x04dk4\xad\x8a\t\xd3\x81z\x10\xf1:^`\xab\x1f\xc5\xdc\x91N\x14$+\x9e\xae\xd3\x80'
+
+	def decompress(self, data):
+		pop = popen2.Popen3("bunzip2", capturestderr=1)
+		pop.tochild.write(data)
+		pop.tochild.close()
+		ret = pop.fromchild.read()
+		pop.fromchild.close()
+		if pop.wait() != 0:
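+			# bunzip2 unavailable or failed; fall back to the module's decompress()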
+			ret = decompress(data)
+		return ret
+
+class BZ2FileTest(BaseTest):
+	"Test BZ2File type miscellaneous methods."
+
+	def setUp(self):
+		self.filename = tempfile.mktemp("bz2")
+		
+	def tearDown(self):
+		if os.path.isfile(self.filename):
+			os.unlink(self.filename)
+	
+	def createTempFile(self, crlf=0):
+		f = open(self.filename, "w")
+		if crlf:
+			data = self.DATA_CRLF
+		else:
+			data = self.DATA
+		f.write(data)
+		f.close()
+	
+	def testRead(self):
+		"Test BZ2File.read()"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		self.assertEqual(bz2f.read(), self.TEXT)
+		bz2f.close()
+	
+	def testReadChunk10(self):
+		"Test BZ2File.read() in chunks of 10 bytes"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		text = ''
+		while 1:
+			str = bz2f.read(10)
+			if not str:
+				break
+			text += str
+		self.assertEqual(text, self.TEXT)
+		bz2f.close()
+	
+	def testRead100(self):
+		"Test BZ2File.read(100)"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		self.assertEqual(bz2f.read(100), self.TEXT[:100])
+		bz2f.close()
+	
+	def testReadLine(self):
+		"Test BZ2File.readline()"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		sio = StringIO(self.TEXT)
+		for line in sio.readlines():
+			self.assertEqual(bz2f.readline(), line)
+		bz2f.close()
+	
+	def testReadLines(self):
+		"Test BZ2File.readlines()"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		sio = StringIO(self.TEXT)
+		self.assertEqual(bz2f.readlines(), sio.readlines())
+		bz2f.close()
+	
+	def testIterator(self):
+		"Test iter(BZ2File)"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		sio = StringIO(self.TEXT)
+		self.assertEqual(list(iter(bz2f)), sio.readlines())
+		bz2f.close()
+	
+	def testXReadLines(self):
+		"Test BZ2File.xreadlines()"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		sio = StringIO(self.TEXT)
+		self.assertEqual(list(bz2f.xreadlines()), sio.readlines())
+		bz2f.close()
+	
+	def testUniversalNewlinesLF(self):
+		"Test BZ2File.read() with universal newlines (\\n)"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename, "rU")
+		self.assertEqual(bz2f.read(), self.TEXT)
+		self.assertEqual(bz2f.newlines, "\n")
+		bz2f.close()
+	
+	def testUniversalNewlinesCRLF(self):
+		"Test BZ2File.read() with universal newlines (\\r\\n)"
+		self.createTempFile(crlf=1)
+		bz2f = BZ2File(self.filename, "rU")
+		self.assertEqual(bz2f.read(), self.TEXT)
+		self.assertEqual(bz2f.newlines, "\r\n")
+		bz2f.close()
+	
+	def testWrite(self):
+		"Test BZ2File.write()"
+		bz2f = BZ2File(self.filename, "w")
+		bz2f.write(self.TEXT)
+		bz2f.close()
+		f = open(self.filename)
+		self.assertEqual(self.decompress(f.read()), self.TEXT)
+		f.close()
+
+	def testWriteChunks10(self):
+		"Test BZ2File.write() with chunks of 10 bytes"
+		bz2f = BZ2File(self.filename, "w")
+		n = 0
+		while 1:
+			str = self.TEXT[n*10:(n+1)*10]
+			if not str:
+				break
+			bz2f.write(str)
+			n += 1
+		bz2f.close()
+		f = open(self.filename)
+		self.assertEqual(self.decompress(f.read()), self.TEXT)
+		f.close()
+
+	def testWriteLines(self):
+		"Test BZ2File.writelines()"
+		bz2f = BZ2File(self.filename, "w")
+		sio = StringIO(self.TEXT)
+		bz2f.writelines(sio.readlines())
+		bz2f.close()
+		f = open(self.filename)
+		self.assertEqual(self.decompress(f.read()), self.TEXT)
+		f.close()
+	
+	def testSeekForward(self):
+		"Test BZ2File.seek(150, 0)"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		bz2f.seek(150)
+		self.assertEqual(bz2f.read(), self.TEXT[150:])
+
+	def testSeekBackwards(self):
+		"Test BZ2File.seek(-150, 1)"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		bz2f.read(500)
+		bz2f.seek(-150, 1)
+		self.assertEqual(bz2f.read(), self.TEXT[500-150:])
+
+	def testSeekBackwardsFromEnd(self):
+		"Test BZ2File.seek(-150, 2)"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		bz2f.seek(-150, 2)
+		self.assertEqual(bz2f.read(), self.TEXT[len(self.TEXT)-150:])
+	
+	def testSeekPostEnd(self):
+		"Test BZ2File.seek(150000)"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		bz2f.seek(150000)
+		self.assertEqual(bz2f.tell(), len(self.TEXT))
+		self.assertEqual(bz2f.read(), "")
+
+	def testSeekPostEndTwice(self):
+		"Test BZ2File.seek(150000) twice"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		bz2f.seek(150000)
+		bz2f.seek(150000)
+		self.assertEqual(bz2f.tell(), len(self.TEXT))
+		self.assertEqual(bz2f.read(), "")
+
+	def testSeekPreStart(self):
+		"Test BZ2File.seek(-150, 0)"
+		self.createTempFile()
+		bz2f = BZ2File(self.filename)
+		bz2f.seek(-150)
+		self.assertEqual(bz2f.tell(), 0)
+		self.assertEqual(bz2f.read(), self.TEXT)
+
+class BZ2CompressorTest(BaseTest):
+	def testCompress(self):
+		"Test BZ2Compressor.compress()/flush()"
+		bz2c = BZ2Compressor()
+		data = bz2c.compress(self.TEXT)
+		data += bz2c.flush()
+		self.assertEqual(self.decompress(data), self.TEXT)
+
+	def testCompressChunks10(self):
+		"Test BZ2Compressor.compress()/flush() with chunks of 10 bytes"
+		bz2c = BZ2Compressor()
+		n = 0
+		data = ''
+		while 1:
+			str = self.TEXT[n*10:(n+1)*10]
+			if not str:
+				break
+			data += bz2c.compress(str)
+			n += 1
+		data += bz2c.flush()
+		self.assertEqual(self.decompress(data), self.TEXT)
+
+class BZ2DecompressorTest(BaseTest):
+	def testDecompress(self):
+		"Test BZ2Decompressor.decompress()"
+		bz2d = BZ2Decompressor()
+		text = bz2d.decompress(self.DATA)
+		self.assertEqual(text, self.TEXT)
+
+	def testDecompressChunks10(self):
+		"Test BZ2Decompressor.decompress() with chunks of 10 bytes"
+		bz2d = BZ2Decompressor()
+		text = ''
+		n = 0
+		while 1:
+			str = self.DATA[n*10:(n+1)*10]
+			if not str:
+				break
+			text += bz2d.decompress(str)
+			n += 1
+		self.assertEqual(text, self.TEXT)
+
+	def testDecompressUnusedData(self):
+		"Test BZ2Decompressor.decompress() with unused data"
+		bz2d = BZ2Decompressor()
+		unused_data = "this is unused data"
+		text = bz2d.decompress(self.DATA+unused_data)
+		self.assertEqual(text, self.TEXT)
+		self.assertEqual(bz2d.unused_data, unused_data)
+
+	def testEOFError(self):
+		"Calling BZ2Decompressor.decompress() after EOS must raise EOFError"
+		bz2d = BZ2Decompressor()
+		text = bz2d.decompress(self.DATA)
+		self.assertRaises(EOFError, bz2d.decompress, "anything")
+
+
+class FuncTest(BaseTest):
+	"Test module functions"
+	
+	def testCompress(self):
+		"Test compress() function"
+		data = compress(self.TEXT)
+		self.assertEqual(self.decompress(data), self.TEXT)
+
+	def testDecompress(self):
+		"Test decompress() function"
+		text = decompress(self.DATA)
+		self.assertEqual(text, self.TEXT)
+	
+	def testDecompressEmpty(self):
+		"Test decompress() function with empty string"
+		text = decompress("")
+		self.assertEqual(text, "")
+
+	def testDecompressIncomplete(self):
+		"Test decompress() function with incomplete data"
+		self.assertRaises(ValueError, decompress, self.DATA[:-10])
+
+def test_main():
+    test_support.run_unittest(BZ2FileTest)
+    test_support.run_unittest(BZ2CompressorTest)
+    test_support.run_unittest(BZ2DecompressorTest)
+    test_support.run_unittest(FuncTest)
+
+if __name__ == '__main__':
+    test_main()
+
+# vim:ts=4:sw=4
diff --git a/Misc/NEWS b/Misc/NEWS
index 980e4d2..c43474e 100644
--- a/Misc/NEWS
+++ b/Misc/NEWS
@@ -534,6 +534,14 @@
   has changed slightly so that an explicit maxlinelen value is always
   honored.
 
+- distutils' build_ext command now links C++ extensions with the C++
+  compiler available in the Makefile or the CXX environment variable, when
+  running under *nix.
+
+- New module bz2: provides a comprehensive interface for the bz2 compression
+  library.  It implements a complete file interface, one-shot (de)compression
+  functions, and types for sequential (de)compression.
+
 Tools/Demos
 -----------
 
diff --git a/Modules/bz2module.c b/Modules/bz2module.c
new file mode 100644
index 0000000..c664c2a
--- /dev/null
+++ b/Modules/bz2module.c
@@ -0,0 +1,2099 @@
+/*
+
+python-bz2 - python bz2 library interface
+
+Copyright (c) 2002  Gustavo Niemeyer <niemeyer@conectiva.com>
+Copyright (c) 2002  Python Software Foundation; All Rights Reserved
+
+*/
+
+#include <stdio.h>
+#include <bzlib.h>
+#include "Python.h"
+#include "structmember.h"
+
+#ifdef WITH_THREAD
+#include "pythread.h"
+#endif
+
+static char __author__[] =
+"The bz2 python module was written by:\n\
+\n\
+    Gustavo Niemeyer <niemeyer@conectiva.com>\n\
+";
+
+#define BUF(v) PyString_AS_STRING((PyStringObject *)v)
+
+#define MODE_CLOSED   0
+#define MODE_READ     1
+#define MODE_READ_EOF 2
+#define MODE_WRITE    3
+
+#define BZ2FileObject_Check(v)	((v)->ob_type == &BZ2File_Type)
+
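+/* BZS_TOTAL_OUT(bzs) gives the total number of bytes output so far,
+   combining the two 32-bit halves that libbz2 keeps in bz_stream. */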
+#if SIZEOF_LONG >= 8
+#define BZS_TOTAL_OUT(bzs) \
+	(((long)bzs->total_out_hi32 << 32) + bzs->total_out_lo32)
+#elif SIZEOF_LONG_LONG >= 8
+#define BZS_TOTAL_OUT(bzs) \
+	(((long long)bzs->total_out_hi32 << 32) + bzs->total_out_lo32)
+#else
+#define BZS_TOTAL_OUT(bzs) \
+	bzs->total_out_lo32
+#endif
+
+#ifdef WITH_THREAD
+#define ACQUIRE_LOCK(obj) PyThread_acquire_lock(obj->lock, 1)
+#define RELEASE_LOCK(obj) PyThread_release_lock(obj->lock)
+#else
+#define ACQUIRE_LOCK(obj)
+#define RELEASE_LOCK(obj)
+#endif
+
+#ifdef WITH_UNIVERSAL_NEWLINES
+/* Bits in f_newlinetypes */
+#define NEWLINE_UNKNOWN	0	/* No newline seen, yet */
+#define NEWLINE_CR 1		/* \r newline seen */
+#define NEWLINE_LF 2		/* \n newline seen */
+#define NEWLINE_CRLF 4		/* \r\n newline seen */
+#endif
+
+/* ===================================================================== */
+/* Structure definitions. */
+
+typedef struct {
+	PyFileObject file;
+	BZFILE *fp;
+	int mode;
+	long pos;
+	long size;
+#ifdef WITH_THREAD
+	PyThread_type_lock lock;
+#endif
+} BZ2FileObject;
+
+typedef struct {
+	PyObject_HEAD
+	bz_stream bzs;
+	int running;
+#ifdef WITH_THREAD
+	PyThread_type_lock lock;
+#endif
+} BZ2CompObject;
+
+typedef struct {
+	PyObject_HEAD
+	bz_stream bzs;
+	int running;
+	PyObject *unused_data;
+#ifdef WITH_THREAD
+	PyThread_type_lock lock;
+#endif
+} BZ2DecompObject;
+
+/* ===================================================================== */
+/* Utility functions. */
+
+static int
+Util_CatchBZ2Error(int bzerror)
+{
+	int ret = 0;
+	switch(bzerror) {
+		case BZ_OK:
+		case BZ_STREAM_END:
+			break;
+
+		case BZ_CONFIG_ERROR:
+			PyErr_SetString(PyExc_SystemError,
+					"the bz2 library was not compiled "
+					"correctly");
+			ret = 1;
+			break;
+			
+		case BZ_PARAM_ERROR:
+			PyErr_SetString(PyExc_ValueError,
+					"the bz2 library has received wrong "
+					"parameters");
+			ret = 1;
+			break;
+			
+		case BZ_MEM_ERROR:
+			PyErr_NoMemory();
+			ret = 1;
+			break;
+
+		case BZ_DATA_ERROR:
+		case BZ_DATA_ERROR_MAGIC:
+			PyErr_SetString(PyExc_IOError, "invalid data stream");
+			ret = 1;
+			break;
+
+		case BZ_IO_ERROR:
+			PyErr_SetString(PyExc_IOError, "unknown IO error");
+			ret = 1;
+			break;
+
+		case BZ_UNEXPECTED_EOF:
+			PyErr_SetString(PyExc_EOFError,
+					"compressed file ended before the "
+					"logical end-of-stream was detected");
+			ret = 1;
+			break;
+
+		case BZ_SEQUENCE_ERROR:
+			PyErr_SetString(PyExc_RuntimeError,
+					"wrong sequence of bz2 library "
+					"commands used");
+			ret = 1;
+			break;
+	}
+	return ret;
+}
+
+#if BUFSIZ < 8192
+#define SMALLCHUNK 8192
+#else
+#define SMALLCHUNK BUFSIZ
+#endif
+
+#if SIZEOF_INT < 4
+#define BIGCHUNK  (512 * 32)
+#else
+#define BIGCHUNK  (512 * 1024)
+#endif
+
+/* This is a hacked version of Python's fileobject.c:new_buffersize(). */
+static size_t
+Util_NewBufferSize(size_t currentsize)
+{
+	if (currentsize > SMALLCHUNK) {
+		/* Keep doubling until we reach BIGCHUNK;
+		   then keep adding BIGCHUNK. */
+		if (currentsize <= BIGCHUNK)
+			return currentsize + currentsize;
+		else
+			return currentsize + BIGCHUNK;
+	}
+	return currentsize + SMALLCHUNK;
+}
+
+/* This is a hacked version of Python's fileobject.c:get_line(). */
+static PyObject *
+Util_GetLine(BZ2FileObject *self, int n)
+{
+	char c;
+	char *buf, *end;
+	size_t total_v_size;	/* total # of slots in buffer */
+	size_t used_v_size;	/* # used slots in buffer */
+	size_t increment;       /* amount to increment the buffer */
+	PyObject *v;
+	int bzerror;
+#ifdef WITH_UNIVERSAL_NEWLINES
+	int newlinetypes = ((PyFileObject*)self)->f_newlinetypes;
+	int skipnextlf = ((PyFileObject*)self)->f_skipnextlf;
+	int univ_newline = ((PyFileObject*)self)->f_univ_newline;
+#endif
+
+	total_v_size = n > 0 ? n : 100;
+	v = PyString_FromStringAndSize((char *)NULL, total_v_size);
+	if (v == NULL)
+		return NULL;
+
+	buf = BUF(v);
+	end = buf + total_v_size;
+
+	for (;;) {
+		Py_BEGIN_ALLOW_THREADS
+#ifdef WITH_UNIVERSAL_NEWLINES
+		if (univ_newline) {
+			while (1) {
+				BZ2_bzRead(&bzerror, self->fp, &c, 1);
+				self->pos++;
+				if (bzerror != BZ_OK || buf == end)
+					break;
+				if (skipnextlf ) {
+					skipnextlf = 0;
+					if (c == '\n') {
+						/* Seeing a \n here with 
+						 * skipnextlf true means we 
+						 * saw a \r before.
+						 */
+						newlinetypes |= NEWLINE_CRLF;
+						BZ2_bzRead(&bzerror, self->fp,
+							   &c, 1);
+						if (bzerror != BZ_OK)
+							break;
+					} else {
+						newlinetypes |= NEWLINE_CR;
+					}
+				}
+				if (c == '\r') {
+					skipnextlf = 1;
+					c = '\n';
+				} else if ( c == '\n')
+					newlinetypes |= NEWLINE_LF;
+				*buf++ = c;
+				if (c == '\n') break;
+			}
+			if (bzerror == BZ_STREAM_END && skipnextlf)
+				newlinetypes |= NEWLINE_CR;
+		} else /* If not universal newlines use the normal loop */
+#endif
+			do {
+				BZ2_bzRead(&bzerror, self->fp, &c, 1);
+				self->pos++;
+				*buf++ = c;
+			} while (bzerror == BZ_OK && c != '\n' && buf != end);
+		Py_END_ALLOW_THREADS
+#ifdef WITH_UNIVERSAL_NEWLINES
+		((PyFileObject*)self)->f_newlinetypes = newlinetypes;
+		((PyFileObject*)self)->f_skipnextlf = skipnextlf;
+#endif
+		if (bzerror == BZ_STREAM_END) {
+			self->size = self->pos;
+			self->mode = MODE_READ_EOF;
+			break;
+		} else if (bzerror != BZ_OK) {
+			Util_CatchBZ2Error(bzerror);
+			Py_DECREF(v);
+			return NULL;
+		}
+		if (c == '\n')
+			break;
+		/* Must be because buf == end */
+		if (n > 0)
+			break;
+		used_v_size = total_v_size;
+		increment = total_v_size >> 2; /* mild exponential growth */
+		total_v_size += increment;
+		if (total_v_size > INT_MAX) {
+			PyErr_SetString(PyExc_OverflowError,
+			    "line is longer than a Python string can hold");
+			Py_DECREF(v);
+			return NULL;
+		}
+		if (_PyString_Resize(&v, total_v_size) < 0)
+			return NULL;
+		buf = BUF(v) + used_v_size;
+		end = BUF(v) + total_v_size;
+	}
+
+	used_v_size = buf - BUF(v);
+	if (used_v_size != total_v_size)
+		_PyString_Resize(&v, used_v_size);
+	return v;
+}
+
+#ifndef WITH_UNIVERSAL_NEWLINES
+#define Util_UnivNewlineRead(a,b,c,d,e) BZ2_bzRead(a,b,c,d)
+#else
+/* This is a hacked version of Python's
+ * fileobject.c:Py_UniversalNewlineFread(). */
+size_t
+Util_UnivNewlineRead(int *bzerror, BZFILE *stream,
+		     char* buf, size_t n, BZ2FileObject *fobj)
+{
+	char *dst = buf;
+	PyFileObject *f = (PyFileObject *)fobj;
+	int newlinetypes, skipnextlf;
+
+	assert(buf != NULL);
+	assert(stream != NULL);
+
+	if (!f->f_univ_newline)
+		return BZ2_bzRead(bzerror, stream, buf, n);
+
+	newlinetypes = f->f_newlinetypes;
+	skipnextlf = f->f_skipnextlf;
+
+	/* Invariant:  n is the number of bytes remaining to be filled
+	 * in the buffer.
+	 */
+	while (n) {
+		size_t nread;
+		int shortread;
+		char *src = dst;
+
+		nread = BZ2_bzRead(bzerror, stream, dst, n);
+		assert(nread <= n);
+		n -= nread; /* assuming 1 byte out for each in; will adjust */
+		shortread = n != 0;	/* true iff EOF or error */
+		while (nread--) {
+			char c = *src++;
+			if (c == '\r') {
+				/* Save as LF and set flag to skip next LF. */
+				*dst++ = '\n';
+				skipnextlf = 1;
+			}
+			else if (skipnextlf && c == '\n') {
+				/* Skip LF, and remember we saw CR LF. */
+				skipnextlf = 0;
+				newlinetypes |= NEWLINE_CRLF;
+				++n;
+			}
+			else {
+				/* Normal char to be stored in buffer.  Also
+				 * update the newlinetypes flag if either this
+				 * is an LF or the previous char was a CR.
+				 */
+				if (c == '\n')
+					newlinetypes |= NEWLINE_LF;
+				else if (skipnextlf)
+					newlinetypes |= NEWLINE_CR;
+				*dst++ = c;
+				skipnextlf = 0;
+			}
+		}
+		if (shortread) {
+			/* If this is EOF, update type flags. */
+			if (skipnextlf && *bzerror == BZ_STREAM_END)
+				newlinetypes |= NEWLINE_CR;
+			break;
+		}
+	}
+	f->f_newlinetypes = newlinetypes;
+	f->f_skipnextlf = skipnextlf;
+	return dst - buf;
+}
+#endif
+
+/* This is a hacked version of Python's fileobject.c:drop_readahead(). */
+static void
+Util_DropReadAhead(BZ2FileObject *self)
+{
+	PyFileObject *f = (PyFileObject*)self;
+	if (f->f_buf != NULL) {
+		PyMem_Free(f->f_buf);
+		f->f_buf = NULL;
+	}
+}
+
+/* This is a hacked version of Python's fileobject.c:readahead(). */
+static int
+Util_ReadAhead(BZ2FileObject *self, int bufsize)
+{
+	int chunksize;
+	int bzerror;
+	PyFileObject *f = (PyFileObject*)self;
+
+	if (f->f_buf != NULL) {
+		if((f->f_bufend - f->f_bufptr) >= 1) 
+			return 0;
+		else
+			Util_DropReadAhead(self);
+	}
+	if (self->mode == MODE_READ_EOF) {
+		return -1;
+	}
+	if ((f->f_buf = PyMem_Malloc(bufsize)) == NULL) {
+		return -1;
+	}
+	Py_BEGIN_ALLOW_THREADS
+	chunksize = Util_UnivNewlineRead(&bzerror, self->fp, f->f_buf,
+					 bufsize, self);
+	Py_END_ALLOW_THREADS
+	self->pos += chunksize;
+	if (bzerror == BZ_STREAM_END) {
+		self->size = self->pos;
+		self->mode = MODE_READ_EOF;
+	} else if (bzerror != BZ_OK) {
+		Util_CatchBZ2Error(bzerror);
+		Util_DropReadAhead(self);
+		return -1;
+	}
+	f->f_bufptr = f->f_buf;
+	f->f_bufend = f->f_buf + chunksize;
+	return 0;
+}
+
+/* This is a hacked version of Python's
+ * fileobject.c:readahead_get_line_skip(). */
+static PyStringObject *
+Util_ReadAheadGetLineSkip(BZ2FileObject *bf, int skip, int bufsize)
+{
+	PyFileObject *f = (PyFileObject*)bf;
+	PyStringObject* s;
+	char *bufptr;
+	char *buf;
+	int len;
+
+	if (f->f_buf == NULL)
+		if (Util_ReadAhead(bf, bufsize) < 0) 
+			return NULL;
+
+	len = f->f_bufend - f->f_bufptr;
+	if (len == 0) 
+		return (PyStringObject *)
+			PyString_FromStringAndSize(NULL, skip);
+	bufptr = memchr(f->f_bufptr, '\n', len);
+	if (bufptr != NULL) {
+		bufptr++;			/* Count the '\n' */
+		len = bufptr - f->f_bufptr;
+		s = (PyStringObject *)
+			PyString_FromStringAndSize(NULL, skip+len);
+		if (s == NULL) 
+			return NULL;
+		memcpy(PyString_AS_STRING(s)+skip, f->f_bufptr, len);
+		f->f_bufptr = bufptr;
+		if (bufptr == f->f_bufend)
+			Util_DropReadAhead(bf);
+	} else {
+		bufptr = f->f_bufptr;
+		buf = f->f_buf;
+		f->f_buf = NULL; 	/* Force new readahead buffer */
+		s = Util_ReadAheadGetLineSkip(
+			bf, skip+len, bufsize + (bufsize>>2) );
+		if (s == NULL) {
+		        PyMem_Free(buf);
+			return NULL;
+		}
+		memcpy(PyString_AS_STRING(s)+skip, bufptr, len);
+		PyMem_Free(buf);
+	}
+	return s;
+}
+
+/* ===================================================================== */
+/* Methods of BZ2File. */
+
+PyDoc_STRVAR(BZ2File_read__doc__,
+"read([size]) -> string\n\
+\n\
+Read at most size uncompressed bytes, returned as a string. If the size\n\
+argument is negative or omitted, read until EOF is reached.\n\
+");
+
+/* This is a hacked version of Python's fileobject.c:file_read(). */
+static PyObject *
+BZ2File_read(BZ2FileObject *self, PyObject *args)
+{
+	long bytesrequested = -1;
+	size_t bytesread, buffersize, chunksize;
+	int bzerror;
+	PyObject *ret = NULL;
+	
+	if (!PyArg_ParseTuple(args, "|l:read", &bytesrequested))
+		return NULL;
+	
+	ACQUIRE_LOCK(self);
+	switch (self->mode) {
+		case MODE_READ:
+			break;
+		case MODE_READ_EOF:
+			ret = PyString_FromString("");
+			goto cleanup;
+		case MODE_CLOSED:
+			PyErr_SetString(PyExc_ValueError,
+					"I/O operation on closed file");
+			goto cleanup;
+		default:
+			PyErr_SetString(PyExc_IOError,
+					"file is not ready for reading");
+			goto cleanup;
+	}
+
+	if (bytesrequested < 0)
+		buffersize = Util_NewBufferSize((size_t)0);
+	else
+		buffersize = bytesrequested;
+	if (buffersize > INT_MAX) {
+		PyErr_SetString(PyExc_OverflowError,
+			"requested number of bytes is more than a Python string can hold");
+		goto cleanup;
+	}
+	ret = PyString_FromStringAndSize((char *)NULL, buffersize);
+	if (ret == NULL)
+		goto cleanup;
+	bytesread = 0;
+
+	for (;;) {
+		Py_BEGIN_ALLOW_THREADS
+		chunksize = Util_UnivNewlineRead(&bzerror, self->fp,
+						 BUF(ret)+bytesread,
+						 buffersize-bytesread,
+						 self);
+		self->pos += chunksize;
+		Py_END_ALLOW_THREADS
+		bytesread += chunksize;
+		if (bzerror == BZ_STREAM_END) {
+			self->size = self->pos;
+			self->mode = MODE_READ_EOF;
+			break;
+		} else if (bzerror != BZ_OK) {
+			Util_CatchBZ2Error(bzerror);
+			Py_DECREF(ret);
+			ret = NULL;
+			goto cleanup;
+		}
+		if (bytesrequested < 0) {
+			buffersize = Util_NewBufferSize(buffersize);
+			if (_PyString_Resize(&ret, buffersize) < 0)
+				goto cleanup;
+		} else {
+			break;
+		}
+	}
+	if (bytesread != buffersize)
+		_PyString_Resize(&ret, bytesread);
+
+cleanup:
+	RELEASE_LOCK(self);
+	return ret;
+}
+
+PyDoc_STRVAR(BZ2File_readline__doc__,
+"readline([size]) -> string\n\
+\n\
+Return the next line from the file, as a string, retaining newline.\n\
+A non-negative size argument will limit the maximum number of bytes to\n\
+return (an incomplete line may be returned then). Return an empty\n\
+string at EOF.\n\
+");
+
+static PyObject *
+BZ2File_readline(BZ2FileObject *self, PyObject *args)
+{
+	PyObject *ret = NULL;
+	int sizehint = -1;
+
+	if (!PyArg_ParseTuple(args, "|i:readline", &sizehint))
+		return NULL;
+
+	ACQUIRE_LOCK(self);
+	switch (self->mode) {
+		case MODE_READ:
+			break;
+		case MODE_READ_EOF:
+			ret = PyString_FromString("");
+			goto cleanup;
+		case MODE_CLOSED:
+			PyErr_SetString(PyExc_ValueError,
+					"I/O operation on closed file");
+			goto cleanup;
+		default:
+			PyErr_SetString(PyExc_IOError,
+					"file is not ready for reading");
+			goto cleanup;
+	}
+
+	if (sizehint == 0)
+		ret = PyString_FromString("");
+	else
+		ret = Util_GetLine(self, (sizehint < 0) ? 0 : sizehint);
+
+cleanup:
+	RELEASE_LOCK(self);
+	return ret;
+}
+
+PyDoc_STRVAR(BZ2File_readlines__doc__,
+"readlines([size]) -> list\n\
+\n\
+Call readline() repeatedly and return a list of lines read.\n\
+The optional size argument, if given, is an approximate bound on the\n\
+total number of bytes in the lines returned.\n\
+");
+
+/* This is a hacked version of Python's fileobject.c:file_readlines(). */
+static PyObject *
+BZ2File_readlines(BZ2FileObject *self, PyObject *args)
+{
+	long sizehint = 0;
+	PyObject *list = NULL;
+	PyObject *line;
+	char small_buffer[SMALLCHUNK];
+	char *buffer = small_buffer;
+	size_t buffersize = SMALLCHUNK;
+	PyObject *big_buffer = NULL;
+	size_t nfilled = 0;
+	size_t nread;
+	size_t totalread = 0;
+	char *p, *q, *end;
+	int err;
+	int shortread = 0;
+	int bzerror;
+
+	if (!PyArg_ParseTuple(args, "|l:readlines", &sizehint))
+		return NULL;
+
+	ACQUIRE_LOCK(self);
+	switch (self->mode) {
+		case MODE_READ:
+			break;
+		case MODE_READ_EOF:
+			list = PyList_New(0);
+			goto cleanup;
+		case MODE_CLOSED:
+			PyErr_SetString(PyExc_ValueError,
+					"I/O operation on closed file");
+			goto cleanup;
+		default:
+			PyErr_SetString(PyExc_IOError,
+					"file is not ready for reading");
+			goto cleanup;
+	}
+
+	if ((list = PyList_New(0)) == NULL)
+		goto cleanup;
+
+	for (;;) {
+		Py_BEGIN_ALLOW_THREADS
+		nread = Util_UnivNewlineRead(&bzerror, self->fp,
+					     buffer+nfilled,
+					     buffersize-nfilled, self);
+		self->pos += nread;
+		Py_END_ALLOW_THREADS
+		if (bzerror == BZ_STREAM_END) {
+			self->size = self->pos;
+			self->mode = MODE_READ_EOF;
+			if (nread == 0) {
+				sizehint = 0;
+				break;
+			}
+			shortread = 1;
+		} else if (bzerror != BZ_OK) {
+			Util_CatchBZ2Error(bzerror);
+		  error:
+			Py_DECREF(list);
+			list = NULL;
+			goto cleanup;
+		}
+		totalread += nread;
+		p = memchr(buffer+nfilled, '\n', nread);
+		if (p == NULL) {
+			/* Need a larger buffer to fit this line */
+			nfilled += nread;
+			buffersize *= 2;
+			if (buffersize > INT_MAX) {
+				PyErr_SetString(PyExc_OverflowError,
+			    "line is longer than a Python string can hold");
+				goto error;
+			}
+			if (big_buffer == NULL) {
+				/* Create the big buffer */
+				big_buffer = PyString_FromStringAndSize(
+					NULL, buffersize);
+				if (big_buffer == NULL)
+					goto error;
+				buffer = PyString_AS_STRING(big_buffer);
+				memcpy(buffer, small_buffer, nfilled);
+			}
+			else {
+				/* Grow the big buffer */
+				_PyString_Resize(&big_buffer, buffersize);
+				buffer = PyString_AS_STRING(big_buffer);
+			}
+			continue;
+		}
+		end = buffer+nfilled+nread;
+		q = buffer;
+		do {
+			/* Process complete lines */
+			p++;
+			line = PyString_FromStringAndSize(q, p-q);
+			if (line == NULL)
+				goto error;
+			err = PyList_Append(list, line);
+			Py_DECREF(line);
+			if (err != 0)
+				goto error;
+			q = p;
+			p = memchr(q, '\n', end-q);
+		} while (p != NULL);
+		/* Move the remaining incomplete line to the start */
+		nfilled = end-q;
+		memmove(buffer, q, nfilled);
+		if (sizehint > 0)
+			if (totalread >= (size_t)sizehint)
+				break;
+		if (shortread) {
+			sizehint = 0;
+			break;
+		}
+	}
+	if (nfilled != 0) {
+		/* Partial last line */
+		line = PyString_FromStringAndSize(buffer, nfilled);
+		if (line == NULL)
+			goto error;
+		if (sizehint > 0) {
+			/* Need to complete the last line */
+			PyObject *rest = Util_GetLine(self, 0);
+			if (rest == NULL) {
+				Py_DECREF(line);
+				goto error;
+			}
+			PyString_Concat(&line, rest);
+			Py_DECREF(rest);
+			if (line == NULL)
+				goto error;
+		}
+		err = PyList_Append(list, line);
+		Py_DECREF(line);
+		if (err != 0)
+			goto error;
+	}
+
+  cleanup:
+	RELEASE_LOCK(self);
+	if (big_buffer) {
+		Py_DECREF(big_buffer);
+	}
+	return list;
+}
+
+PyDoc_STRVAR(BZ2File_write__doc__,
+"write(data) -> None\n\
+\n\
+Write the 'data' string to file. Note that due to buffering, close() may\n\
+be needed before the file on disk reflects the data written.\n\
+");
+
+/* This is a hacked version of Python's fileobject.c:file_write(). */
+static PyObject *
+BZ2File_write(BZ2FileObject *self, PyObject *args)
+{
+	PyObject *ret = NULL;
+	char *buf;
+	int len;
+	int bzerror;
+
+	if (!PyArg_ParseTuple(args, "s#", &buf, &len))
+		return NULL;
+	
+	ACQUIRE_LOCK(self);
+	switch (self->mode) {
+		case MODE_WRITE:
+			break;
+	
+		case MODE_CLOSED:
+			PyErr_SetString(PyExc_ValueError,
+					"I/O operation on closed file");
+			goto cleanup;
+		
+		default:
+			PyErr_SetString(PyExc_IOError,
+					"file is not ready for writing");
+			goto cleanup;
+	}
+
+	PyFile_SoftSpace((PyObject*)self, 0);
+
+	Py_BEGIN_ALLOW_THREADS
+	BZ2_bzWrite (&bzerror, self->fp, buf, len);
+	self->pos += len;
+	Py_END_ALLOW_THREADS
+	
+	if (bzerror != BZ_OK) {
+		Util_CatchBZ2Error(bzerror);
+		goto cleanup;
+	}
+	
+	Py_INCREF(Py_None);
+	ret = Py_None;
+
+cleanup:
+	RELEASE_LOCK(self);
+	return ret;
+}
+
+PyDoc_STRVAR(BZ2File_writelines__doc__,
+"writelines(sequence_of_strings) -> None\n\
+\n\
+Write the sequence of strings to the file. Note that newlines are not\n\
+added. The sequence can be any iterable object producing strings. This is\n\
+equivalent to calling write() for each string.\n\
+");
+
+/* This is a hacked version of Python's fileobject.c:file_writelines(). */
+static PyObject *
+BZ2File_writelines(BZ2FileObject *self, PyObject *seq)
+{
+#define CHUNKSIZE 1000
+	PyObject *list = NULL;
+	PyObject *iter = NULL;
+	PyObject *ret = NULL;
+	PyObject *line;
+	int i, j, index, len, islist;
+	int bzerror;
+
+	ACQUIRE_LOCK(self);
+	islist = PyList_Check(seq);
+	if  (!islist) {
+		iter = PyObject_GetIter(seq);
+		if (iter == NULL) {
+			PyErr_SetString(PyExc_TypeError,
+				"writelines() requires an iterable argument");
+			goto error;
+		}
+		list = PyList_New(CHUNKSIZE);
+		if (list == NULL)
+			goto error;
+	}
+
+	/* Strategy: slurp CHUNKSIZE lines into a private list,
+	   checking that they are all strings, then write that list
+	   without holding the interpreter lock, then come back for more. */
+	for (index = 0; ; index += CHUNKSIZE) {
+		if (islist) {
+			Py_XDECREF(list);
+			list = PyList_GetSlice(seq, index, index+CHUNKSIZE);
+			if (list == NULL)
+				goto error;
+			j = PyList_GET_SIZE(list);
+		}
+		else {
+			for (j = 0; j < CHUNKSIZE; j++) {
+				line = PyIter_Next(iter);
+				if (line == NULL) {
+					if (PyErr_Occurred())
+						goto error;
+					break;
+				}
+				PyList_SetItem(list, j, line);
+			}
+		}
+		if (j == 0)
+			break;
+
+		/* Check that all entries are indeed strings. If not,
+		   apply the same rules as for file.write() and
+		   convert the rets to strings. This is slow, but
+		   seems to be the only way since all conversion APIs
+		   could potentially execute Python code. */
+		for (i = 0; i < j; i++) {
+			PyObject *v = PyList_GET_ITEM(list, i);
+			if (!PyString_Check(v)) {
+			    	const char *buffer;
+			    	int len;
+				if (PyObject_AsCharBuffer(v, &buffer, &len)) {
+					PyErr_SetString(PyExc_TypeError,
+							"writelines() "
+							"argument must be "
+							"a sequence of "
+							"strings");
+					goto error;
+				}
+				line = PyString_FromStringAndSize(buffer,
+								  len);
+				if (line == NULL)
+					goto error;
+				Py_DECREF(v);
+				PyList_SET_ITEM(list, i, line);
+			}
+		}
+
+		PyFile_SoftSpace((PyObject*)self, 0);
+
+		/* Since we are releasing the global lock, the
+		   following code may *not* execute Python code. */
+		Py_BEGIN_ALLOW_THREADS
+		for (i = 0; i < j; i++) {
+		    	line = PyList_GET_ITEM(list, i);
+			len = PyString_GET_SIZE(line);
+			BZ2_bzWrite (&bzerror, self->fp,
+				     PyString_AS_STRING(line), len);
+			if (bzerror != BZ_OK) {
+				Py_BLOCK_THREADS
+				Util_CatchBZ2Error(bzerror);
+				goto error;
+			}
+		}
+		Py_END_ALLOW_THREADS
+
+		if (j < CHUNKSIZE)
+			break;
+	}
+
+	Py_INCREF(Py_None);
+	ret = Py_None;
+
+  error:
+	RELEASE_LOCK(self);
+	Py_XDECREF(list);
+  	Py_XDECREF(iter);
+	return ret;
+#undef CHUNKSIZE
+}
+
+PyDoc_STRVAR(BZ2File_seek__doc__,
+"seek(offset [, whence]) -> None\n\
+\n\
+Move to new file position. Argument offset is a byte count. Optional\n\
+argument whence defaults to 0 (offset from start of file, offset\n\
+should be >= 0); other values are 1 (move relative to current position,\n\
+positive or negative), and 2 (move relative to end of file, usually\n\
+negative, although many platforms allow seeking beyond the end of a file).\n\
+\n\
+Note that seeking of bz2 files is emulated, and depending on the parameters\n\
+the operation may be extremely slow.\n\
+");
+
+static PyObject *
+BZ2File_seek(BZ2FileObject *self, PyObject *args)
+{
+	int where = 0;
+	long offset;
+	char small_buffer[SMALLCHUNK];
+	char *buffer = small_buffer;
+	size_t buffersize = SMALLCHUNK;
+	int bytesread = 0;
+	int readsize;
+	int chunksize;
+	int bzerror;
+	int rewind = 0;
+	PyObject *func;
+	PyObject *ret = NULL;
+	
+	if (!PyArg_ParseTuple(args, "l|i:seek", &offset, &where))
+		return NULL;
+
+	ACQUIRE_LOCK(self);
+	Util_DropReadAhead(self);
+	switch (self->mode) {
+		case MODE_READ:
+		case MODE_READ_EOF:
+			break;
+	
+		case MODE_CLOSED:
+			PyErr_SetString(PyExc_ValueError,
+					"I/O operation on closed file");
+			goto cleanup;
+		
+		default:
+			PyErr_SetString(PyExc_IOError,
+					"seek works only while reading");
+			goto cleanup;
+	}
+
+	if (offset < 0) {
+		if (where == 1) {
+			offset = self->pos + offset;
+			rewind = 1;
+		} else if (where == 2) {
+			if (self->size == -1) {
+				assert(self->mode != MODE_READ_EOF);
+				for (;;) {
+					Py_BEGIN_ALLOW_THREADS
+					chunksize = Util_UnivNewlineRead(
+							&bzerror, self->fp,
+							buffer, buffersize,
+							self);
+					self->pos += chunksize;
+					Py_END_ALLOW_THREADS
+
+					bytesread += chunksize;
+					if (bzerror == BZ_STREAM_END) {
+						break;
+					} else if (bzerror != BZ_OK) {
+						Util_CatchBZ2Error(bzerror);
+						goto cleanup;
+					}
+				}
+				self->mode = MODE_READ_EOF;
+				self->size = self->pos;
+				bytesread = 0;
+			}
+			offset = self->size + offset;
+			if (offset >= self->pos)
+				offset -= self->pos;
+			else
+				rewind = 1;
+		}
+		if (offset < 0)
+			offset = 0;
+	} else if (where == 0) {
+		if (offset >= self->pos)
+			offset -= self->pos;
+		else
+			rewind = 1;
+	}
+
+	if (rewind) {
+		BZ2_bzReadClose(&bzerror, self->fp);
+		func = Py_FindMethod(PyFile_Type.tp_methods, (PyObject*)self,
+				     "seek");
+		if (bzerror != BZ_OK) {
+			Util_CatchBZ2Error(bzerror);
+			goto cleanup;
+		}
+		if (!func) {
+			PyErr_SetString(PyExc_RuntimeError,
+					"can't find file.seek method");
+			goto cleanup;
+		}
+		ret = PyObject_CallFunction(func, "(i)", 0);
+		if (!ret)
+			goto cleanup;
+		Py_DECREF(ret);
+		ret = NULL;
+		self->pos = 0;
+		self->fp = BZ2_bzReadOpen(&bzerror,
+					  PyFile_AsFile((PyObject*)self),
+					  0, 0, NULL, 0);
+		if (bzerror != BZ_OK) {
+			Util_CatchBZ2Error(bzerror);
+			goto cleanup;
+		}
+		self->mode = MODE_READ;
+	} else if (self->mode == MODE_READ_EOF) {
+		goto exit;
+	}
+
+	if (offset == 0)
+		goto exit;
+
+	/* Before getting here, offset must be set to the number of bytes
+	 * to walk forward. */
+	for (;;) {
+		if (offset-bytesread > buffersize)
+			readsize = buffersize;
+		else
+			readsize = offset-bytesread;
+		Py_BEGIN_ALLOW_THREADS
+		chunksize = Util_UnivNewlineRead(&bzerror, self->fp,
+						 buffer, readsize, self);
+		self->pos += chunksize;
+		Py_END_ALLOW_THREADS
+		bytesread += chunksize;
+		if (bzerror == BZ_STREAM_END) {
+			self->size = self->pos;
+			self->mode = MODE_READ_EOF;
+			break;
+		} else if (bzerror != BZ_OK) {
+			Util_CatchBZ2Error(bzerror);
+			goto cleanup;
+		}
+		if (bytesread == offset)
+			break;
+	}
+
+exit:
+	Py_INCREF(Py_None);
+	ret = Py_None;
+
+cleanup:
+	RELEASE_LOCK(self);
+	return ret;
+}
+
+PyDoc_STRVAR(BZ2File_tell__doc__,
+"tell() -> int\n\
+\n\
+Return the current file position, an integer (may be a long integer).\n\
+");
+
+static PyObject *
+BZ2File_tell(BZ2FileObject *self, PyObject *args)
+{
+	PyObject *ret = NULL;
+
+	if (self->mode == MODE_CLOSED) {
+		PyErr_SetString(PyExc_ValueError,
+				"I/O operation on closed file");
+		goto cleanup;
+	}
+
+	ret = PyInt_FromLong(self->pos);
+
+cleanup:
+	return ret;
+}
+
+PyDoc_STRVAR(BZ2File_notsup__doc__,
+"Operation not supported.\n\
+");
+
+static PyObject *
+BZ2File_notsup(BZ2FileObject *self, PyObject *args)
+{
+	PyErr_SetString(PyExc_IOError, "operation not supported");
+	return NULL;
+}
+
+PyDoc_STRVAR(BZ2File_close__doc__,
+"close() -> None or (perhaps) an integer\n\
+\n\
+Close the file. Sets data attribute .closed to true. A closed file\n\
+cannot be used for further I/O operations. close() may be called more\n\
+than once without error.\n\
+");
+
+static PyObject *
+BZ2File_close(BZ2FileObject *self)
+{
+	PyObject *file_close;
+	PyObject *ret = NULL;
+	int bzerror = BZ_OK;
+
+	ACQUIRE_LOCK(self);
+	switch (self->mode) {
+		case MODE_READ:
+		case MODE_READ_EOF:
+			BZ2_bzReadClose(&bzerror, self->fp);
+			break;
+		case MODE_WRITE:
+			BZ2_bzWriteClose(&bzerror, self->fp,
+					 0, NULL, NULL);
+			break;
+	}
+	self->mode = MODE_CLOSED;
+	file_close = Py_FindMethod(PyFile_Type.tp_methods, (PyObject*)self,
+				   "close");
+	if (!file_close) {
+		PyErr_SetString(PyExc_RuntimeError,
+				"can't find file.close method");
+		goto cleanup;
+	}
+	ret = PyObject_CallObject(file_close, NULL);
+	if (bzerror != BZ_OK) {
+		Util_CatchBZ2Error(bzerror);
+		Py_XDECREF(ret);
+		ret = NULL;
+		goto cleanup;
+	}
+
+cleanup:
+	RELEASE_LOCK(self);
+	return ret;
+}
+
+static PyMethodDef BZ2File_methods[] = {
+	{"read", (PyCFunction)BZ2File_read, METH_VARARGS, BZ2File_read__doc__},
+	{"readline", (PyCFunction)BZ2File_readline, METH_VARARGS, BZ2File_readline__doc__},
+	{"readlines", (PyCFunction)BZ2File_readlines, METH_VARARGS, BZ2File_readlines__doc__},
+	{"write", (PyCFunction)BZ2File_write, METH_VARARGS, BZ2File_write__doc__},
+	{"writelines", (PyCFunction)BZ2File_writelines, METH_O, BZ2File_writelines__doc__},
+	{"seek", (PyCFunction)BZ2File_seek, METH_VARARGS, BZ2File_seek__doc__},
+	{"tell", (PyCFunction)BZ2File_tell, METH_NOARGS, BZ2File_tell__doc__},
+	{"truncate", (PyCFunction)BZ2File_notsup, METH_VARARGS, BZ2File_notsup__doc__},
+	{"readinto", (PyCFunction)BZ2File_notsup, METH_VARARGS, BZ2File_notsup__doc__},
+	{"close", (PyCFunction)BZ2File_close, METH_NOARGS, BZ2File_close__doc__},
+	{NULL,		NULL}		/* sentinel */
+};
+
+
+/* ===================================================================== */
+/* Slot definitions for BZ2File_Type. */
+
+static int
+BZ2File_init(BZ2FileObject *self, PyObject *args, PyObject *kwargs)
+{
+	PyObject *file_args = NULL;
+	static char *kwlist[] = {"filename", "mode", "buffering",
+				 "compresslevel", 0};
+	char *name = NULL;
+	char *mode = "r";
+	int buffering = -1;
+	int compresslevel = 9;
+	int bzerror;
+	int mode_char = 0;
+	int univ_newline = 0;
+
+	self->size = -1;
+	
+	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "et|sii:BZ2File",
+					 kwlist, Py_FileSystemDefaultEncoding,
+					 &name, &mode, &buffering,
+					 &compresslevel))
+		return -1;
+
+	if (compresslevel < 1 || compresslevel > 9) {
+		PyErr_SetString(PyExc_ValueError,
+				"compresslevel must be between 1 and 9");
+		return -1;
+	}
+
+	for (;;) {
+		int error = 0;
+		switch (*mode) {
+			case 'r':
+			case 'w':
+				if (mode_char)
+					error = 1;
+				mode_char = *mode;
+				break;
+
+			case 'b':
+				break;
+
+			case 'U':
+				univ_newline = 1;
+				break;
+
+			default:
+				error = 1;
+		}
+		if (error) {
+			PyErr_SetString(PyExc_ValueError, "invalid mode");
+			return -1;
+		}
+		mode++;
+		if (*mode == 0)
+			break;
+	}
+
+	if (mode_char == 'r')
+		mode = univ_newline ? "rbU" : "rb";
+	else
+		mode = univ_newline ? "wbU" : "wb";
+	
+	file_args = Py_BuildValue("(ssi)", name, mode, buffering);
+	if (!file_args)
+		goto error;
+
+	if (PyFile_Type.tp_init((PyObject *)self, file_args, NULL) < 0)
+		goto error;
+
+#ifdef WITH_THREAD
+	self->lock = PyThread_allocate_lock();
+	if (!self->lock)
+		goto error;
+#endif
+
+	if (mode_char == 'r')
+		self->fp = BZ2_bzReadOpen(&bzerror,
+					  PyFile_AsFile((PyObject*)self),
+					  0, 0, NULL, 0);
+	else
+		self->fp = BZ2_bzWriteOpen(&bzerror,
+					   PyFile_AsFile((PyObject*)self),
+					   compresslevel, 0, 0);
+
+	if (bzerror != BZ_OK) {
+		Util_CatchBZ2Error(bzerror);
+		goto error;
+	}
+
+	self->mode = (mode_char == 'r') ? MODE_READ : MODE_WRITE;
+
+	Py_XDECREF(file_args);
+	PyMem_Free(name);
+	return 0;
+
+error:
+#ifdef WITH_THREAD
+	if (self->lock)
+		PyThread_free_lock(self->lock);
+#endif
+	Py_XDECREF(file_args);
+	PyMem_Free(name);
+	return -1;
+}
+
+static void
+BZ2File_dealloc(BZ2FileObject *self)
+{
+	int bzerror;
+#ifdef WITH_THREAD
+	if (self->lock)
+		PyThread_free_lock(self->lock);
+#endif
+	switch (self->mode) {
+		case MODE_READ:
+		case MODE_READ_EOF:
+			BZ2_bzReadClose(&bzerror, self->fp);
+			break;
+		case MODE_WRITE:
+			BZ2_bzWriteClose(&bzerror, self->fp,
+					 0, NULL, NULL);
+			break;
+	}
+	((PyObject*)self)->ob_type->tp_free((PyObject *)self);
+}
+
+/* This is a hacked version of Python's fileobject.c:file_getiter(). */
+static PyObject *
+BZ2File_getiter(BZ2FileObject *self)
+{
+	if (self->mode == MODE_CLOSED) {
+		PyErr_SetString(PyExc_ValueError,
+				"I/O operation on closed file");
+		return NULL;
+	}
+	Py_INCREF((PyObject*)self);
+	return (PyObject *)self;
+}
+
+/* This is a hacked version of Python's fileobject.c:file_iternext(). */
+#define READAHEAD_BUFSIZE 8192
+static PyObject *
+BZ2File_iternext(BZ2FileObject *self)
+{
+	PyStringObject* ret;
+	ACQUIRE_LOCK(self);
+	if (self->mode == MODE_CLOSED) {
+		RELEASE_LOCK(self);
+		PyErr_SetString(PyExc_ValueError,
+				"I/O operation on closed file");
+		return NULL;
+	}
+	ret = Util_ReadAheadGetLineSkip(self, 0, READAHEAD_BUFSIZE);
+	RELEASE_LOCK(self);
+	if (ret == NULL || PyString_GET_SIZE(ret) == 0) {
+		Py_XDECREF(ret);
+		return NULL;
+	}
+	return (PyObject *)ret;
+}
+
+/* ===================================================================== */
+/* BZ2File_Type definition. */
+
+PyDoc_VAR(BZ2File__doc__) =
+PyDoc_STR(
+"BZ2File(name [, mode='r', buffering=0, compresslevel=9]) -> file object\n\
+\n\
+Open a bz2 file. The mode can be 'r' or 'w', for reading (default) or\n\
+writing. When opened for writing, the file will be created if it doesn't\n\
+exist, and truncated otherwise. If the buffering argument is given, 0 means\n\
+unbuffered, and larger numbers specify the buffer size. If compresslevel\n\
+is given, it must be a number between 1 and 9.\n\
+")
+#ifdef WITH_UNIVERSAL_NEWLINES
+PyDoc_STR(
+"\n\
+Add a 'U' to mode to open the file for input with universal newline\n\
+support. Any line ending in the input file will be seen as a '\\n' in\n\
+Python. Also, a file so opened gains the attribute 'newlines'; the value\n\
+for this attribute is one of None (no newline read yet), '\\r', '\\n',\n\
+'\\r\\n' or a tuple containing all the newline types seen. Universal\n\
+newlines are available only when reading.\n\
+")
+#endif
+;
+
+statichere PyTypeObject BZ2File_Type = {
+	PyObject_HEAD_INIT(NULL)
+	0,			/*ob_size*/
+	"bz2.BZ2File",		/*tp_name*/
+	sizeof(BZ2FileObject),	/*tp_basicsize*/
+	0,			/*tp_itemsize*/
+	(destructor)BZ2File_dealloc, /*tp_dealloc*/
+	0,			/*tp_print*/
+	0,			/*tp_getattr*/
+	0,			/*tp_setattr*/
+	0,			/*tp_compare*/
+	0,			/*tp_repr*/
+	0,			/*tp_as_number*/
+	0,			/*tp_as_sequence*/
+	0,			/*tp_as_mapping*/
+	0,			/*tp_hash*/
+        0,                      /*tp_call*/
+        0,                      /*tp_str*/
+        PyObject_GenericGetAttr,/*tp_getattro*/
+        PyObject_GenericSetAttr,/*tp_setattro*/
+        0,                      /*tp_as_buffer*/
+        Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /*tp_flags*/
+        BZ2File__doc__,         /*tp_doc*/
+        0,                      /*tp_traverse*/
+        0,                      /*tp_clear*/
+        0,                      /*tp_richcompare*/
+        0,                      /*tp_weaklistoffset*/
+        (getiterfunc)BZ2File_getiter, /*tp_iter*/
+        (iternextfunc)BZ2File_iternext, /*tp_iternext*/
+        BZ2File_methods,        /*tp_methods*/
+        0,                      /*tp_members*/
+        0,                      /*tp_getset*/
+        0,                      /*tp_base*/
+        0,                      /*tp_dict*/
+        0,                      /*tp_descr_get*/
+        0,                      /*tp_descr_set*/
+        0,                      /*tp_dictoffset*/
+        (initproc)BZ2File_init, /*tp_init*/
+        PyType_GenericAlloc,    /*tp_alloc*/
+        0,                      /*tp_new*/
+      	_PyObject_Del,          /*tp_free*/
+        0,                      /*tp_is_gc*/
+};
+
+
+/* ===================================================================== */
+/* Methods of BZ2Comp. */
+
+PyDoc_STRVAR(BZ2Comp_compress__doc__,
+"compress(data) -> string\n\
+\n\
+Provide more data to the compressor object. It will return chunks of\n\
+compressed data whenever possible. When you've finished providing data\n\
+to compress, call the flush() method to finish the compression process,\n\
+and return what is left in the internal buffers.\n\
+");
+
+static PyObject *
+BZ2Comp_compress(BZ2CompObject *self, PyObject *args)
+{
+	char *data;
+	int datasize;
+	int bufsize = SMALLCHUNK;
+	long totalout;
+	PyObject *ret = NULL;
+	bz_stream *bzs = &self->bzs;
+	int bzerror;
+
+	if (!PyArg_ParseTuple(args, "s#", &data, &datasize))
+		return NULL;
+
+	ACQUIRE_LOCK(self);
+	if (!self->running) {
+		PyErr_SetString(PyExc_ValueError, "this object was already "
+						  "flushed");
+		goto error;
+	}
+
+	ret = PyString_FromStringAndSize(NULL, bufsize);
+	if (!ret)
+		goto error;
+
+	bzs->next_in = data;
+	bzs->avail_in = datasize;
+	bzs->next_out = BUF(ret);
+	bzs->avail_out = bufsize;
+
+	totalout = BZS_TOTAL_OUT(bzs);
+
+	for (;;) {
+		Py_BEGIN_ALLOW_THREADS
+		bzerror = BZ2_bzCompress(bzs, BZ_RUN);
+		Py_END_ALLOW_THREADS
+		if (bzerror != BZ_RUN_OK) {
+			Util_CatchBZ2Error(bzerror);
+			goto error;
+		}
+		if (bzs->avail_out == 0) {
+			bufsize = Util_NewBufferSize(bufsize);
+			if (_PyString_Resize(&ret, bufsize) < 0) {
+				BZ2_bzCompressEnd(bzs);
+				goto error;
+			}
+			bzs->next_out = BUF(ret) + (BZS_TOTAL_OUT(bzs)
+						    - totalout);
+			bzs->avail_out = bufsize - (bzs->next_out - BUF(ret));
+		} else if (bzs->avail_in == 0) {
+			break;
+		}
+	}
+
+	_PyString_Resize(&ret, BZS_TOTAL_OUT(bzs) - totalout);
+
+	RELEASE_LOCK(self);
+	return ret;
+
+error:
+	RELEASE_LOCK(self);
+	Py_XDECREF(ret);
+	return NULL;
+}
+
+PyDoc_STRVAR(BZ2Comp_flush__doc__,
+"flush() -> string\n\
+\n\
+Finish the compression process and return what is left in the internal\n\
+buffers. You must not use the compressor object after calling this method.\n\
+");
+
+static PyObject *
+BZ2Comp_flush(BZ2CompObject *self)
+{
+	int bufsize = SMALLCHUNK;
+	PyObject *ret = NULL;
+	bz_stream *bzs = &self->bzs;
+	long totalout;
+	int bzerror;
+
+	ACQUIRE_LOCK(self);
+	if (!self->running) {
+		PyErr_SetString(PyExc_ValueError, "object was already "
+						  "flushed");
+		goto error;
+	}
+	self->running = 0;
+
+	ret = PyString_FromStringAndSize(NULL, bufsize);
+	if (!ret)
+		goto error;
+
+	bzs->next_out = BUF(ret);
+	bzs->avail_out = bufsize;
+
+	totalout = BZS_TOTAL_OUT(bzs);
+
+	for (;;) {
+		Py_BEGIN_ALLOW_THREADS
+		bzerror = BZ2_bzCompress(bzs, BZ_FINISH);
+		Py_END_ALLOW_THREADS
+		if (bzerror == BZ_STREAM_END) {
+			break;
+		} else if (bzerror != BZ_FINISH_OK) {
+			Util_CatchBZ2Error(bzerror);
+			goto error;
+		}
+		if (bzs->avail_out == 0) {
+			bufsize = Util_NewBufferSize(bufsize);
+			if (_PyString_Resize(&ret, bufsize) < 0)
+				goto error;
+			bzs->next_out = BUF(ret) + (BZS_TOTAL_OUT(bzs)
+						    - totalout);
+			bzs->avail_out = bufsize - (bzs->next_out - BUF(ret));
+		}
+	}
+
+	if (bzs->avail_out != 0)
+		_PyString_Resize(&ret, BZS_TOTAL_OUT(bzs) - totalout);
+
+	RELEASE_LOCK(self);
+	return ret;
+
+error:
+	RELEASE_LOCK(self);
+	Py_XDECREF(ret);
+	return NULL;
+}
+
+static PyMethodDef BZ2Comp_methods[] = {
+	{"compress", (PyCFunction)BZ2Comp_compress, METH_VARARGS, BZ2Comp_compress__doc__},
+	{"flush", (PyCFunction)BZ2Comp_flush, METH_NOARGS, BZ2Comp_flush__doc__},
+	{NULL,		NULL}		/* sentinel */
+};
+
+
+/* ===================================================================== */
+/* Slot definitions for BZ2Comp_Type. */
+
+static int
+BZ2Comp_init(BZ2CompObject *self, PyObject *args, PyObject *kwargs)
+{
+	int compresslevel = 9;
+	int bzerror;
+	static char *kwlist[] = {"compresslevel", 0};
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "|i:BZ2Compressor",
+					 kwlist, &compresslevel))
+		return -1;
+
+	if (compresslevel < 1 || compresslevel > 9) {
+		PyErr_SetString(PyExc_ValueError,
+				"compresslevel must be between 1 and 9");
+		goto error;
+	}
+
+#ifdef WITH_THREAD
+	self->lock = PyThread_allocate_lock();
+	if (!self->lock)
+		goto error;
+#endif
+
+	memset(&self->bzs, 0, sizeof(bz_stream));
+	bzerror = BZ2_bzCompressInit(&self->bzs, compresslevel, 0, 0);
+	if (bzerror != BZ_OK) {
+		Util_CatchBZ2Error(bzerror);
+		goto error;
+	}
+
+	self->running = 1;
+
+	return 0;
+error:
+#ifdef WITH_THREAD
+	if (self->lock) {
+		PyThread_free_lock(self->lock);
+		/* Clear the pointer so the destructor does not free it
+		   again when initialization fails. */
+		self->lock = NULL;
+	}
+#endif
+	return -1;
+}
+
+static void
+BZ2Comp_dealloc(BZ2CompObject *self)
+{
+#ifdef WITH_THREAD
+	if (self->lock)
+		PyThread_free_lock(self->lock);
+#endif
+	BZ2_bzCompressEnd(&self->bzs);
+	((PyObject*)self)->ob_type->tp_free((PyObject *)self);
+}
+
+
+/* ===================================================================== */
+/* BZ2Comp_Type definition. */
+
+PyDoc_STRVAR(BZ2Comp__doc__,
+"BZ2Compressor([compresslevel=9]) -> compressor object\n\
+\n\
+Create a new compressor object. This object may be used to compress\n\
+data sequentially. If you want to compress data in one shot, use the\n\
+compress() function instead. The compresslevel parameter, if given,\n\
+must be a number between 1 and 9.\n\
+");
+
+statichere PyTypeObject BZ2Comp_Type = {
+	PyObject_HEAD_INIT(NULL)
+	0,			/*ob_size*/
+	"bz2.BZ2Compressor",	/*tp_name*/
+	sizeof(BZ2CompObject),	/*tp_basicsize*/
+	0,			/*tp_itemsize*/
+	(destructor)BZ2Comp_dealloc, /*tp_dealloc*/
+	0,			/*tp_print*/
+	0,			/*tp_getattr*/
+	0,			/*tp_setattr*/
+	0,			/*tp_compare*/
+	0,			/*tp_repr*/
+	0,			/*tp_as_number*/
+	0,			/*tp_as_sequence*/
+	0,			/*tp_as_mapping*/
+	0,			/*tp_hash*/
+	0,			/*tp_call*/
+	0,			/*tp_str*/
+	PyObject_GenericGetAttr,/*tp_getattro*/
+	PyObject_GenericSetAttr,/*tp_setattro*/
+	0,			/*tp_as_buffer*/
+	Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /*tp_flags*/
+	BZ2Comp__doc__,		/*tp_doc*/
+	0,			/*tp_traverse*/
+	0,			/*tp_clear*/
+	0,			/*tp_richcompare*/
+	0,			/*tp_weaklistoffset*/
+	0,			/*tp_iter*/
+	0,			/*tp_iternext*/
+	BZ2Comp_methods,	/*tp_methods*/
+	0,			/*tp_members*/
+	0,			/*tp_getset*/
+	0,			/*tp_base*/
+	0,			/*tp_dict*/
+	0,			/*tp_descr_get*/
+	0,			/*tp_descr_set*/
+	0,			/*tp_dictoffset*/
+	(initproc)BZ2Comp_init,	/*tp_init*/
+	PyType_GenericAlloc,	/*tp_alloc*/
+	PyType_GenericNew,	/*tp_new*/
+	_PyObject_Del,		/*tp_free*/
+	0,			/*tp_is_gc*/
+};
+
+
+/* ===================================================================== */
+/* Members of BZ2Decomp. */
+
+#define OFF(x) offsetof(BZ2DecompObject, x)
+
+static PyMemberDef BZ2Decomp_members[] = {
+	{"unused_data", T_OBJECT, OFF(unused_data), RO},
+	{NULL}	/* Sentinel */
+};
+
+
+/* ===================================================================== */
+/* Methods of BZ2Decomp. */
+
+PyDoc_STRVAR(BZ2Decomp_decompress__doc__,
+"decompress(data) -> string\n\
+\n\
+Provide more data to the decompressor object. It will return chunks\n\
+of decompressed data whenever possible. If you try to decompress data\n\
+after the end of the stream has been found, EOFError will be raised.\n\
+Any data found after the end of the stream is ignored and saved in the\n\
+unused_data attribute.\n\
+");
+
+static PyObject *
+BZ2Decomp_decompress(BZ2DecompObject *self, PyObject *args)
+{
+	char *data;
+	int datasize;
+	int bufsize = SMALLCHUNK;
+	long totalout;
+	PyObject *ret = NULL;
+	bz_stream *bzs = &self->bzs;
+	int bzerror;
+
+	if (!PyArg_ParseTuple(args, "s#:decompress", &data, &datasize))
+		return NULL;
+
+	ACQUIRE_LOCK(self);
+	if (!self->running) {
+		PyErr_SetString(PyExc_EOFError, "end of stream was "
+						"already found");
+		goto error;
+	}
+
+	ret = PyString_FromStringAndSize(NULL, bufsize);
+	if (!ret)
+		goto error;
+
+	bzs->next_in = data;
+	bzs->avail_in = datasize;
+	bzs->next_out = BUF(ret);
+	bzs->avail_out = bufsize;
+
+	totalout = BZS_TOTAL_OUT(bzs);
+
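+	/* Decompress until the end of the stream or the end of the input,
+	   growing the output string whenever the output buffer fills up.
+	   Input left over past the end of the stream is kept in
+	   unused_data. */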
+	for (;;) {
+		Py_BEGIN_ALLOW_THREADS
+		bzerror = BZ2_bzDecompress(bzs);
+		Py_END_ALLOW_THREADS
+		if (bzerror == BZ_STREAM_END) {
+			if (bzs->avail_in != 0) {
+				Py_DECREF(self->unused_data);
+				self->unused_data =
+				    PyString_FromStringAndSize(bzs->next_in,
+							       bzs->avail_in);
+			}
+			self->running = 0;
+			break;
+		}
+		if (bzerror != BZ_OK) {
+			Util_CatchBZ2Error(bzerror);
+			goto error;
+		}
+		if (bzs->avail_out == 0) {
+			bufsize = Util_NewBufferSize(bufsize);
+			if (_PyString_Resize(&ret, bufsize) < 0) {
+				BZ2_bzDecompressEnd(bzs);
+				goto error;
+			}
+			bzs->next_out = BUF(ret) + (BZS_TOTAL_OUT(bzs)
+						    - totalout);
+			bzs->avail_out = bufsize - (bzs->next_out - BUF(ret));
+		} else if (bzs->avail_in == 0) {
+			break;
+		}
+	}
+
+	if (bzs->avail_out != 0)
+		_PyString_Resize(&ret, BZS_TOTAL_OUT(bzs) - totalout);
+
+	RELEASE_LOCK(self);
+	return ret;
+
+error:
+	RELEASE_LOCK(self);
+	Py_XDECREF(ret);
+	return NULL;
+}
+
+static PyMethodDef BZ2Decomp_methods[] = {
+	{"decompress", (PyCFunction)BZ2Decomp_decompress, METH_VARARGS, BZ2Decomp_decompress__doc__},
+	{NULL,		NULL}		/* sentinel */
+};
+
+
+/* ===================================================================== */
+/* Slot definitions for BZ2Decomp_Type. */
+
+static int
+BZ2Decomp_init(BZ2DecompObject *self, PyObject *args, PyObject *kwargs)
+{
+	int bzerror;
+
+	if (!PyArg_ParseTuple(args, ":BZ2Decompressor"))
+		return -1;
+
+#ifdef WITH_THREAD
+	self->lock = PyThread_allocate_lock();
+	if (!self->lock)
+		goto error;
+#endif
+
+	self->unused_data = PyString_FromString("");
+	if (!self->unused_data)
+		goto error;
+
+	memset(&self->bzs, 0, sizeof(bz_stream));
+	bzerror = BZ2_bzDecompressInit(&self->bzs, 0, 0);
+	if (bzerror != BZ_OK) {
+		Util_CatchBZ2Error(bzerror);
+		goto error;
+	}
+
+	self->running = 1;
+
+	return 0;
+
+error:
+#ifdef WITH_THREAD
+	if (self->lock) {
+		PyThread_free_lock(self->lock);
+		/* Clear the pointer so the destructor does not free it
+		   again when initialization fails. */
+		self->lock = NULL;
+	}
+#endif
+	Py_XDECREF(self->unused_data);
+	self->unused_data = NULL;
+	return -1;
+}
+
+static void
+BZ2Decomp_dealloc(BZ2DecompObject *self)
+{
+#ifdef WITH_THREAD
+	if (self->lock)
+		PyThread_free_lock(self->lock);
+#endif
+	Py_XDECREF(self->unused_data);
+	BZ2_bzDecompressEnd(&self->bzs);
+	((PyObject*)self)->ob_type->tp_free((PyObject *)self);
+}
+
+
+/* ===================================================================== */
+/* BZ2Decomp_Type definition. */
+
+PyDoc_STRVAR(BZ2Decomp__doc__,
+"BZ2Decompressor() -> decompressor object\n\
+\n\
+Create a new decompressor object. This object may be used to decompress\n\
+data sequentially. If you want to decompress data in one shot, use the\n\
+decompress() function instead.\n\
+");
+
+statichere PyTypeObject BZ2Decomp_Type = {
+	PyObject_HEAD_INIT(NULL)
+	0,			/*ob_size*/
+	"bz2.BZ2Decompressor",	/*tp_name*/
+	sizeof(BZ2DecompObject), /*tp_basicsize*/
+	0,			/*tp_itemsize*/
+	(destructor)BZ2Decomp_dealloc, /*tp_dealloc*/
+	0,			/*tp_print*/
+	0,			/*tp_getattr*/
+	0,			/*tp_setattr*/
+	0,			/*tp_compare*/
+	0,			/*tp_repr*/
+	0,			/*tp_as_number*/
+	0,			/*tp_as_sequence*/
+	0,			/*tp_as_mapping*/
+	0,			/*tp_hash*/
+	0,			/*tp_call*/
+	0,			/*tp_str*/
+	PyObject_GenericGetAttr,/*tp_getattro*/
+	PyObject_GenericSetAttr,/*tp_setattro*/
+	0,			/*tp_as_buffer*/
+	Py_TPFLAGS_DEFAULT|Py_TPFLAGS_BASETYPE, /*tp_flags*/
+	BZ2Decomp__doc__,	/*tp_doc*/
+	0,			/*tp_traverse*/
+	0,			/*tp_clear*/
+	0,			/*tp_richcompare*/
+	0,			/*tp_weaklistoffset*/
+	0,			/*tp_iter*/
+	0,			/*tp_iternext*/
+	BZ2Decomp_methods,	/*tp_methods*/
+	BZ2Decomp_members,	/*tp_members*/
+	0,			/*tp_getset*/
+	0,			/*tp_base*/
+	0,			/*tp_dict*/
+	0,			/*tp_descr_get*/
+	0,			/*tp_descr_set*/
+	0,			/*tp_dictoffset*/
+	(initproc)BZ2Decomp_init, /*tp_init*/
+	PyType_GenericAlloc,	/*tp_alloc*/
+	PyType_GenericNew,	/*tp_new*/
+	_PyObject_Del,		/*tp_free*/
+	0,			/*tp_is_gc*/
+};
+
+
+/* ===================================================================== */
+/* Module functions. */
+
+PyDoc_STRVAR(bz2_compress__doc__,
+"compress(data [, compresslevel=9]) -> string\n\
+\n\
+Compress data in one shot. If you want to compress data sequentially,\n\
+use an instance of BZ2Compressor instead. The compresslevel parameter, if\n\
+given, must be a number between 1 and 9.\n\
+");
+
+static PyObject *
+bz2_compress(PyObject *self, PyObject *args, PyObject *kwargs)
+{
+	int compresslevel=9;
+	char *data;
+	int datasize;
+	int bufsize;
+	PyObject *ret;
+	bz_stream _bzs;
+	bz_stream *bzs = &_bzs;
+	int bzerror;
+	static char *kwlist[] = {"data", "compresslevel", 0};
+
+	if (!PyArg_ParseTupleAndKeywords(args, kwargs, "s#|i:compress",
+					 kwlist, &data, &datasize,
+					 &compresslevel))
+		return NULL;
+
+	if (compresslevel < 1 || compresslevel > 9) {
+		PyErr_SetString(PyExc_ValueError,
+				"compresslevel must be between 1 and 9");
+		return NULL;
+	}
+
+	/* According to the bz2 manual, this is large enough to hold the
+	 * compressed data in one shot. We will check it later anyway. */
+	bufsize = datasize + (datasize/100+1) + 600;
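+	/* For example, a 100000 byte input reserves
+	   100000 + (100000/100 + 1) + 600 = 101601 bytes up front. */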
+
+	ret = PyString_FromStringAndSize(NULL, bufsize);
+	if (!ret)
+		return NULL;
+
+	memset(bzs, 0, sizeof(bz_stream));
+
+	bzs->next_in = data;
+	bzs->avail_in = datasize;
+	bzs->next_out = BUF(ret);
+	bzs->avail_out = bufsize;
+
+	bzerror = BZ2_bzCompressInit(bzs, compresslevel, 0, 0);
+	if (bzerror != BZ_OK) {
+		Util_CatchBZ2Error(bzerror);
+		Py_DECREF(ret);
+		return NULL;
+	}
+
+	for (;;) {
+		Py_BEGIN_ALLOW_THREADS
+		bzerror = BZ2_bzCompress(bzs, BZ_FINISH);
+		Py_END_ALLOW_THREADS
+		if (bzerror == BZ_STREAM_END) {
+			break;
+		} else if (bzerror != BZ_FINISH_OK) {
+			BZ2_bzCompressEnd(bzs);
+			Util_CatchBZ2Error(bzerror);
+			Py_DECREF(ret);
+			return NULL;
+		}
+		if (bzs->avail_out == 0) {
+			bufsize = Util_NewBufferSize(bufsize);
+			if (_PyString_Resize(&ret, bufsize) < 0) {
+				/* The failed resize already freed ret and
+				   set it to NULL. */
+				BZ2_bzCompressEnd(bzs);
+				return NULL;
+			}
+			bzs->next_out = BUF(ret) + BZS_TOTAL_OUT(bzs);
+			bzs->avail_out = bufsize - (bzs->next_out - BUF(ret));
+		}
+	}
+
+	if (bzs->avail_out != 0)
+		_PyString_Resize(&ret, BZS_TOTAL_OUT(bzs));
+	BZ2_bzCompressEnd(bzs);
+
+	return ret;
+}
+
+PyDoc_STRVAR(bz2_decompress__doc__,
+"decompress(data) -> decompressed data\n\
+\n\
+Decompress data in one shot. If you want to decompress data sequentially,\n\
+use an instance of BZ2Decompressor instead.\n\
+");
+
+static PyObject *
+bz2_decompress(PyObject *self, PyObject *args)
+{
+	char *data;
+	int datasize;
+	int bufsize = SMALLCHUNK;
+	PyObject *ret;
+	bz_stream _bzs;
+	bz_stream *bzs = &_bzs;
+	int bzerror;
+
+	if (!PyArg_ParseTuple(args, "s#:decompress", &data, &datasize))
+		return NULL;
+
+	if (datasize == 0)
+		return PyString_FromString("");
+
+	ret = PyString_FromStringAndSize(NULL, bufsize);
+	if (!ret)
+		return NULL;
+
+	memset(bzs, 0, sizeof(bz_stream));
+
+	bzs->next_in = data;
+	bzs->avail_in = datasize;
+	bzs->next_out = BUF(ret);
+	bzs->avail_out = bufsize;
+
+	bzerror = BZ2_bzDecompressInit(bzs, 0, 0);
+	if (bzerror != BZ_OK) {
+		Util_CatchBZ2Error(bzerror);
+		Py_DECREF(ret);
+		return NULL;
+	}
+
+	for (;;) {
+		Py_BEGIN_ALLOW_THREADS
+		bzerror = BZ2_bzDecompress(bzs);
+		Py_END_ALLOW_THREADS
+		if (bzerror == BZ_STREAM_END) {
+			break;
+		} else if (bzerror != BZ_OK) {
+			BZ2_bzDecompressEnd(bzs);
+			Util_CatchBZ2Error(bzerror);
+			Py_DECREF(ret);
+			return NULL;
+		}
+		if (bzs->avail_out == 0) {
+			bufsize = Util_NewBufferSize(bufsize);
+			if (_PyString_Resize(&ret, bufsize) < 0) {
+				/* The failed resize already freed ret and
+				   set it to NULL. */
+				BZ2_bzDecompressEnd(bzs);
+				return NULL;
+			}
+			bzs->next_out = BUF(ret) + BZS_TOTAL_OUT(bzs);
+			bzs->avail_out = bufsize - (bzs->next_out - BUF(ret));
+		} else if (bzs->avail_in == 0) {
+			BZ2_bzDecompressEnd(bzs);
+			PyErr_SetString(PyExc_ValueError,
+					"couldn't find end of stream");
+			Py_DECREF(ret);
+			return NULL;
+		}
+	}
+
+	if (bzs->avail_out != 0)
+		_PyString_Resize(&ret, BZS_TOTAL_OUT(bzs));
+	BZ2_bzDecompressEnd(bzs);
+
+	return ret;
+}
+
+static PyMethodDef bz2_methods[] = {
+	{"compress", (PyCFunction) bz2_compress, METH_VARARGS|METH_KEYWORDS,
+		bz2_compress__doc__},
+	{"decompress", (PyCFunction) bz2_decompress, METH_VARARGS,
+		bz2_decompress__doc__},
+	{NULL,		NULL}		/* sentinel */
+};
+
+/* ===================================================================== */
+/* Initialization function. */
+
+PyDoc_STRVAR(bz2__doc__,
+"The python bz2 module provides a comprehensive interface for\n\
+the bz2 compression library. It implements a complete file\n\
+interface, one shot (de)compression functions, and types for\n\
+sequential (de)compression.\n\
+");
+
+DL_EXPORT(void)
+initbz2(void)
+{
+	PyObject *m;
+
+	BZ2File_Type.ob_type = &PyType_Type;
+	BZ2File_Type.tp_base = &PyFile_Type;
+	BZ2File_Type.tp_new = PyFile_Type.tp_new;
+
+	BZ2Comp_Type.ob_type = &PyType_Type;
+	BZ2Decomp_Type.ob_type = &PyType_Type;
+
+	m = Py_InitModule3("bz2", bz2_methods, bz2__doc__);
+
+	PyModule_AddObject(m, "__author__", PyString_FromString(__author__));
+
+	Py_INCREF(&BZ2File_Type);
+	PyModule_AddObject(m, "BZ2File", (PyObject *)&BZ2File_Type);
+
+	Py_INCREF(&BZ2Comp_Type);
+	PyModule_AddObject(m, "BZ2Compressor", (PyObject *)&BZ2Comp_Type);
+
+	Py_INCREF(&BZ2Decomp_Type);
+	PyModule_AddObject(m, "BZ2Decompressor", (PyObject *)&BZ2Decomp_Type);
+}
diff --git a/setup.py b/setup.py
index 7ddef8b..e0c6686 100644
--- a/setup.py
+++ b/setup.py
@@ -702,6 +702,11 @@
                     exts.append( Extension('zlib', ['zlibmodule.c'],
                                            libraries = ['z']) )
 
+        # Gustavo Niemeyer's bz2 module.
+        if (self.compiler.find_library_file(lib_dirs, 'bz2')):
+            exts.append( Extension('bz2', ['bz2module.c'],
+                                   libraries = ['bz2']) )
+
         # Interface to the Expat XML parser
         #
         # Expat was written by James Clark and is now maintained by a