moving into final svn structure
diff --git a/CHANGELOG b/CHANGELOG
new file mode 100644
index 0000000..a62c1e7
--- /dev/null
+++ b/CHANGELOG
@@ -0,0 +1,15 @@
+
+
+
+0.1 Rev 117:
+
+    Fixed several bugs raised by James Antill:
+    1. HEAD didn't get an Accept: header added like GET.
+    2. HEAD requests did not use the cache.
+    3. GET requests with Range: headers would erroneously return a full cached response.
+    And one feature request for 'method' to default to GET.
+
+0.1 Rev 86 
+    
+    Initial Release
+
diff --git a/MANIFEST b/MANIFEST
new file mode 100644
index 0000000..f7d6af9
--- /dev/null
+++ b/MANIFEST
@@ -0,0 +1,5 @@
+httplib2.py
+setup.py
+README
+CHANGELOG
+httplib2test.py
diff --git a/Makefile b/Makefile
new file mode 100644
index 0000000..42d7605
--- /dev/null
+++ b/Makefile
@@ -0,0 +1,2 @@
+release:
+	python2.4 setup.py sdist --formats=gztar,zip
diff --git a/README b/README
new file mode 100644
index 0000000..a2a33f9
--- /dev/null
+++ b/README
@@ -0,0 +1,122 @@
+Httplib2
+
+--------------------------------------------------------------------
+Introduction
+
+A comprehensive HTTP client library, httplib2.py supports many 
+features left out of other HTTP libraries.
+
+HTTP and HTTPS
+    HTTPS support is only available if the socket module was 
+    compiled with SSL support. 
+Keep-Alive
+    Supports HTTP 1.1 Keep-Alive, keeping the socket open and 
+    performing multiple requests over the same connection if 
+    possible. 
+Authentication
+    The following three types of HTTP Authentication are 
+    supported. These can be used over both HTTP and HTTPS.
+
+        * Digest
+        * Basic
+        * WSSE
+
+Caching
+    The module can optionally operate with a private cache that 
+    understands the Cache-Control: header and uses both the ETag 
+    and Last-Modified cache validators. 
+All Methods
+    The module can handle any HTTP request method, not just GET 
+    and POST.
+Redirects
+    Automatically follows 3XX redirects on GETs.
+Compression
+    Handles both 'compress' and 'gzip' types of compression.
+Lost update support
+    Automatically adds back ETags into PUT requests to resources
+    we have already cached. This implements Section 3.2 of 
+    Detecting the Lost Update Problem Using Unreserved Checkout
+Unit Tested
+    A large and growing set of unit tests. 
+
+
+For more information on this module, see:
+
+    http://bitworking.org/projects/httplib2/
+
+
+--------------------------------------------------------------------
+Installation
+
+The httplib2 module is shipped as a distutils package.  To install
+the library, unpack the distribution archive, and issue the following
+command:
+
+    $ python setup.py install
+
+
+--------------------------------------------------------------------
+Usage
+A simple retrieval:
+
+  import httplib2
+  h = httplib2.Http(".cache")
+  (resp_headers, content) = h.request("http://example.org/", "GET")
+
+The 'content' is the content retrieved from the URL. The content 
+is already decompressed or unzipped if necessary.
+
+To PUT some content to a server that uses SSL and Basic authentication:
+
+  import httplib2
+  h = httplib2.Http(".cache")
+  h.add_credentials('name', 'password')
+  (resp, content) = h.request("https://example.org/chapter/2", 
+                            "PUT", body="This is text", 
+                            headers={'content-type':'text/plain'} )
+
+Use the Cache-Control: header to control how the caching operates.
+
+  import httplib2
+  h = httplib2.Http(".cache")
+  (resp, content) = h.request("http://bitworking.org/", "GET")
+  ...
+  (resp, content) = h.request("http://bitworking.org/", "GET", 
+                            headers={'cache-control':'no-cache'})
+
+The first request will be cached and since this is a request 
+to bitworking.org it will be set to be cached for two hours, 
+because that is how I have my server configured. Any subsequent 
+GET to that URI will return the value from the on-disk cache 
+and no request will be made to the server. You can use the 
+Cache-Control: header to change the cache's behavior and in 
+this example the second request adds the Cache-Control: 
+header with a value of 'no-cache' which tells the library 
+that the cached copy must not be used when handling this request. 
+
+
+--------------------------------------------------------------------
+Httplib2 Software License
+
+Copyright (c) 2006 by Joe Gregorio
+
+Permission is hereby granted, free of charge, to any person 
+obtaining a copy of this software and associated documentation 
+files (the "Software"), to deal in the Software without restriction, 
+including without limitation the rights to use, copy, modify, merge, 
+publish, distribute, sublicense, and/or sell copies of the Software, 
+and to permit persons to whom the Software is furnished to do so, 
+subject to the following conditions:
+
+The above copyright notice and this permission notice shall be 
+included in all copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, 
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES 
+OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND 
+NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS 
+BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN 
+ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN 
+CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE 
+SOFTWARE.
+
diff --git a/httplib2.py b/httplib2.py
new file mode 100644
index 0000000..10f50ef
--- /dev/null
+++ b/httplib2.py
@@ -0,0 +1,633 @@
+"""
+httplib2
+
+A caching http interface that supports ETags and gzip
+to conserve bandwidth. 
+
+Requires Python 2.4 or later
+"""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = []
+__license__ = "MIT"
+__history__ = """
+Fixed several bugs raised by James Antill:
+    1. HEAD didn't get an Accept: header added like GET.
+    2. HEAD requests did not use the cache.
+    3. GET requests with Range: headers would erroneously return a full cached response.
+
+and one feature request for 'method' to default to GET.
+
+"""
+__version__ = "0.1.0 ($Rev$)"
+
+import re 
+import md5
+import rfc822
+import StringIO
+import gzip
+import zlib
+import httplib
+import urlparse
+import base64
+import os
+import copy
+import calendar
+import time
+import random
+import sha
+from gettext import gettext as _
+
+# All exceptions raised here derive from HttpLib2Error
+class HttpLib2Error(Exception): pass
+
+class RedirectMissingLocation(HttpLib2Error): pass
+class RedirectLimit(HttpLib2Error): pass
+class FailedToDecompressContent(HttpLib2Error): pass
+class UnimplementedDigestAuthOptionError(HttpLib2Error): pass
+class IllFormedDigestChallengeError(HttpLib2Error): pass
+
+# Open Items:
+# -----------
+# Proxy support
+
+# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?)
+
+# Pluggable cache storage (supports storing the cache in
+#   flat files by default. We need a plug-in architecture
+#   that can support Berkeley DB and Squid)
+
+# == Known Issues ==
+# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator.
+# Does not handle Cache-Control: max-stale
+# Does not use Age: headers when calculating cache freshness.
+
+
+# The number of redirections to follow before giving up.
+# Note that only GET redirects are automatically followed.
+# Will also honor 301 requests by saving that info and never
+# requesting that URI again.
+DEFAULT_MAX_REDIRECTS = 5
+
+# Which headers are hop-by-hop headers by default
+HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade']
+
+URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?")
+
+def parse_uri(uri):
+    """Parses a URI using the regex given in Appendix B of RFC 3986.
+
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+    """
+    groups = URI.match(uri).groups()
+    return (groups[1], groups[3], groups[4], groups[6], groups[8])
+
+def _normalize_headers(headers):
+    return dict([ (key.lower(), value)  for (key, value) in headers.iteritems()])
+
+def _parse_cache_control(headers):
+    retval = {}
+    if headers.has_key('cache-control'):
+        parts =  headers['cache-control'].split(',')
+        parts_with_args = [tuple([x.strip() for x in part.split("=")]) for part in parts if -1 != part.find("=")]
+        parts_wo_args = [(name.strip(), 1) for name in parts if -1 == name.find("=")]
+        retval = dict(parts_with_args + parts_wo_args)
+    return retval 
+
+WWW_AUTH = re.compile("^(?:,?\s*(\w+)\s*=\s*\"([^\"]*?)\")(.*)$")
+# Yes, some parameters don't have quotes. Why again am I spending so much time doing HTTP?
+WWW_AUTH2 = re.compile("^(?:,?\s*(\w+)\s*=\s*(\w+))(.*)$")
+def _parse_www_authenticate(headers, headername='www-authenticate'):
+    """Returns a dictionary of dictionaries, one dict
+    per auth_scheme."""
+    retval = {}
+    if headers.has_key(headername):
+        authenticate = headers[headername].strip()
+        while authenticate:
+            # Break off the scheme at the beginning of the line
+            if headername == 'authentication-info':
+                (auth_scheme, the_rest) = ('digest', authenticate)                
+            else:
+                (auth_scheme, the_rest) = authenticate.split(" ", 1)
+            # Now loop over all the key value pairs that come after the scheme, 
+            # being careful not to roll into the next scheme
+            match = WWW_AUTH.search(the_rest)
+            match2 = WWW_AUTH2.search(the_rest)
+            auth_params = {}
+            while match or match2:
+                if match2 and len(match2.groups()) == 3:
+                    (key, value, the_rest) = match2.groups()
+                    auth_params[key.lower()] = value
+                elif match and len(match.groups()) == 3:
+                    (key, value, the_rest) = match.groups()
+                    auth_params[key.lower()] = value
+                match = WWW_AUTH.search(the_rest)
+                match2 = WWW_AUTH2.search(the_rest)
+            retval[auth_scheme.lower()] = auth_params
+            authenticate = the_rest.strip()
+    return retval
+
+
+def _entry_disposition(response_headers, request_headers):
+    """Determine freshness from the Date, Expires and Cache-Control headers.
+
+    We don't handle the following:
+
+    1. Cache-Control: max-stale
+    2. Age: headers are not used in the calculations.
+
+    Note that this algorithm is simpler than you might think 
+    because we are operating as a private (non-shared) cache.
+    This lets us ignore 's-maxage'. We can also ignore
+    'proxy-revalidate' since we aren't a proxy.
+    We will never return a stale document as 
+    fresh as a design decision, and thus the non-implementation 
+    of 'max-stale'. This also lets us safely ignore 'must-revalidate' 
+    since we operate as if every server has sent 'must-revalidate'.
+    Since we are private we get to ignore both 'public' and
+    'private' parameters. We also ignore 'no-transform' since
+    we don't do any transformations.    
+    The 'no-store' parameter is handled at a higher level.
+    So the only Cache-Control parameters we look at are:
+
+    no-cache
+    only-if-cached
+    max-age
+    min-fresh
+    """
+    
+    retval = "STALE"
+    cc = _parse_cache_control(request_headers)
+    cc_response = _parse_cache_control(response_headers)
+
+    if request_headers.has_key('pragma') and request_headers['pragma'].lower().find('no-cache') != -1:
+        retval = "TRANSPARENT"
+        if 'cache-control' not in request_headers:
+            request_headers['cache-control'] = 'no-cache'
+    elif cc.has_key('no-cache'):
+        retval = "TRANSPARENT"
+    elif cc_response.has_key('no-cache'):
+        retval = "STALE"
+    elif cc.has_key('only-if-cached'):
+        retval = "FRESH"
+    elif response_headers.has_key('date'):
+        date = calendar.timegm(rfc822.parsedate_tz(response_headers['date']))
+        now = time.time()
+        current_age = max(0, now - date)
+        if cc_response.has_key('max-age'):
+            freshness_lifetime = int(cc_response['max-age'])
+        elif response_headers.has_key('expires'):
+            expires = rfc822.parsedate_tz(response_headers['expires'])
+            freshness_lifetime = max(0, calendar.timegm(expires) - date)
+        else:
+            freshness_lifetime = 0
+        if cc.has_key('max-age'):
+            freshness_lifetime = min(freshness_lifetime, int(cc['max-age']))
+        if cc.has_key('min-fresh'):
+            current_age += int(cc['min-fresh'])
+        if freshness_lifetime > current_age:
+            retval = "FRESH"
+    return retval 
+
+def _decompressContent(response, new_content):
+    content = new_content
+    try:
+        if response.get('content-encoding', None) == 'gzip':
+            content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read()
+        if response.get('content-encoding', None) == 'deflate':
+            content = zlib.decompress(content)
+    except:
+        content = ""
+        raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'))
+    return content
+
+def _updateCache(request_headers, response_headers, content, cacheFullPath):
+    if cacheFullPath:
+        cc = _parse_cache_control(request_headers)
+        cc_response = _parse_cache_control(response_headers)
+        if cc.has_key('no-store') or cc_response.has_key('no-store'):
+            if os.path.exists(cacheFullPath):
+                os.remove(cacheFullPath)
+        else:
+            f = open(cacheFullPath, "w")
+            info = rfc822.Message(StringIO.StringIO(""))
+            for key, value in response_headers.iteritems():
+                info[key] = value
+        
+            f.write(str(info))
+            f.write("\n")
+            f.write(content)
+            f.close()
+
+def _cnonce():
+    dig = md5.new("%s:%s" % (time.ctime(), ["0123456789"[random.randrange(0, 9)] for i in range(20)])).hexdigest()
+    return dig[:16]
+
+def _wsse_username_token(cnonce, iso_now, password):
+    return base64.encodestring(sha.new("%s%s%s" % (cnonce, iso_now, password)).digest()).strip()
+
+
+# For credentials we need two things, first 
+# a pool of credentials to try (not necessarily tied to Basic, Digest, etc.)
+# Then we also need a list of URIs that have already demanded authentication
+# That list is tricky since sub-URIs can take the same auth, or the 
+# auth scheme may change as you descend the tree.
+# So we also need each Auth instance to be able to tell us
+# how close to the 'top' it is.
+
+class Authentication:
+    def __init__(self, credentials, host, request_uri, headers, response, content):
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        self.path = path
+        self.host = host
+        self.credentials = credentials
+
+    def depth(self, request_uri):
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        return request_uri[len(self.path):].count("/")
+
+    def inscope(self, host, request_uri):
+        # XXX Should we normalize the request_uri?
+        (scheme, authority, path, query, fragment) = parse_uri(request_uri)
+        return (host == self.host) and path.startswith(self.path)
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header. Over-ride this in sub-classes."""
+        pass
+
+    def response(self, response, content):
+        """Gives us a chance to update with new nonces
+        or such returned from the last authorized response.
+        Over-ride this in sub-classes if necessary.
+
+        Return TRUE if the request is to be retried, for 
+        example Digest may return stale=true.
+        """
+        return False
+
+
+
+class BasicAuthentication(Authentication):
+    def __init__(self, credentials, host, request_uri, headers, response, content):
+        Authentication.__init__(self, credentials, host, request_uri, headers, response, content)
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers['authorization'] = 'Basic ' + base64.encodestring("%s:%s" % self.credentials).strip()  
+
+
+class DigestAuthentication(Authentication):
+    """Only do qop='auth' and MD5, since that 
+    is all Apache currently implements"""
+    def __init__(self, credentials, host, request_uri, headers, response, content):
+        Authentication.__init__(self, credentials, host, request_uri, headers, response, content)
+        challenge = _parse_www_authenticate(response, 'www-authenticate')
+        self.challenge = challenge['digest']
+        qop = self.challenge.get('qop')
+        self.challenge['qop'] = ('auth' in [x.strip() for x in qop.split()]) and 'auth' or None
+        if self.challenge['qop'] is None:
+            raise UnimplementedDigestAuthOptionError( _("Unsupported value for qop: %s." % qop))
+        self.challenge['algorithm'] = self.challenge.get('algorithm', 'MD5')
+        if self.challenge['algorithm'] != 'MD5':
+            raise UnimplementedDigestAuthOptionError( _("Unsupported value for algorithm: %s." % self.challenge['algorithm']))
+        self.A1 = "".join([self.credentials[0], ":", self.challenge['realm'], ":", self.credentials[1]])   
+        self.challenge['nc'] = 1
+
+    def request(self, method, request_uri, headers, content, cnonce = None):
+        """Modify the request headers"""
+        H = lambda x: md5.new(x).hexdigest()
+        KD = lambda s, d: H("%s:%s" % (s, d))
+        A2 = "".join([method, ":", request_uri])
+        self.challenge['cnonce'] = cnonce or _cnonce() 
+        request_digest  = '"%s"' % KD(H(self.A1), "%s:%s:%s:%s:%s" % (self.challenge['nonce'], 
+                    '%08x' % self.challenge['nc'], 
+                    self.challenge['cnonce'], 
+                    self.challenge['qop'], H(A2)
+                    )) 
+        headers['Authorization'] = 'Digest username="%s", realm="%s", nonce="%s", uri="%s", algorithm=%s, response=%s, qop=%s, nc=%08x, cnonce="%s"' % (
+                self.credentials[0], 
+                self.challenge['realm'],
+                self.challenge['nonce'],
+                request_uri, 
+                self.challenge['algorithm'],
+                request_digest,
+                self.challenge['qop'],
+                self.challenge['nc'],
+                self.challenge['cnonce'],
+                )
+        self.challenge['nc'] += 1
+
+    def response(self, response, content):
+        if not response.has_key('authentication-info'):
+            challenge = _parse_www_authenticate(response, 'www-authenticate')['digest']
+            if 'true' == challenge.get('stale'):
+                self.challenge['nonce'] = challenge['nonce']
+                self.challenge['nc'] = 1 
+                return True
+        else:
+            updated_challenge = _parse_www_authenticate(response, 'authentication-info')['digest']
+
+            if updated_challenge.has_key('nextnonce'):
+                self.challenge['nonce'] = updated_challenge['nextnonce']
+                self.challenge['nc'] = 1 
+        return False
+
+
+class WsseAuthentication(Authentication):
+    """This is thinly tested and should not be relied upon.
+    At this time there isn't any third party server to test against.
+    Blogger and TypePad implemented this algorithm at one point
+    but Blogger has since switched to Basic over HTTPS and 
+    TypePad has implemented it wrong, by never issuing a 401
+    challenge but instead requiring your client to telepathically know that
+    their endpoint is expecting WSSE profile="UsernameToken"."""
+    def __init__(self, credentials, host, request_uri, headers, response, content):
+        Authentication.__init__(self, credentials, host, request_uri, headers, response, content)
+
+    def request(self, method, request_uri, headers, content):
+        """Modify the request headers to add the appropriate
+        Authorization header."""
+        headers['Authorization'] = 'WSSE profile="UsernameToken"'
+        iso_now = time.strftime("%Y-%m-%dT%H:%M:%SZ", time.gmtime())
+        cnonce = _cnonce()
+        password_digest = _wsse_username_token(cnonce, iso_now, self.credentials[1])
+        headers['X-WSSE'] = 'UsernameToken Username="%s", PasswordDigest="%s", Nonce="%s", Created="%s"' % (
+                self.credentials[0],
+                password_digest,
+                cnonce,
+                iso_now)
+
+
+AUTH_SCHEME_CLASSES = {
+    "basic": BasicAuthentication,
+    "wsse": WsseAuthentication,
+    "digest": DigestAuthentication
+}
+
+AUTH_SCHEME_ORDER = ["digest", "wsse", "basic"]
+
+
+class Http:
+    """An http client that handles all 
+    methods, caching, ETags, compression,
+    https, Basic, Digest, etc.
+    """
+    def __init__(self, cache=None):
+        # Map domain name to an httplib connection
+        self.connections = {}
+        # The location of the cache, for now a directory
+        # where cached responses are held.
+        self.cache = cache
+        if self.cache and not os.path.isdir(cache): 
+            os.makedirs(self.cache)
+
+        # tuples of name, password
+        self.credentials = []
+
+        # authorization objects
+        self.authorizations = []
+
+    def _auth_from_challenge(self, host, request_uri, headers, response, content):
+        """A generator that creates Authorization objects
+           that can be applied to requests.
+        """
+        challenges = _parse_www_authenticate(response, 'www-authenticate')
+        for cred in self.credentials:
+            for scheme in AUTH_SCHEME_ORDER:
+                if challenges.has_key(scheme):
+                    yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content) 
+
+    def add_credentials(self, name, password):
+        self.credentials.append((name, password))
+
+    def clear_credentials(self):
+        self.credentials = []
+        self.authorizations = []
+
+    def _conn_request(self, conn, request_uri, method, body, headers):
+        for i in range(2):
+            try:
+                conn.request(method, request_uri, body, headers)
+                response = conn.getresponse()
+                content = response.read()
+                response = Response(response)
+                content = _decompressContent(response, content)
+            except httplib.BadStatusLine, e:
+                if not e.line:
+                    conn.close()
+                    conn.connect()
+                    continue
+
+            break;
+        return (response, content)
+
+
+    def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cacheFullPath):
+        """Do the actual request using the connection object
+        and also follow one level of redirects if necessary"""
+
+        auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)]
+        auth = auths and sorted(auths)[0][1] or None
+        if auth: 
+            auth.request(method, request_uri, headers, body)
+
+        (response, content) = self._conn_request(conn, request_uri, method, body, headers)
+
+        if auth: 
+            if auth.response(response, body):
+                auth.request(method, request_uri, headers, body)
+                (response, content) = self._conn_request(conn, request_uri, method, body, headers )
+                response._stale_digest = 1
+
+        if response.status == 401:
+            for authorization in self._auth_from_challenge(host, request_uri, headers, response, content):
+                authorization.request(method, request_uri, headers, body) 
+                (response, content) = self._conn_request(conn, request_uri, method, body, headers, )
+                if response.status != 401:
+                    self.authorizations.append(authorization)
+                    authorization.response(response, body)
+                    break
+
+        if method in ["GET", "HEAD"] or response.status == 303:
+            if response.status in [300, 301, 302, 303, 307]:
+                # Pick out the location header and basically start from the beginning
+                # remembering first to strip the ETag header and decrement our 'depth'
+                if redirections:
+                    if not response.has_key('location') and response.status != 300:
+                        raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."))
+                    if response.status == 301:
+                        response['-x-permanent-redirect-url'] = response['location']
+                        _updateCache(headers, response, content, cacheFullPath)
+                    if headers.has_key('if-none-match'):
+                        del headers['if-none-match']
+                    if headers.has_key('if-modified-since'):
+                        del headers['if-modified-since']
+                    if response.has_key('location'):
+                        old_response = copy.deepcopy(response)
+                        location = response['location']
+                        (scheme, authority, path, query, fragment) = parse_uri(location)
+                        if authority == None:
+                            location = urlparse.urljoin(absolute_uri, location)
+                        redirect_method = ((response.status == 303) and (method not in ["GET", "HEAD"])) and "GET" or method
+                        (response, content) = self.request(location, redirect_method, headers = headers, redirections = redirections - 1)
+                        response._previous = old_response
+                else:
+                    raise RedirectLimit( _("Redirected more times than redirection_limit allows."))
+            elif response.status in [200, 203] and method == "GET":
+                # Don't cache 206's since we aren't going to handle byte range requests
+                _updateCache(headers, response, content, cacheFullPath)
+
+        return (response, content)
+
+    def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS):
+        """Returns an httplib2.Response and the response content.
+
+        uri    - MUST be an absolute HTTP URI
+        """
+        if headers is None:
+            headers = {}
+        else:
+            headers = _normalize_headers(headers)
+
+        if not headers.has_key('user-agent'):
+            headers['user-agent'] = "Python-httplib2/%s" % __version__
+
+        (scheme, authority, path, query, fragment) = parse_uri(uri)
+        authority = authority.lower()
+        if not path: 
+            path = "/"
+        # Could do syntax based normalization of the URI before
+        # computing the digest. See Section 6.2.2 of Std 66.
+        request_uri = query and "?".join([path, query]) or path
+        defrag_uri = scheme + "://" + authority + request_uri
+
+        if not self.connections.has_key(authority):
+            connection_type = (scheme == 'https') and httplib.HTTPSConnection or httplib.HTTPConnection
+            conn = self.connections[authority] = connection_type(authority)
+        else:
+            conn = self.connections[authority]
+
+        if method in ["GET", "HEAD"] and 'range' not in headers:
+            headers['accept-encoding'] = 'compress, gzip'
+
+        info = rfc822.Message(StringIO.StringIO(""))
+        if self.cache:
+            cacheFullPath = os.path.join(self.cache, md5.new(defrag_uri).hexdigest())
+            if os.path.exists(cacheFullPath):
+                try:
+                    f = file(cacheFullPath, "r")
+                    info = rfc822.Message(f)
+                    f.seek(0)
+                    content = f.read().split('\n\n', 1)[1]
+                    f.close()
+                except:
+                    os.remove(cacheFullPath)
+        else:
+            cacheFullPath = None
+                    
+        if method in ["PUT"] and self.cache and info.has_key('etag'):
+            # http://www.w3.org/1999/04/Editing/ 
+            headers['if-match'] = info['etag']
+
+        if method not in ["GET", "HEAD"] and self.cache and os.path.exists(cacheFullPath):
+            # RFC 2616 Section 13.10
+            os.remove(cacheFullPath)
+
+        if method in ["GET", "HEAD"] and self.cache and 'range' not in headers:
+            if info.has_key('-x-permanent-redirect-url'):
+                # Should cached permanent redirects be counted in our redirection count? For now, yes.
+                (response, new_content) = self.request(info['-x-permanent-redirect-url'], "GET", headers = headers, redirections = redirections - 1)
+                response._previous = Response(info)
+                response._previous.fromcache = True
+            else:
+                # Determine our course of action:
+                #   Is the cached entry fresh or stale?
+                #   Has the client requested a non-cached response?
+                #   
+                # There seems to be three possible answers: 
+                # 1. [FRESH] Return the cache entry w/o doing a GET
+                # 2. [STALE] Do the GET (but add in cache validators if available)
+                # 3. [TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request
+                entry_disposition = _entry_disposition(info, headers) 
+                
+                if entry_disposition == "FRESH":
+                    response = Response(info)
+                    response.fromcache = True
+                    return (response, content)
+                elif entry_disposition == "STALE":
+                    if info.has_key('etag'):
+                        headers['if-none-match'] = info['etag']
+                    if info.has_key('last-modified'):
+                        headers['if-modified-since'] = info['last-modified']
+                elif entry_disposition == "TRANSPARENT":
+                    pass
+                if entry_disposition != "FRESH":
+                    (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cacheFullPath)
+
+            if response.status == 304 and method == "GET":
+                # Rewrite the cache entry with the new end-to-end headers
+                # Take all headers that are in response 
+                # and overwrite their values in info.
+                # unless they are hop-by-hop, or are listed in the connection header.
+
+                hopbyhop = list(HOP_BY_HOP)
+                hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')])
+                end2end = [header for header in response.keys() if header not in hopbyhop]
+                for key in end2end:
+                    info[key] = response[key]
+                merged_response = Response(info)
+                if hasattr(response, "_stale_digest"):
+                    merged_response._stale_digest = response._stale_digest
+                _updateCache(headers, merged_response, content, cacheFullPath)
+                response = merged_response
+                response.status = 200
+                response.fromcache = True 
+
+            elif response.status == 200:
+                content = new_content
+            else:
+                if os.path.exists(cacheFullPath):
+                    os.remove(cacheFullPath)
+                content = new_content 
+        else: 
+            (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cacheFullPath)
+        return (response, content)
+
+ 
+
+class Response(dict):
+    """An object more like rfc822.Message than httplib.HTTPResponse."""
+   
+    """Is this response from our local cache"""
+    fromcache = False
+
+    """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """
+    version = 11
+
+    "Status code returned by server. "
+    status = 200
+
+    reason = "Ok"
+    """Reason phrase returned by server."""
+
+    _previous = None
+
+    def __init__(self, info):
+        # info is either an rfc822.Message or 
+        # an httplib.HTTPResponse object.
+        if isinstance(info, httplib.HTTPResponse):
+            for key, value in info.getheaders(): # This is where the 2.4 requirement comes from
+                self[key] = value 
+            self.status = info.status
+            self['status'] = str(self.status)
+            self.reason = info.reason
+            self.version = info.version
+        elif isinstance(info, rfc822.Message):
+            for key, value in info.items(): # This is where the 2.4 requirement comes from
+                self[key] = value 
+            self.status = int(self['status'])
+
+
diff --git a/httplib2test.py b/httplib2test.py
new file mode 100755
index 0000000..e972626
--- /dev/null
+++ b/httplib2test.py
@@ -0,0 +1,818 @@
+#!/usr/bin/env python2.4
+"""
+httplib2test
+
+A set of unit tests for httplib2.py.
+
+Requires Python 2.4 or later
+"""
+
+__author__ = "Joe Gregorio (joe@bitworking.org)"
+__copyright__ = "Copyright 2006, Joe Gregorio"
+__contributors__ = []
+__license__ = "MIT"
+__history__ = """ """
+__version__ = "0.1 ($Rev: 118 $)"
+
+
+import unittest, httplib2, os, urlparse, time, base64
+
+# The test resources base uri
+base = 'http://bitworking.org/projects/httplib2/test/'
+#base = 'http://localhost/projects/httplib2/test/'
+
class ParserTest(unittest.TestCase):
    """Tests for httplib2.parse_uri, the STD 66 (RFC 3986) URI splitter."""

    def testFromStd66(self):
        # Each case maps a URI to its expected
        # (scheme, authority, path, query, fragment) tuple.
        cases = [
            ("http://example.com", ('http', 'example.com', '', None, None)),
            ("https://example.com", ('https', 'example.com', '', None, None)),
            ("https://example.com:8080", ('https', 'example.com:8080', '', None, None)),
            ("http://example.com/", ('http', 'example.com', '/', None, None)),
            ("http://example.com/path", ('http', 'example.com', '/path', None, None)),
            ("http://example.com/path?a=1&b=2", ('http', 'example.com', '/path', 'a=1&b=2', None)),
            ("http://example.com/path?a=1&b=2#fred", ('http', 'example.com', '/path', 'a=1&b=2', 'fred')),
            # The original test asserted this last case twice; both kept.
            ("http://example.com/path?a=1&b=2#fred", ('http', 'example.com', '/path', 'a=1&b=2', 'fred')),
        ]
        for uri, expected in cases:
            self.assertEqual(expected, httplib2.parse_uri(uri))
+
# Shared client instance used by all test cases below; ".cache" is the
# directory holding its private response cache (emptied in each setUp).
http = httplib2.Http(".cache")
+
class HttpTest(unittest.TestCase):
    """End-to-end tests for httplib2.Http.

    These tests exercise the module-level `http` client against the live
    test resources under *base*, so they require network access and the
    server-side test scripts to be deployed.
    """

    def setUp(self):
        # Empty the shared cache directory so every test starts cold, and
        # drop any credentials added by a previous test.  (NOTE(review):
        # the loop variable `file` shadows the builtin.)
        [os.remove(os.path.join(".cache", file)) for file in os.listdir(".cache")]
        http.clear_credentials()

    def testGetIsDefaultMethod(self):
        # Test that GET is the default method
        uri = urlparse.urljoin(base, "methods/method_reflector.cgi")
        (response, content) = http.request(uri)
        self.assertEqual(response['x-method'], "GET")

    def testDifferentMethods(self):
        # Test that all methods can be used
        uri = urlparse.urljoin(base, "methods/method_reflector.cgi")
        for method in ["GET", "PUT", "DELETE", "POST"]:
            (response, content) = http.request(uri, method, body=" ")
            self.assertEqual(response['x-method'], method)

    def testGetNoCache(self):
        # Test that can do a GET w/o the cache turned on.
        # (This local `http` deliberately shadows the module-level client.)
        http = httplib2.Http()
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response._previous, None)

    def testUserAgent(self):
        # Test that we provide a default user-agent
        uri = urlparse.urljoin(base, "user-agent/test.cgi")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertTrue(content.startswith("Python-httplib2/"))

    def testUserAgentNonDefault(self):
        # Test that the default user-agent can be over-ridden
        uri = urlparse.urljoin(base, "user-agent/test.cgi")
        (response, content) = http.request(uri, "GET", headers={'User-Agent': 'fred/1.0'})
        self.assertEqual(response.status, 200)
        self.assertTrue(content.startswith("fred/1.0"))

    def testGet300WithLocation(self):
        # Test that we automatically follow 300 redirects if a Location: header is provided
        uri = urlparse.urljoin(base, "300/with-location-header.asis")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response._previous.status, 300)
        self.assertEqual(response._previous.fromcache, False)

        # Confirm that the intermediate 300 is not cached
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response._previous.status, 300)
        self.assertEqual(response._previous.fromcache, False)

    def testGet300WithoutLocation(self):
        # Not giving a Location: header in a 300 response is acceptable
        # In which case we just return the 300 response
        uri = urlparse.urljoin(base, "300/without-location-header.asis")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 300)
        self.assertTrue(response['content-type'].startswith("text/html"))
        self.assertEqual(response._previous, None)

    def testGet301(self):
        # Test that we automatically follow 301 redirects
        # and that we cache the 301 response
        uri = urlparse.urljoin(base, "301/onestep.asis")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response._previous.status, 301)
        self.assertEqual(response._previous.fromcache, False)

        # Second request: the 301 itself should now come from the cache.
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response._previous.status, 301)
        self.assertEqual(response._previous.fromcache, True)

    def testGet302(self):
        # Test that we automatically follow 302 redirects
        # and that we DO NOT cache the 302 response
        uri = urlparse.urljoin(base, "302/onestep.asis")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response._previous.status, 302)
        self.assertEqual(response._previous.fromcache, False)

        uri = urlparse.urljoin(base, "302/onestep.asis")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        # The final destination is cacheable even though the 302 is not.
        self.assertEqual(response.fromcache, True)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response._previous.status, 302)
        self.assertEqual(response._previous.fromcache, False)

        uri = urlparse.urljoin(base, "302/twostep.asis")

        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response._previous.status, 302)
        self.assertEqual(response._previous.fromcache, False)

    def testGet302RedirectionLimit(self):
        # Test that we can set a lower redirection limit
        # and that we raise an exception when we exceed
        # that limit.
        uri = urlparse.urljoin(base, "302/twostep.asis")
        try:
            (response, content) = http.request(uri, "GET", redirections = 1)
            self.fail("This should not happen")
        except httplib2.RedirectLimit:
            pass
        except Exception, e:
            self.fail("Threw wrong kind of exception ")

    def testGet302NoLocation(self):
        # Test that we throw an exception when we get
        # a 302 with no Location: header.
        uri = urlparse.urljoin(base, "302/no-location.asis")
        try:
            (response, content) = http.request(uri, "GET")
            self.fail("Should never reach here")
        except httplib2.RedirectMissingLocation:
            pass
        except Exception, e:
            self.fail("Threw wrong kind of exception ")

    def testGet302ViaHttps(self):
        # google always redirects https://google.com to http://google.com
        (response, content) = http.request("https://google.com", "GET")
        self.assertEqual(200, response.status)
        self.assertEqual(302, response._previous.status)

    def testGetViaHttps(self):
        # Test that we can handle HTTPS
        (response, content) = http.request("https://google.com/adsense/", "GET")
        self.assertEqual(200, response.status)
        self.assertEqual(None, response._previous)

    def testGetViaHttpsSpecViolationOnLocation(self):
        # Test that we follow redirects through HTTPS
        # even if they violate the spec by including
        # a relative Location: header instead of an 
        # absolute one.
        (response, content) = http.request("https://google.com/adsense", "GET")
        self.assertEqual(200, response.status)
        self.assertNotEqual(None, response._previous)

    def testGet303(self):
        # Do a follow-up GET on a Location: header
        # returned from a POST that gave a 303.
        uri = urlparse.urljoin(base, "303/303.cgi")
        (response, content) = http.request(uri, "POST", " ")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response._previous.status, 303)

    def test303ForDifferentMethods(self):
        # Test that all methods can be used
        uri = urlparse.urljoin(base, "303/redirect-to-reflector.cgi")
        # HEAD really does send a HEAD, but apparently Apache changes 
        # every HEAD into a GET, so our script returns x-method: GET.
        for (method, method_on_303) in [("PUT", "GET"), ("DELETE", "GET"), ("POST", "GET"), ("GET", "GET"), ("HEAD", "GET")]: 
            (response, content) = http.request(uri, method, body=" ")
            self.assertEqual(response['x-method'], method_on_303)

    def testGet304(self):
        # Test that we use ETags properly to validate our cache
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")

        (response, content) = http.request(uri, "GET")
        (response, content) = http.request(uri, "GET", headers = {'cache-control': 'must-revalidate'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        # HEAD requests must also be answered from the cache
        # (regression for a bug fixed in rev 117 — see CHANGELOG).
        (response, content) = http.request(uri, "HEAD")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        # A Range: request must NOT be satisfied by the full cached body
        # (regression for a bug fixed in rev 117 — see CHANGELOG).
        (response, content) = http.request(uri, "GET", headers = {'range': 'bytes=0-0'})
        self.assertEqual(response.status, 206)
        self.assertEqual(response.fromcache, False)

    def testGet304EndToEnd(self):
        # Test that end to end headers get overwritten in the cache
        uri = urlparse.urljoin(base, "304/end2end.cgi")
        (response, content) = http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")
        old_date = response['date']
        time.sleep(2)

        (response, content) = http.request(uri, "GET", headers = {'Cache-Control': 'max-age=0'})
        # The response should be from the cache, but the Date: header should be updated.
        new_date = response['date']
        self.assertNotEqual(new_date, old_date)
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

    def testGet304LastModified(self):
        # Test that we can still handle a 304 
        # by only using the last-modified cache validator.
        uri = urlparse.urljoin(base, "304/last-modified-only/last-modified-only.txt")
        (response, content) = http.request(uri, "GET")

        self.assertNotEqual(response['last-modified'], "")
        (response, content) = http.request(uri, "GET")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

    def testGet307(self):
        # Test that we do follow 307 redirects but
        # do not cache the 307
        uri = urlparse.urljoin(base, "307/onestep.asis")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response._previous.status, 307)
        self.assertEqual(response._previous.fromcache, False)

        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.assertEqual(content, "This is the final destination.\n")
        self.assertEqual(response._previous.status, 307)
        self.assertEqual(response._previous.fromcache, False)

    def testGet410(self):
        # Test that we pass 410's through
        uri = urlparse.urljoin(base, "410/410.asis")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 410)

    def testGetGZip(self):
        # Test that we support gzip compression
        uri = urlparse.urljoin(base, "gzip/final-destination.txt")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response['content-encoding'], "gzip")
        self.assertEqual(content, "This is the final destination.\n")

    def testGetGZipFailure(self):
        # Test that we raise a good exception when the gzip fails
        uri = urlparse.urljoin(base, "gzip/failed-compression.asis")
        try:
            (response, content) = http.request(uri, "GET")
            self.fail("Should never reach here")
        except httplib2.FailedToDecompressContent:
            pass
        except Exception:
            self.fail("Threw wrong kind of exception")

    def testGetDeflate(self):
        # Test that we support deflate compression
        uri = urlparse.urljoin(base, "deflate/deflated.asis")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response['content-encoding'], "deflate")
        # NOTE(review): unlike the gzip test, no trailing newline here —
        # presumably matching the deflated fixture; confirm server-side.
        self.assertEqual(content, "This is the final destination.")

    def testGetDeflateFailure(self):
        # Test that we raise a good exception when the deflate fails
        # NOTE(review): the next assignment is dead code — it is
        # immediately overwritten by the failed-compression URI below.
        uri = urlparse.urljoin(base, "deflate/deflated.asis")
        uri = urlparse.urljoin(base, "deflate/failed-compression.asis")
        try:
            (response, content) = http.request(uri, "GET")
            self.fail("Should never reach here")
        except httplib2.FailedToDecompressContent:
            pass
        except Exception:
            self.fail("Threw wrong kind of exception")

    def testGetDuplicateHeaders(self):
        # Test that duplicate headers get concatenated via ','
        uri = urlparse.urljoin(base, "duplicate-headers/multilink.asis")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, "This is content\n")
        self.assertEqual(response['link'].split(",")[0], '<http://bitworking.org>; rel="home"; title="BitWorking"')

    def testGetCacheControlNoCache(self):
        # Test Cache-Control: no-cache on requests
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        (response, content) = http.request(uri, "GET", headers={'Cache-Control': 'no-cache'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlPragmaNoCache(self):
        # Test Pragma: no-cache on requests
        uri = urlparse.urljoin(base, "304/test_etag.txt")
        (response, content) = http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        # HTTP/1.0-style Pragma must behave like Cache-Control: no-cache.
        (response, content) = http.request(uri, "GET", headers={'Pragma': 'no-cache'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlNoStoreRequest(self):
        # A no-store request means that the response should not be stored.
        uri = urlparse.urljoin(base, "304/test_etag.txt")

        (response, content) = http.request(uri, "GET", headers={'Cache-Control': 'no-store'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

        (response, content) = http.request(uri, "GET", headers={'Cache-Control': 'no-store'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlNoStoreResponse(self):
        # A no-store response means that the response should not be stored.
        uri = urlparse.urljoin(base, "no-store/no-store.asis")

        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)
        # Nothing at all should have been written to the cache directory.
        self.assertEqual(0, len(os.listdir(".cache")))

    def testGetCacheControlNoCacheNoStoreRequest(self):
        # Test that a no-store, no-cache clears the entry from the cache
        # even if it was cached previously.
        uri = urlparse.urljoin(base, "304/test_etag.txt")

        (response, content) = http.request(uri, "GET")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.fromcache, True)
        (response, content) = http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'})
        (response, content) = http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)
        self.assertEqual(0, len(os.listdir(".cache")))

    def testUpdateInvalidatesCache(self):
        # Test that calling PUT or DELETE on a 
        # URI that is cached invalidates that cache entry.
        uri = urlparse.urljoin(base, "304/test_etag.txt")

        (response, content) = http.request(uri, "GET")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.fromcache, True)
        # The server rejects the DELETE (405), but the attempt alone
        # must still invalidate the cached entry.
        (response, content) = http.request(uri, "DELETE")
        self.assertEqual(response.status, 405)

        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.fromcache, False)

    def testUpdateUsesCachedETag(self):
        # Test that we natively support http://www.w3.org/1999/04/Editing/ 
        # (detect-the-lost-update: PUTs carry the cached ETag as If-Match).
        uri = urlparse.urljoin(base, "conditional-updates/test.cgi")

        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        (response, content) = http.request(uri, "PUT")
        self.assertEqual(response.status, 200)
        # The second PUT carries a now-stale ETag: 412 Precondition Failed.
        (response, content) = http.request(uri, "PUT")
        self.assertEqual(response.status, 412)

    def testBasicAuth(self):
        # Test Basic Authentication
        uri = urlparse.urljoin(base, "basic/file.txt")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urlparse.urljoin(base, "basic/")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        http.add_credentials('joe', 'password')
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urlparse.urljoin(base, "basic/file.txt")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testBasicAuthTwoDifferentCredentials(self):
        # Test Basic Authentication with multiple sets of credentials
        uri = urlparse.urljoin(base, "basic2/file.txt")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urlparse.urljoin(base, "basic2/")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        http.add_credentials('fred', 'barney')
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urlparse.urljoin(base, "basic2/file.txt")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testBasicAuthNested(self):
        # Test Basic Authentication with resources
        # that are nested
        uri = urlparse.urljoin(base, "basic-nested/")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urlparse.urljoin(base, "basic-nested/subdir")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        # Now add in credentials one at a time and test.
        http.add_credentials('joe', 'password')

        uri = urlparse.urljoin(base, "basic-nested/")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        # joe's credentials must not leak into the differently-protected subdir.
        uri = urlparse.urljoin(base, "basic-nested/subdir")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        http.add_credentials('fred', 'barney')

        uri = urlparse.urljoin(base, "basic-nested/")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urlparse.urljoin(base, "basic-nested/subdir")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testDigestAuth(self):
        # Test that we support Digest Authentication
        uri = urlparse.urljoin(base, "digest/")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        http.add_credentials('joe', 'password')
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urlparse.urljoin(base, "digest/file.txt")
        (response, content) = http.request(uri, "GET")

    def testDigestAuthNextNonceAndNC(self):
        # Test that if the server sets nextnonce that we reset
        # the nonce count back to 1
        uri = urlparse.urljoin(base, "digest/file.txt")
        http.add_credentials('joe', 'password')
        (response, content) = http.request(uri, "GET", headers = {"cache-control":"no-cache"})
        info = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)
        (response, content) = http.request(uri, "GET", headers = {"cache-control":"no-cache"})
        info2 = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

        if info.has_key('nextnonce'):
            self.assertEqual(info2['nc'], 1)

    def testDigestAuthStale(self):
        # Test that we can handle a nonce becoming stale
        uri = urlparse.urljoin(base, "digest-expire/file.txt")
        http.add_credentials('joe', 'password')
        (response, content) = http.request(uri, "GET", headers = {"cache-control":"no-cache"})
        info = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

        time.sleep(3)
        # Sleep long enough that the nonce becomes stale

        (response, content) = http.request(uri, "GET", headers = {"cache-control":"no-cache"})
        self.assertFalse(response.fromcache)
        # _stale_digest is set by the client when it retried a stale nonce.
        self.assertTrue(response._stale_digest)
        info3 = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

    def reflector(self, content):
        # Parse the reflector CGI's "KEY=VALUE" lines into a dict.
        return  dict( [tuple(x.split("=")) for x in content.strip().split("\n")] )

    def testReflector(self):
        uri = urlparse.urljoin(base, "reflector/reflector.cgi")
        (response, content) = http.request(uri, "GET")
        d = self.reflector(content)
        self.assertTrue(d.has_key('HTTP_USER_AGENT')) 
+
+
+class HttpPrivateTest(unittest.TestCase):
+
+    def testParseCacheControl(self):
+        # Test that we can parse the Cache-Control header
+        self.assertEqual({}, httplib2._parse_cache_control({}))
+        self.assertEqual({'no-cache': 1}, httplib2._parse_cache_control({'cache-control': ' no-cache'}))
+        cc = httplib2._parse_cache_control({'cache-control': ' no-cache, max-age = 7200'})
+        self.assertEqual(cc['no-cache'], 1)
+        self.assertEqual(cc['max-age'], '7200')
+        cc = httplib2._parse_cache_control({'cache-control': ' , '})
+        self.assertEqual(cc[''], 1)
+
+    def testNormalizeHeaders(self):
+        # Test that we normalize headers to lowercase 
+        h = httplib2._normalize_headers({'Cache-Control': 'no-cache', 'Other': 'Stuff'})
+        self.assertTrue(h.has_key('cache-control'))
+        self.assertTrue(h.has_key('other'))
+        self.assertEqual('Stuff', h['other'])
+
+    def testExpirationModelTransparent(self):
+        # Test that no-cache makes our request TRANSPARENT
+        response_headers = {
+            'cache-control': 'max-age=7200'
+        }
+        request_headers = {
+            'cache-control': 'no-cache'
+        }
+        self.assertEqual("TRANSPARENT", httplib2._entry_disposition(response_headers, request_headers))
+
+    def testExpirationModelNoCacheResponse(self):
+        # The date and expires point to an entry that should be
+        # FRESH, but the no-cache over-rides that.
+        now = time.time()
+        response_headers = {
+            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)),
+            'cache-control': 'no-cache'
+        }
+        request_headers = {
+        }
+        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+
+    def testExpirationModelStaleRequestMustReval(self):
+        # must-revalidate forces STALE
+        self.assertEqual("STALE", httplib2._entry_disposition({}, {'cache-control': 'must-revalidate'}))
+
+    def testExpirationModelStaleResponseMustReval(self):
+        # must-revalidate forces STALE
+        self.assertEqual("STALE", httplib2._entry_disposition({'cache-control': 'must-revalidate'}, {}))
+
+    def testExpirationModelFresh(self):
+        response_headers = {
+            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
+            'cache-control': 'max-age=2'
+        }
+        request_headers = {
+        }
+        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
+        time.sleep(3)
+        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+
+    def testExpirationMaxAge0(self):
+        response_headers = {
+            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
+            'cache-control': 'max-age=0'
+        }
+        request_headers = {
+        }
+        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+
+    def testExpirationModelDateAndExpires(self):
+        now = time.time()
+        response_headers = {
+            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)),
+        }
+        request_headers = {
+        }
+        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
+        time.sleep(3)
+        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+
+    def testExpirationModelDateOnly(self):
+        now = time.time()
+        response_headers = {
+            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+3)),
+        }
+        request_headers = {
+        }
+        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+
+    def testExpirationModelOnlyIfCached(self):
+        response_headers = {
+        }
+        request_headers = {
+            'cache-control': 'only-if-cached',
+        }
+        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
+
+    def testExpirationModelMaxAgeBoth(self):
+        now = time.time()
+        response_headers = {
+            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+            'cache-control': 'max-age=2'
+        }
+        request_headers = {
+            'cache-control': 'max-age=0'
+        }
+        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+
+    def testExpirationModelDateAndExpiresMinFresh1(self):
+        now = time.time()
+        response_headers = {
+            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)),
+        }
+        request_headers = {
+            'cache-control': 'min-fresh=2'
+        }
+        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))
+
+    def testExpirationModelDateAndExpiresMinFresh2(self):
+        now = time.time()
+        response_headers = {
+            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
+            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)),
+        }
+        request_headers = {
+            'cache-control': 'min-fresh=2'
+        }
+        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
+
+    def testParseWWWAuthenticateEmpty(self):
+        res = httplib2._parse_www_authenticate({})
+        self.assertEqual(len(res.keys()), 0) 
+
+    def testParseWWWAuthenticateBasic(self):
+        res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me"'})
+        basic = res['basic']
+        self.assertEqual('me', basic['realm'])
+
+        res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me", algorithm="MD5"'})
+        basic = res['basic']
+        self.assertEqual('me', basic['realm'])
+        self.assertEqual('MD5', basic['algorithm'])
+
+        res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me", algorithm=MD5'})
+        basic = res['basic']
+        self.assertEqual('me', basic['realm'])
+        self.assertEqual('MD5', basic['algorithm'])
+
+    def testParseWWWAuthenticateBasic2(self):
+        res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic realm="me",other="fred" '})
+        basic = res['basic']
+        self.assertEqual('me', basic['realm'])
+        self.assertEqual('fred', basic['other'])
+
+    def testParseWWWAuthenticateBasic3(self):
+        res = httplib2._parse_www_authenticate({ 'www-authenticate': 'Basic REAlm="me" '})
+        basic = res['basic']
+        self.assertEqual('me', basic['realm'])
+
+
+    def testParseWWWAuthenticateDigest(self):
+        res = httplib2._parse_www_authenticate({ 'www-authenticate': 
+                'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"'})
+        digest = res['digest']
+        self.assertEqual('testrealm@host.com', digest['realm'])
+        self.assertEqual('auth,auth-int', digest['qop'])
+
+
+    def testParseWWWAuthenticateMultiple(self):
+        res = httplib2._parse_www_authenticate({ 'www-authenticate': 
+                'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41" Basic REAlm="me" '})
+        digest = res['digest']
+        self.assertEqual('testrealm@host.com', digest['realm'])
+        self.assertEqual('auth,auth-int', digest['qop'])
+        self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
+        self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
+        basic = res['basic']
+        self.assertEqual('me', basic['realm'])
+
+    def testParseWWWAuthenticateMultiple2(self):
+        # Handle an added comma between challenges, which might get thrown in if the challenges were
+        # originally sent in separate www-authenticate headers.
+        res = httplib2._parse_www_authenticate({ 'www-authenticate': 
+                'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me" '})
+        digest = res['digest']
+        self.assertEqual('testrealm@host.com', digest['realm'])
+        self.assertEqual('auth,auth-int', digest['qop'])
+        self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
+        self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
+        basic = res['basic']
+        self.assertEqual('me', basic['realm'])
+
+    def testParseWWWAuthenticateMultiple3(self):
+        # Handle an added comma between challenges, which might get thrown in if the challenges were
+        # originally sent in separate www-authenticate headers.
+        res = httplib2._parse_www_authenticate({ 'www-authenticate': 
+                'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'})
+        digest = res['digest']
+        self.assertEqual('testrealm@host.com', digest['realm'])
+        self.assertEqual('auth,auth-int', digest['qop'])
+        self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
+        self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
+        basic = res['basic']
+        self.assertEqual('me', basic['realm'])
+        wsse = res['wsse']
+        self.assertEqual('foo', wsse['realm'])
+        self.assertEqual('UsernameToken', wsse['profile'])
+
+    def testParseWWWAuthenticateMultiple4(self):
+        res = httplib2._parse_www_authenticate({ 'www-authenticate': 
+                'Digest realm="test-real.m@host.com", qop \t=\t"\tauth,auth-int", nonce="(*)&^&$%#",opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'}) 
+        digest = res['digest']
+        self.assertEqual('test-real.m@host.com', digest['realm'])
+        self.assertEqual('\tauth,auth-int', digest['qop'])
+        self.assertEqual('(*)&^&$%#', digest['nonce'])
+
+    def testParseWWWAuthenticateMoreQuoteCombos(self):
+        res = httplib2._parse_www_authenticate({'www-authenticate':'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'})
+        digest = res['digest']
+        self.assertEqual('myrealm', digest['realm'])
+
+    def testDigestObject(self):
+        credentials = ('joe', 'password')
+        host = None
+        request_uri = '/projects/httplib2/test/digest/' 
+        headers = {}
+        response = {
+            'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth"'
+        }
+        content = ""
+        
+        d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content)
+        d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46") 
+        our_request = "Authorization: %s" % headers['Authorization']
+        working_request = 'Authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46"'
+        self.assertEqual(our_request, working_request)
+
+
+    def testDigestObjectStale(self):
+        credentials = ('joe', 'password')
+        host = None
+        request_uri = '/projects/httplib2/test/digest/' 
+        headers = {}
+        response = httplib2.Response({ })
+        response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'
+        response.status = 401
+        content = ""
+        d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content)
+        # Returns true to force a retry
+        self.assertTrue( d.response(response, content) )
+
+    def testDigestObjectAuthInfo(self):
+        credentials = ('joe', 'password')
+        host = None
+        request_uri = '/projects/httplib2/test/digest/' 
+        headers = {}
+        response = httplib2.Response({ })
+        response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'
+        response['authentication-info'] = 'nextnonce="fred"'
+        content = ""
+        d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content)
+        # Returns true to force a retry
+        self.assertFalse( d.response(response, content) )
+        self.assertEqual('fred', d.challenge['nonce'])
+        self.assertEqual(1, d.challenge['nc'])
+
+    def testWsseAlgorithm(self):
+        digest = httplib2._wsse_username_token("d36e316282959a9ed4c89851497a717f", "2003-12-15T14:43:07Z", "taadtaadpstcsm")
+        expected = "quR/EWLAV4xLf9Zqyw4pDmfV9OY="
+        self.assertEqual(expected, digest)
+
+
+unittest.main()
+
diff --git a/index.html b/index.html
new file mode 100644
index 0000000..011efb9
--- /dev/null
+++ b/index.html
@@ -0,0 +1,171 @@
+<html>
+<head>
+     <!--#include virtual="header.html" -->
+    <title>Joe Gregorio | BitWorking | Projects | httplib2.py</title>
+</head>
+<body class='main' id="top" name="top" >
+    <div class="body">
+        <!--#include virtual="titlebar.html" -->
+
+        <div class="content">
+            
+            <div class="item">
+
+                <h2>Httplib2</h2>
+                <p>A comprehensive HTTP client library, <code>httplib2.py</code> 
+                supports many features left out of other HTTP libraries.
+                </p>
+                <dl>
+                    <dt>HTTP and HTTPS</dt>
+                    <dd>HTTPS support is only available if the socket module was compiled with SSL support.
+                    </dd>
+
+                    <dt>Keep-Alive</dt>
+                    <dd>Supports HTTP 1.1 Keep-Alive, keeping the socket 
+                    open and performing multiple requests over the same connection
+                    if possible.
+                    </dd>
+
+                    <dt>Authentication</dt>
+                    <dd>The following three types of HTTP Authentication are supported. 
+                    These can be used over both HTTP and HTTPS.
+                    <ul>
+                        <li><a href="http://www.faqs.org/rfcs/rfc2617.html">Digest</a></li>
+                        <li><a href="http://www.faqs.org/rfcs/rfc2617.html">Basic</a></li>
+                        <li><a href="http://www.xml.com/pub/a/2003/12/17/dive.html">WSSE</a></li>
+                    </ul>
+                    </dd>
+
+                    <dt>Caching</dt>
+                    <dd>The module can optionally operate with a private
+                    cache that understands the Cache-Control: header and
+                    uses both the ETag and Last-Modified cache validators.
+                    </dd>
+
+                    <dt>All Methods</dt>
+                    <dd>The module can handle any HTTP request method, not just GET and POST.</dd>
+
+                    <dt>Redirects</dt>
+                    <dd>Automatically follows 3XX redirects on GETs.</dd>
+
+                    <dt>Compression</dt>
+                    <dd>Handles both 'compress' and 'gzip' types of compression.</dd>
+
+                    <dt>Lost update support</dt>
+                    <dd>Automatically adds back ETags into PUT requests to resources
+                    we have already cached. This implements Section 3.2 of 
+                    <a href="http://www.w3.org/1999/04/Editing/#Table">Detecting the Lost Update Problem Using Unreserved Checkout</a></dd>
+
+                    <dt>Unit Tested</dt>
+                    <dd>A large and growing set of unit tests.</dd>
+
+                </dl>
+
+<h2>Usage</h2>
+
+<p>A simple retrieval:</p>
+
+<pre><code>    import httplib2
+    h = httplib2.Http(".cache")
+    (resp_headers, content) = h.request("http://example.org/", "GET")
+</code></pre>
+
+<p>The 'content' is the content retrieved from the URL.
+The content is already decompressed or unzipped if necessary.
+</p>
+
+<p>To PUT some content to a server that uses SSL
+and Basic authentication:</p>
+
+<pre><code>    import httplib2
+    h = httplib2.Http(".cache")
+    h.add_credentials('name', 'password')
+    (resp, content) = h.request("https://example.org/chapter/2", 
+        "PUT", body="This is text", 
+        headers={'content-type':'text/plain'} )
+</code></pre>
+
+<p>Use the Cache-Control: header to control
+   how the caching operates.</p>
+
+<pre><code>    import httplib2
+    h = httplib2.Http(".cache")
+    (resp, content) = h.request("http://bitworking.org/", "GET")
+    ...
+    (resp, content) = h.request("http://bitworking.org/", "GET", 
+        headers={'cache-control':'no-cache'})
+</code></pre>
+
+<p>The first request will be cached and since this is a request to 
+bitworking.org it will be set to be cached for two hours, because
+that is how I have my server configured.
+Any subsequent GET to that URI will return the value from the
+on-disk cache and no request will be made to the server.
+You can use the Cache-Control: header to change the cache's behavior and
+in this example the second request adds the Cache-Control: header with a value
+of 'no-cache' which tells the library that the cached copy
+must not be used when handling this request.
+</p>
+
+<h2>Requirements</h2>
+
+<p>Requires Python 2.4 or later. Does not require
+any libraries beyond what is found in the core library.</p>
+
+<h2>To Do</h2>
+
+<p>This module is not perfect and needs the following:</p>
+<ul>
+    <li>Support for Proxies</li>
+    <li>A setup.py script</li>
+    <li>A pluggable store for the cache. Right now the store is just flat files in a directory. 
+      I would like to have plugins that allow keeping the cache in Berkeley DB, Squid, MySQL, etc.</li>
+    <li>More unit tests</li>
+</ul>
+
+<h2>Project Goal</h2>
+
+<p>To become a worthy addition to the Python core library.</p>
+
+<h2>Additional Information</h2>
+
+<p>
+   <dl>
+       <dt>Author</dt>
+       <dd>Joe Gregorio</dd>
+
+       <dt>License</dt>
+       <dd>MIT</dd>
+
+       <dt>Contributors</dt>
+       <dd>(Your name here)</dd>
+    </dl>
+</p>
+    
+<h2>Download</h2>
+
+<p><a href="httplib2.py.txt">httplib2.py</a> - The uncompressed source code of the single file
+that constitutes this module.
+</p>
+
+<p> <a href="httplib2test.py.txt">httplib2test.py</a> - The uncompressed source code of the single file
+that constitutes this module's unit test suite.
+</p>
+
+<p> <a href="test">test</a> - The resources used in the unit test cases. </p>
+
+<h2>Revision History</h2>
+   <dl>
+       <dt>0.1</dt>
+	   <dd>Initial Release.</dd>
+   </dl>
+ 
+<p>This page last updated on: $LastChangedDate$.</p>
+
+            </div>
+        </div>
+     <!--#include virtual="footer.html" -->
+    </div>
+</body>
+
+</html>
diff --git a/setup.py b/setup.py
new file mode 100644
index 0000000..6e99aeb
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,72 @@
+from distutils.core import setup
+setup(name='httplib2',
+        version='0.1.0', 
+        author='Joe Gregorio',
+        author_email='joe@bitworking.org',
+        url='http://bitworking.org/projects/httplib2/',
+        description='A comprehensive HTTP client library.',
+        long_description="""
+Httplib2
+========
+A comprehensive HTTP client library, httplib2.py supports many features left out of other HTTP libraries.
+
+HTTP and HTTPS
+--------------
+HTTPS support is only available if the socket module was compiled with SSL support. 
+
+    
+Keep-Alive
+----------
+Supports HTTP 1.1 Keep-Alive, keeping the socket open and performing multiple requests over the same connection if possible. 
+
+    
+Authentication
+--------------
+The following three types of HTTP Authentication are supported. These can be used over both HTTP and HTTPS.
+
+* Digest
+* Basic
+* WSSE
+
+Caching
+-------
+The module can optionally operate with a private cache that understands the Cache-Control: header and uses both the ETag and Last-Modified cache validators. 
+
+
+All Methods
+-----------
+The module can handle any HTTP request method, not just GET and POST.
+
+
+Redirects
+---------
+Automatically follows 3XX redirects on GETs.
+
+
+Compression
+-----------
+Handles both 'compress' and 'gzip' types of compression.
+
+
+Lost update support
+-------------------
+Automatically adds back ETags into PUT requests to resources we have already cached. This implements Section 3.2 of Detecting the Lost Update Problem Using Unreserved Checkout
+
+
+Unit Tested
+-----------
+A large and growing set of unit tests.
+        """,
+        py_modules=['httplib2'],
+        classifiers=[
+        'Development Status :: 3 - Alpha',
+        'Environment :: Web Environment',
+        'Intended Audience :: Developers',
+        'License :: OSI Approved :: MIT License',
+        'Operating System :: OS Independent',
+        'Programming Language :: Python',
+        'Topic :: Internet :: WWW/HTTP',
+        'Topic :: Software Development :: Libraries',
+        ],
+        )
+
diff --git a/test/.htaccess b/test/.htaccess
new file mode 100644
index 0000000..78b80f6
--- /dev/null
+++ b/test/.htaccess
@@ -0,0 +1,5 @@
+AddHandler send-as-is asis
+Options +ExecCGI
+ExpiresActive On
+ExpiresDefault "access plus 2 hours"
+SecFilterEngine Off
diff --git a/test/300/final-destination.txt b/test/300/final-destination.txt
new file mode 100644
index 0000000..4ffba65
--- /dev/null
+++ b/test/300/final-destination.txt
@@ -0,0 +1 @@
+This is the final destination.
diff --git a/test/300/with-location-header.asis b/test/300/with-location-header.asis
new file mode 100644
index 0000000..16dbf9f
--- /dev/null
+++ b/test/300/with-location-header.asis
@@ -0,0 +1,4 @@
+Status: 300 Multiple Choices
+Location: http://bitworking.org/projects/httplib2/test/300/final-destination.txt
+
+
diff --git a/test/300/without-location-header.asis b/test/300/without-location-header.asis
new file mode 100644
index 0000000..4625e31
--- /dev/null
+++ b/test/300/without-location-header.asis
@@ -0,0 +1,11 @@
+Status: 300 Multiple Choices
+Content-Type: text/html
+
+<html>
+  <body>
+     <ol>
+        <li><a href="http://example.com/">Choice A</a></li>
+        <li><a href="http://example.org">Choice B</a></li>
+     </ol>
+  </body>
+</html>
diff --git a/test/301/final-destination.txt b/test/301/final-destination.txt
new file mode 100644
index 0000000..4ffba65
--- /dev/null
+++ b/test/301/final-destination.txt
@@ -0,0 +1 @@
+This is the final destination.
diff --git a/test/301/onestep.asis b/test/301/onestep.asis
new file mode 100644
index 0000000..7360725
--- /dev/null
+++ b/test/301/onestep.asis
@@ -0,0 +1,14 @@
+Status: 301 Now where did I leave that URL
+Location: http://bitworking.org/projects/httplib2/test/302/final-destination.txt
+Content-type: text/html
+
+<html>
+<head>
+<title>Lame excuses'R'us</title>
+</head>
+<body>
+<h1>Fred's exceptionally wonderful page has moved to
+<a href="http://example.com/foo/bar.html">Joe's</a> site.
+</h1>
+</body>
+</html>
diff --git a/test/302/.myhtaccess b/test/302/.myhtaccess
new file mode 100644
index 0000000..844154f
--- /dev/null
+++ b/test/302/.myhtaccess
@@ -0,0 +1 @@
+Redirect temp onestep final-destination.txt
diff --git a/test/302/final-destination.txt b/test/302/final-destination.txt
new file mode 100644
index 0000000..4ffba65
--- /dev/null
+++ b/test/302/final-destination.txt
@@ -0,0 +1 @@
+This is the final destination.
diff --git a/test/302/no-location.asis b/test/302/no-location.asis
new file mode 100644
index 0000000..4f9d717
--- /dev/null
+++ b/test/302/no-location.asis
@@ -0,0 +1,6 @@
+Content-Type: text/plain
+Status: 302 Found
+
+This is content.
+Note there is no Location header given.
+This should err.
diff --git a/test/302/onestep.asis b/test/302/onestep.asis
new file mode 100644
index 0000000..3db9033
--- /dev/null
+++ b/test/302/onestep.asis
@@ -0,0 +1,14 @@
+Status: 302 Now where did I leave that URL
+Location: http://bitworking.org/projects/httplib2/test/302/final-destination.txt
+Content-type: text/html
+
+<html>
+<head>
+<title>Lame excuses'R'us</title>
+</head>
+<body>
+<h1>Fred's exceptionally wonderful page has moved to
+<a href="http://example.com/foo/bar.html">Joe's</a> site.
+</h1>
+</body>
+</html>
diff --git a/test/302/twostep.asis b/test/302/twostep.asis
new file mode 100644
index 0000000..ad0d457
--- /dev/null
+++ b/test/302/twostep.asis
@@ -0,0 +1,14 @@
+Status: 302 Now where did I leave that URL
+Location: http://bitworking.org/projects/httplib2/test/302/onestep.asis
+Content-type: text/html
+
+<html>
+<head>
+<title>Lame excuses'R'us</title>
+</head>
+<body>
+<h1>Fred's exceptionally wonderful page has moved to
+<a href="http://example.com/foo/bar.html">Joe's</a> site.
+</h1>
+</body>
+</html>
diff --git a/test/303/303.cgi b/test/303/303.cgi
new file mode 100755
index 0000000..c6500b8
--- /dev/null
+++ b/test/303/303.cgi
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import os
+
+# Always returns an empty response body
+# and adds in the X-Method: header with the
+# method that was sent to the CGI
+
+print "Status: 303 See Other"
+print "Location: http://bitworking.org/projects/httplib2/test/303/final-destination.txt"
+print "X-Method: %s" % os.environ['REQUEST_METHOD']
+print ""
+
+
diff --git a/test/303/final-destination.txt b/test/303/final-destination.txt
new file mode 100644
index 0000000..4ffba65
--- /dev/null
+++ b/test/303/final-destination.txt
@@ -0,0 +1 @@
+This is the final destination.
diff --git a/test/303/redirect-to-reflector.cgi b/test/303/redirect-to-reflector.cgi
new file mode 100755
index 0000000..b42100a
--- /dev/null
+++ b/test/303/redirect-to-reflector.cgi
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import os
+
+# Always returns an empty response body
+# and adds in the X-Method: header with the
+# method that was sent to the CGI
+
+print "Status: 303 See Other"
+print "Location: http://bitworking.org/projects/httplib2/test/methods/method_reflector.cgi"
+print "X-Method: %s" % os.environ['REQUEST_METHOD']
+print ""
+
+
diff --git a/test/304/end2end.cgi b/test/304/end2end.cgi
new file mode 100755
index 0000000..f158f66
--- /dev/null
+++ b/test/304/end2end.cgi
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import os
+
+
+etag = os.environ.get("HTTP_IF_NONE_MATCH", None)
+if etag:
+    print "Status: 304 Not Modified"
+else:
+    print "Status: 200 Ok"
+    print 'ETag: "123456779"'
+    print "Content-Type: text/html"
+    print ""
+    print "<html></html>"
diff --git a/test/304/last-modified-only/.htaccess b/test/304/last-modified-only/.htaccess
new file mode 100644
index 0000000..7ec3ed9
--- /dev/null
+++ b/test/304/last-modified-only/.htaccess
@@ -0,0 +1 @@
+FileETag None
diff --git a/test/304/last-modified-only/last-modified-only.txt b/test/304/last-modified-only/last-modified-only.txt
new file mode 100644
index 0000000..baad073
--- /dev/null
+++ b/test/304/last-modified-only/last-modified-only.txt
@@ -0,0 +1 @@
+This file should automatically get an ETag from Apache.
diff --git a/test/304/test_etag.txt b/test/304/test_etag.txt
new file mode 100644
index 0000000..baad073
--- /dev/null
+++ b/test/304/test_etag.txt
@@ -0,0 +1 @@
+This file should automatically get an ETag from Apache.
diff --git a/test/307/final-destination.txt b/test/307/final-destination.txt
new file mode 100644
index 0000000..4ffba65
--- /dev/null
+++ b/test/307/final-destination.txt
@@ -0,0 +1 @@
+This is the final destination.
diff --git a/test/307/onestep.asis b/test/307/onestep.asis
new file mode 100644
index 0000000..314cfa0
--- /dev/null
+++ b/test/307/onestep.asis
@@ -0,0 +1,14 @@
+Status: 307 Temporary Redirect
+Location: http://bitworking.org/projects/httplib2/test/307/final-destination.txt
+Content-type: text/html
+
+<html>
+<head>
+<title>Lame excuses'R'us</title>
+</head>
+<body>
+<h1>Fred's exceptionally wonderful page has moved to
+<a href="http://example.com/foo/bar.html">Joe's</a> site.
+</h1>
+</body>
+</html>
diff --git a/test/410/410.asis b/test/410/410.asis
new file mode 100644
index 0000000..9bf5f76
--- /dev/null
+++ b/test/410/410.asis
@@ -0,0 +1,11 @@
+Status: 410 Gone
+Content-type: text/html
+
+<html>
+<head>
+<title>Gone</title>
+</head>
+<body>
+<h1>Don't request me again.</h1>
+</body>
+</html>
diff --git a/test/basic-nested/.htaccess b/test/basic-nested/.htaccess
new file mode 100644
index 0000000..21623cd
--- /dev/null
+++ b/test/basic-nested/.htaccess
@@ -0,0 +1,4 @@
+AuthUserFile /home/jcgregorio/web/bitworking.org/projects/httplib2/test/basic/passwdfile 
+AuthName "a realm with spaces"
+AuthType Basic
+require valid-user
diff --git a/test/basic-nested/file.txt b/test/basic-nested/file.txt
new file mode 100755
index 0000000..395b52f
--- /dev/null
+++ b/test/basic-nested/file.txt
@@ -0,0 +1,2 @@
+This is the content.
+
diff --git a/test/basic-nested/passwdfile b/test/basic-nested/passwdfile
new file mode 100644
index 0000000..7de4f23
--- /dev/null
+++ b/test/basic-nested/passwdfile
@@ -0,0 +1 @@
+joe:J5h11U4s90MWc
diff --git a/test/basic-nested/subdir/.htaccess b/test/basic-nested/subdir/.htaccess
new file mode 100644
index 0000000..1fe3c22
--- /dev/null
+++ b/test/basic-nested/subdir/.htaccess
@@ -0,0 +1,4 @@
+AuthUserFile /home/jcgregorio/web/bitworking.org/projects/httplib2/test/basic2/passwdfile 
+AuthName "justarealm"
+AuthType Basic
+require valid-user
diff --git a/test/basic-nested/subdir/file.txt b/test/basic-nested/subdir/file.txt
new file mode 100755
index 0000000..395b52f
--- /dev/null
+++ b/test/basic-nested/subdir/file.txt
@@ -0,0 +1,2 @@
+This is the content.
+
diff --git a/test/basic-nested/subdir/passwdfile b/test/basic-nested/subdir/passwdfile
new file mode 100644
index 0000000..2ddbadc
--- /dev/null
+++ b/test/basic-nested/subdir/passwdfile
@@ -0,0 +1 @@
+fred:TBd7idzkX/v6Q
diff --git a/test/basic/.htaccess b/test/basic/.htaccess
new file mode 100644
index 0000000..21623cd
--- /dev/null
+++ b/test/basic/.htaccess
@@ -0,0 +1,4 @@
+AuthUserFile /home/jcgregorio/web/bitworking.org/projects/httplib2/test/basic/passwdfile 
+AuthName "a realm with spaces"
+AuthType Basic
+require valid-user
diff --git a/test/basic/file.txt b/test/basic/file.txt
new file mode 100755
index 0000000..395b52f
--- /dev/null
+++ b/test/basic/file.txt
@@ -0,0 +1,2 @@
+This is the content.
+
diff --git a/test/basic/passwdfile b/test/basic/passwdfile
new file mode 100644
index 0000000..7de4f23
--- /dev/null
+++ b/test/basic/passwdfile
@@ -0,0 +1 @@
+joe:J5h11U4s90MWc
diff --git a/test/basic2/.htaccess b/test/basic2/.htaccess
new file mode 100644
index 0000000..1fe3c22
--- /dev/null
+++ b/test/basic2/.htaccess
@@ -0,0 +1,4 @@
+AuthUserFile /home/jcgregorio/web/bitworking.org/projects/httplib2/test/basic2/passwdfile 
+AuthName "justarealm"
+AuthType Basic
+require valid-user
diff --git a/test/basic2/file.txt b/test/basic2/file.txt
new file mode 100755
index 0000000..395b52f
--- /dev/null
+++ b/test/basic2/file.txt
@@ -0,0 +1,2 @@
+This is the content.
+
diff --git a/test/basic2/passwdfile b/test/basic2/passwdfile
new file mode 100644
index 0000000..2ddbadc
--- /dev/null
+++ b/test/basic2/passwdfile
@@ -0,0 +1 @@
+fred:TBd7idzkX/v6Q
diff --git a/test/conditional-updates/test.cgi b/test/conditional-updates/test.cgi
new file mode 100755
index 0000000..b7eb351
--- /dev/null
+++ b/test/conditional-updates/test.cgi
@@ -0,0 +1,28 @@
+#!/usr/bin/env python
+import os
+
+# Always returns an empty response body
+# and adds in the X-Method: header with the
+# method that was sent to the CGI
+
+method = os.environ['REQUEST_METHOD']
+if "GET" == method:
+    if "123456789" == os.environ.get('HTTP_IF_NONE_MATCH', ''):
+        print "Status: 304 Not Modified"
+    else:
+        print "Status: 200 Ok"
+        print "ETag: 123456789" 
+        print ""
+elif method in ["PUT", "DELETE"]:
+    if "123456789" == os.environ.get('HTTP_IF_MATCH', ''):
+        print "Status: 200 Ok"
+        print ""
+    else:
+        print "Status: 412 Precondition Failed"
+        print ""
+else:
+    print "Status: 405 Method Not Allowed"
+    print ""
+
+
+
diff --git a/test/deflate/deflated-content b/test/deflate/deflated-content
new file mode 100644
index 0000000..4a548b4
--- /dev/null
+++ b/test/deflate/deflated-content
Binary files differ
diff --git a/test/deflate/deflated-headers.txt b/test/deflate/deflated-headers.txt
new file mode 100644
index 0000000..54409ad
--- /dev/null
+++ b/test/deflate/deflated-headers.txt
@@ -0,0 +1,3 @@
+Content-type: text/plain
+Content-Encoding: deflate
+
diff --git a/test/deflate/deflated.asis b/test/deflate/deflated.asis
new file mode 100644
index 0000000..cdb0c02
--- /dev/null
+++ b/test/deflate/deflated.asis
Binary files differ
diff --git a/test/deflate/failed-compression.asis b/test/deflate/failed-compression.asis
new file mode 100644
index 0000000..06f075a
--- /dev/null
+++ b/test/deflate/failed-compression.asis
@@ -0,0 +1,5 @@
+Content-Encoding: gzip
+Content-Type: text/plain
+Status: 200 Ok
+
+This is obviously not compressed.
diff --git a/test/digest-expire/.htaccess b/test/digest-expire/.htaccess
new file mode 100644
index 0000000..66cc47a
--- /dev/null
+++ b/test/digest-expire/.htaccess
@@ -0,0 +1,5 @@
+AuthType Digest
+AuthDigestNonceLifetime 1 
+AuthName "myrealm"
+AuthDigestFile /home/jcgregorio/web/bitworking.org/projects/httplib2/test/digest/digestpw 
+Require valid-user
diff --git a/test/digest-expire/digestpw b/test/digest-expire/digestpw
new file mode 100755
index 0000000..fcdc74f
--- /dev/null
+++ b/test/digest-expire/digestpw
@@ -0,0 +1 @@
+joe:myrealm:079c7228d541e1b282713f4c146de5e7
diff --git a/test/digest-expire/file.txt b/test/digest-expire/file.txt
new file mode 100644
index 0000000..d39bb09
--- /dev/null
+++ b/test/digest-expire/file.txt
@@ -0,0 +1 @@
+This is spinal tap.
diff --git a/test/digest/.htaccess b/test/digest/.htaccess
new file mode 100644
index 0000000..4831a12
--- /dev/null
+++ b/test/digest/.htaccess
@@ -0,0 +1,4 @@
+AuthType Digest
+AuthName "myrealm"
+AuthDigestFile /home/jcgregorio/web/bitworking.org/projects/httplib2/test/digest/digestpw 
+Require valid-user
diff --git a/test/digest/digestpw b/test/digest/digestpw
new file mode 100755
index 0000000..fcdc74f
--- /dev/null
+++ b/test/digest/digestpw
@@ -0,0 +1 @@
+joe:myrealm:079c7228d541e1b282713f4c146de5e7
diff --git a/test/digest/file.txt b/test/digest/file.txt
new file mode 100644
index 0000000..d39bb09
--- /dev/null
+++ b/test/digest/file.txt
@@ -0,0 +1 @@
+This is spinal tap.
diff --git a/test/duplicate-headers/multilink.asis b/test/duplicate-headers/multilink.asis
new file mode 100644
index 0000000..aac20e5
--- /dev/null
+++ b/test/duplicate-headers/multilink.asis
@@ -0,0 +1,6 @@
+Link: <http://bitworking.org>; rel="home"; title="BitWorking"
+Link: <http://bitworking.org/index.rss>; rel="feed"; title="BitWorking"
+Content-Type: text/plain
+Status: 200 Ok
+
+This is content
diff --git a/test/gzip/.htaccess b/test/gzip/.htaccess
new file mode 100644
index 0000000..fedf381
--- /dev/null
+++ b/test/gzip/.htaccess
@@ -0,0 +1 @@
+AddOutputFilterByType DEFLATE text/html text/plain
diff --git a/test/gzip/failed-compression.asis b/test/gzip/failed-compression.asis
new file mode 100644
index 0000000..06f075a
--- /dev/null
+++ b/test/gzip/failed-compression.asis
@@ -0,0 +1,5 @@
+Content-Encoding: gzip
+Content-Type: text/plain
+Status: 200 Ok
+
+This is obviously not compressed.
diff --git a/test/gzip/final-destination.txt b/test/gzip/final-destination.txt
new file mode 100644
index 0000000..4ffba65
--- /dev/null
+++ b/test/gzip/final-destination.txt
@@ -0,0 +1 @@
+This is the final destination.
diff --git a/test/methods/method_reflector.cgi b/test/methods/method_reflector.cgi
new file mode 100755
index 0000000..ae55af8
--- /dev/null
+++ b/test/methods/method_reflector.cgi
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+import os
+
+# Always returns an empty response body
+# and adds in the X-Method: header with the
+# method that was sent to the CGI
+
+print "Status: 200 Ok"
+print "X-Method: %s" % os.environ['REQUEST_METHOD']
+print ""
+
+
diff --git a/test/no-store/no-store.asis b/test/no-store/no-store.asis
new file mode 100644
index 0000000..160ad44
--- /dev/null
+++ b/test/no-store/no-store.asis
@@ -0,0 +1,8 @@
+Status: 200 Ok
+Last-Modified: Fri, 30 Dec 2005 21:57:33 GMT
+Etag: "11c415a-8826-eb9c2d40"
+Cache-Control: max-age=7200, no-store
+Expires: Mon, 02 Jan 2006 04:06:44 GMT
+Content-Type: text/plain
+
+fred
diff --git a/test/reflector/reflector.cgi b/test/reflector/reflector.cgi
new file mode 100755
index 0000000..2171dd0
--- /dev/null
+++ b/test/reflector/reflector.cgi
@@ -0,0 +1,13 @@
+#!/usr/bin/env python
+import os
+
+# Always returns an empty response body
+# and adds in the X-Method: header with the
+# method that was sent to the CGI
+
+print "Status: 200 Ok"
+print "Content-type: text/plain"
+print ""
+print "\n".join(["%s=%s" % (key, value) for key, value in  os.environ.iteritems()])
+
+
diff --git a/test/test.asis b/test/test.asis
new file mode 100644
index 0000000..d1d567b
--- /dev/null
+++ b/test/test.asis
@@ -0,0 +1,14 @@
+Status: 301 Now where did I leave that URL
+Location: http://example.com/foo/bar.html
+Content-type: text/html
+
+<html>
+<head>
+<title>Lame excuses'R'us</title>
+</head>
+<body>
+<h1>Fred's exceptionally wonderful page has moved to
+<a href="http://example.com/foo/bar.html">Joe's</a> site.
+</h1>
+</body>
+</html>
diff --git a/test/user-agent/test.cgi b/test/user-agent/test.cgi
new file mode 100755
index 0000000..a7b5454
--- /dev/null
+++ b/test/user-agent/test.cgi
@@ -0,0 +1,12 @@
+#!/usr/bin/env python
+import os
+
+# Always returns an empty response body
+# and adds in the X-Method: header with the
+# method that was sent to the CGI
+
+print "Status: 200 Ok"
+print "Content-Type: text/plain"
+print ""
+print os.environ.get('HTTP_USER_AGENT', '')
+