#!/usr/bin/env python3
"""
httplib2test

A set of unit tests for httplib2.py.

Requires Python 3.0 or later
"""

__author__ = "Joe Gregorio (joe@bitworking.org)"
__copyright__ = "Copyright 2006, Joe Gregorio"
__contributors__ = ["Mark Pilgrim"]
__license__ = "MIT"
__history__ = """ """
__version__ = "0.2 ($Rev: 118 $)"

import base64
import http.client
import httplib2
import io
import os
import pickle
import socket
import ssl
import sys
import time
import unittest
import urllib.parse

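# A convenient way to run these tests (one possibility, not mandated by this
# file) is the standard unittest command-line interface, e.g.:
#
#   python3 -m unittest httplib2test -v
#   python3 -m unittest httplib2test.CredentialsTest -v
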
# The test resources base uri
base = 'http://bitworking.org/projects/httplib2/test/'
#base = 'http://localhost/projects/httplib2/test/'
cacheDirName = ".cache"

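# Note that most of the HttpTest cases below make live requests against the
# resources under `base`, so they need network access to that host; pointing
# `base` at a local mirror (see the commented-out localhost line above) is one
# way to keep the suite self-contained.
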
class CredentialsTest(unittest.TestCase):
    def test(self):
        c = httplib2.Credentials()
        c.add("joe", "password")
        self.assertEqual(("joe", "password"), list(c.iter("bitworking.org"))[0])
        self.assertEqual(("joe", "password"), list(c.iter(""))[0])
        c.add("fred", "password2", "wellformedweb.org")
        self.assertEqual(("joe", "password"), list(c.iter("bitworking.org"))[0])
        self.assertEqual(1, len(list(c.iter("bitworking.org"))))
        self.assertEqual(2, len(list(c.iter("wellformedweb.org"))))
        self.assertTrue(("fred", "password2") in list(c.iter("wellformedweb.org")))
        c.clear()
        self.assertEqual(0, len(list(c.iter("bitworking.org"))))
        c.add("fred", "password2", "wellformedweb.org")
        self.assertTrue(("fred", "password2") in list(c.iter("wellformedweb.org")))
        self.assertEqual(0, len(list(c.iter("bitworking.org"))))
        self.assertEqual(0, len(list(c.iter(""))))


class ParserTest(unittest.TestCase):
    def testFromStd66(self):
        self.assertEqual(('http', 'example.com', '', None, None), httplib2.parse_uri("http://example.com"))
        self.assertEqual(('https', 'example.com', '', None, None), httplib2.parse_uri("https://example.com"))
        self.assertEqual(('https', 'example.com:8080', '', None, None), httplib2.parse_uri("https://example.com:8080"))
        self.assertEqual(('http', 'example.com', '/', None, None), httplib2.parse_uri("http://example.com/"))
        self.assertEqual(('http', 'example.com', '/path', None, None), httplib2.parse_uri("http://example.com/path"))
        self.assertEqual(('http', 'example.com', '/path', 'a=1&b=2', None), httplib2.parse_uri("http://example.com/path?a=1&b=2"))
        self.assertEqual(('http', 'example.com', '/path', 'a=1&b=2', 'fred'), httplib2.parse_uri("http://example.com/path?a=1&b=2#fred"))
        self.assertEqual(('http', 'example.com', '/path', 'a=1&b=2', 'fred'), httplib2.parse_uri("http://example.com/path?a=1&b=2#fred"))


class UrlNormTest(unittest.TestCase):
    def test(self):
        self.assertEqual("http://example.org/", httplib2.urlnorm("http://example.org")[-1])
        self.assertEqual("http://example.org/", httplib2.urlnorm("http://EXAMple.org")[-1])
        self.assertEqual("http://example.org/?=b", httplib2.urlnorm("http://EXAMple.org?=b")[-1])
        self.assertEqual("http://example.org/mypath?a=b", httplib2.urlnorm("http://EXAMple.org/mypath?a=b")[-1])
        self.assertEqual("http://localhost:80/", httplib2.urlnorm("http://localhost:80")[-1])
        self.assertEqual(httplib2.urlnorm("http://localhost:80/"), httplib2.urlnorm("HTTP://LOCALHOST:80"))
        try:
            httplib2.urlnorm("/")
            self.fail("Non-absolute URIs should raise an exception")
        except httplib2.RelativeURIError:
            pass


class UrlSafenameTest(unittest.TestCase):
    def test(self):
        # Test that different URIs end up generating different safe names
        self.assertEqual("example.org,fred,a=b,58489f63a7a83c3b7794a6a398ee8b1f", httplib2.safename("http://example.org/fred/?a=b"))
        self.assertEqual("example.org,fred,a=b,8c5946d56fec453071f43329ff0be46b", httplib2.safename("http://example.org/fred?/a=b"))
        self.assertEqual("www.example.org,fred,a=b,499c44b8d844a011b67ea2c015116968", httplib2.safename("http://www.example.org/fred?/a=b"))
        self.assertEqual(httplib2.safename(httplib2.urlnorm("http://www")[-1]), httplib2.safename(httplib2.urlnorm("http://WWW")[-1]))
        self.assertEqual("www.example.org,fred,a=b,692e843a333484ce0095b070497ab45d", httplib2.safename("https://www.example.org/fred?/a=b"))
        self.assertNotEqual(httplib2.safename("http://www"), httplib2.safename("https://www"))
        # Test the max length limits
        uri = "http://" + ("w" * 200) + ".org"
        uri2 = "http://" + ("w" * 201) + ".org"
        self.assertNotEqual(httplib2.safename(uri2), httplib2.safename(uri))
        # Max length should be 200 + 1 (",") + 32
        self.assertEqual(233, len(httplib2.safename(uri2)))
        self.assertEqual(233, len(httplib2.safename(uri)))
        # Unicode
        if sys.version_info >= (2, 3):
            self.assertEqual("xn--http,-4y1d.org,fred,a=b,579924c35db315e5a32e3d9963388193", httplib2.safename("http://\u2304.org/fred/?a=b"))

class _MyResponse(io.BytesIO):
    "A minimal response stand-in: a BytesIO body that also carries headers."

    def __init__(self, body, **kwargs):
        io.BytesIO.__init__(self, body)
        self.headers = kwargs

    def items(self):
        return self.headers.items()

    def iteritems(self):
        return iter(self.headers.items())


class _MyHTTPConnection(object):
    "This class is just a mock of http.client.HTTPConnection used for testing"

    def __init__(self, host, port=None, key_file=None, cert_file=None,
                 strict=None, timeout=None, proxy_info=None):
        self.host = host
        self.port = port
        self.timeout = timeout
        self.log = ""
        self.sock = None

    def set_debuglevel(self, level):
        pass

    def connect(self):
        "Connect to a host on a given port."
        pass

    def close(self):
        pass

    def request(self, method, request_uri, body, headers):
        pass

    def getresponse(self):
        return _MyResponse(b"the body", status="200")

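# The mock above is intended to be handed to Http.request() via the
# connection_type keyword (as testConnectionType below does) so that no real
# socket is opened; a minimal sketch:
#
#   h = httplib2.Http()
#   resp, body = h.request("http://bitworking.org",
#                          connection_type=_MyHTTPConnection)
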
class HttpTest(unittest.TestCase):
    def setUp(self):
        if os.path.exists(cacheDirName):
            for file in os.listdir(cacheDirName):
                os.remove(os.path.join(cacheDirName, file))
        self.http = httplib2.Http(cacheDirName)
        self.http.clear_credentials()

    def testIPv6NoSSL(self):
        try:
            self.http.request("http://[::1]/")
        except socket.gaierror:
            self.fail("should get the address family right for IPv6")
        except socket.error:
            # Even if IPv6 isn't installed on a machine it should just raise socket.error
            pass

    def testIPv6SSL(self):
        try:
            self.http.request("https://[::1]/")
        except socket.gaierror:
            self.fail("should get the address family right for IPv6")
        except socket.error:
            # Even if IPv6 isn't installed on a machine it should just raise socket.error
            pass

    def testConnectionType(self):
        self.http.force_exception_to_status_code = False
        response, content = self.http.request("http://bitworking.org", connection_type=_MyHTTPConnection)
        self.assertEqual(response['content-location'], "http://bitworking.org")
        self.assertEqual(content, b"the body")

    def testGetUnknownServer(self):
        self.http.force_exception_to_status_code = False
        try:
            self.http.request("http://fred.bitworking.org/")
            self.fail("An httplib2.ServerNotFoundError exception must be raised for an unresolvable server.")
        except httplib2.ServerNotFoundError:
            pass

        # Now test with exceptions turned off
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request("http://fred.bitworking.org/")
        self.assertEqual(response['content-type'], 'text/plain')
        self.assertTrue(content.startswith(b"Unable to find"))
        self.assertEqual(response.status, 400)

    def testGetConnectionRefused(self):
        self.http.force_exception_to_status_code = False
        try:
            self.http.request("http://localhost:7777/")
            self.fail("A socket.error exception must be raised when the connection is refused.")
        except socket.error:
            pass

        # Now test with exceptions turned off
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request("http://localhost:7777/")
        self.assertEqual(response['content-type'], 'text/plain')
        self.assertTrue(b"Connection refused" in content)
        self.assertEqual(response.status, 400)

    def testGetIRI(self):
        if sys.version_info >= (2, 3):
            uri = urllib.parse.urljoin(base, "reflector/reflector.cgi?d=\N{CYRILLIC CAPITAL LETTER DJE}")
            (response, content) = self.http.request(uri, "GET")
            d = self.reflector(content)
            self.assertTrue('QUERY_STRING' in d)
            self.assertTrue(d['QUERY_STRING'].find('%D0%82') > 0)

    def testGetIsDefaultMethod(self):
        # Test that GET is the default method
        uri = urllib.parse.urljoin(base, "methods/method_reflector.cgi")
        (response, content) = self.http.request(uri)
        self.assertEqual(response['x-method'], "GET")

    def testDifferentMethods(self):
        # Test that all methods can be used
        uri = urllib.parse.urljoin(base, "methods/method_reflector.cgi")
        for method in ["GET", "PUT", "DELETE", "POST"]:
            (response, content) = self.http.request(uri, method, body=b" ")
            self.assertEqual(response['x-method'], method)

    def testHeadRead(self):
        # Test that we don't try to read the response of a HEAD request
        # since http.client blocks response.read() for HEAD requests.
        # Oddly enough this doesn't appear as a problem when doing HEAD requests
        # against Apache servers.
        uri = "http://www.google.com/"
        (response, content) = self.http.request(uri, "HEAD")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, b"")

    def testGetNoCache(self):
        # Test that we can do a GET without the cache turned on.
        http = httplib2.Http()
        uri = urllib.parse.urljoin(base, "304/test_etag.txt")
        (response, content) = http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.previous, None)

    def testGetOnlyIfCachedCacheHit(self):
        # Test that we can do a GET with the cache on and 'only-if-cached'
        uri = urllib.parse.urljoin(base, "304/test_etag.txt")
        (response, content) = self.http.request(uri, "GET")
        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
        self.assertEqual(response.fromcache, True)
        self.assertEqual(response.status, 200)

    def testGetOnlyIfCachedCacheMiss(self):
        # Test a GET with 'only-if-cached' when the resource is not yet cached
        uri = urllib.parse.urljoin(base, "304/test_etag.txt")
        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
        self.assertEqual(response.fromcache, False)
        self.assertEqual(response.status, 504)

    def testGetOnlyIfCachedNoCacheAtAll(self):
        # Test a GET with 'only-if-cached' when there is no cache at all.
        # Of course, there might be an intermediary beyond us
        # that responds to the 'only-if-cached', so this
        # test can't really be guaranteed to pass.
        http = httplib2.Http()
        uri = urllib.parse.urljoin(base, "304/test_etag.txt")
        (response, content) = http.request(uri, "GET", headers={'cache-control': 'only-if-cached'})
        self.assertEqual(response.fromcache, False)
        self.assertEqual(response.status, 504)

    def testUserAgent(self):
        # Test that we provide a default user-agent
        uri = urllib.parse.urljoin(base, "user-agent/test.cgi")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertTrue(content.startswith(b"Python-httplib2/"))

    def testUserAgentNonDefault(self):
        # Test that the default user-agent can be overridden
        uri = urllib.parse.urljoin(base, "user-agent/test.cgi")
        (response, content) = self.http.request(uri, "GET", headers={'User-Agent': 'fred/1.0'})
        self.assertEqual(response.status, 200)
        self.assertTrue(content.startswith(b"fred/1.0"))

    def testGet300WithLocation(self):
        # Test that we automatically follow 300 redirects if a Location: header is provided
        uri = urllib.parse.urljoin(base, "300/with-location-header.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 300)
        self.assertEqual(response.previous.fromcache, False)

        # Confirm that the intermediate 300 is not cached
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 300)
        self.assertEqual(response.previous.fromcache, False)

    def testGet300WithLocationNoRedirect(self):
        # Test that we do not follow a 300 redirect when follow_redirects is off
        self.http.follow_redirects = False
        uri = urllib.parse.urljoin(base, "300/with-location-header.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 300)

    def testGet300WithoutLocation(self):
        # Not giving a Location: header in a 300 response is acceptable,
        # in which case we just return the 300 response
        uri = urllib.parse.urljoin(base, "300/without-location-header.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 300)
        self.assertTrue(response['content-type'].startswith("text/html"))
        self.assertEqual(response.previous, None)

    def testGet301(self):
        # Test that we automatically follow 301 redirects
        # and that we cache the 301 response
        uri = urllib.parse.urljoin(base, "301/onestep.asis")
        destination = urllib.parse.urljoin(base, "302/final-destination.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertTrue('content-location' in response)
        self.assertEqual(response['content-location'], destination)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 301)
        self.assertEqual(response.previous.fromcache, False)

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response['content-location'], destination)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 301)
        self.assertEqual(response.previous.fromcache, True)

    def testHead301(self):
        # Test that we automatically follow 301 redirects
        uri = urllib.parse.urljoin(base, "301/onestep.asis")
        (response, content) = self.http.request(uri, "HEAD")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.previous.status, 301)
        self.assertEqual(response.previous.fromcache, False)

    def testGet301NoRedirect(self):
        # Test that we do not follow a 301 redirect when follow_redirects is off
        self.http.follow_redirects = False
        uri = urllib.parse.urljoin(base, "301/onestep.asis")
        destination = urllib.parse.urljoin(base, "302/final-destination.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 301)

    def testGet302(self):
        # Test that we automatically follow 302 redirects
        # and that we DO NOT cache the 302 response
        uri = urllib.parse.urljoin(base, "302/onestep.asis")
        destination = urllib.parse.urljoin(base, "302/final-destination.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response['content-location'], destination)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 302)
        self.assertEqual(response.previous.fromcache, False)

        uri = urllib.parse.urljoin(base, "302/onestep.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.assertEqual(response['content-location'], destination)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 302)
        self.assertEqual(response.previous.fromcache, False)
        self.assertEqual(response.previous['content-location'], uri)

        uri = urllib.parse.urljoin(base, "302/twostep.asis")

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 302)
        self.assertEqual(response.previous.fromcache, False)

    def testGet302RedirectionLimit(self):
        # Test that we can set a lower redirection limit
        # and that we raise an exception when we exceed
        # that limit.
        self.http.force_exception_to_status_code = False

        uri = urllib.parse.urljoin(base, "302/twostep.asis")
        try:
            (response, content) = self.http.request(uri, "GET", redirections=1)
            self.fail("This should not happen")
        except httplib2.RedirectLimit:
            pass
        except Exception:
            self.fail("Threw wrong kind of exception")

        # Re-run the test with exceptions turned off
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request(uri, "GET", redirections=1)
        self.assertEqual(response.status, 500)
        self.assertTrue(response.reason.startswith("Redirected more"))
        self.assertEqual("302", response['status'])
        self.assertTrue(content.startswith(b"<html>"))
        self.assertTrue(response.previous != None)

    def testGet302NoLocation(self):
        # Test that we throw an exception when we get
        # a 302 with no Location: header.
        self.http.force_exception_to_status_code = False
        uri = urllib.parse.urljoin(base, "302/no-location.asis")
        try:
            (response, content) = self.http.request(uri, "GET")
            self.fail("Should never reach here")
        except httplib2.RedirectMissingLocation:
            pass
        except Exception:
            self.fail("Threw wrong kind of exception")

        # Re-run the test with exceptions turned off
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 500)
        self.assertTrue(response.reason.startswith("Redirected but"))
        self.assertEqual("302", response['status'])
        self.assertTrue(content.startswith(b"This is content"))

    def testGet301ViaHttps(self):
        # Google always redirects to http://google.com
        (response, content) = self.http.request("https://code.google.com/apis/", "GET")
        self.assertEqual(200, response.status)
        self.assertEqual(301, response.previous.status)

    def testGetViaHttps(self):
        # Test that we can handle HTTPS
        (response, content) = self.http.request("https://google.com/adsense/", "GET")
        self.assertEqual(200, response.status)

    def testGetViaHttpsSpecViolationOnLocation(self):
        # Test that we follow redirects through HTTPS
        # even if they violate the spec by including
        # a relative Location: header instead of an
        # absolute one.
        (response, content) = self.http.request("https://google.com/adsense", "GET")
        self.assertEqual(200, response.status)
        self.assertNotEqual(None, response.previous)

    def testGetViaHttpsKeyCert(self):
        # At this point I can only test
        # that the key and cert files are passed in
        # correctly to http.client. It would be nice to have
        # a real https endpoint to test against.
        http = httplib2.Http(timeout=2)

        http.add_certificate("akeyfile", "acertfile", "bitworking.org")
        try:
            (response, content) = http.request("https://bitworking.org", "GET")
        except AttributeError:
            self.assertEqual(http.connections["https:bitworking.org"].key_file, "akeyfile")
            self.assertEqual(http.connections["https:bitworking.org"].cert_file, "acertfile")
        except IOError:
            # Skip on 3.2
            pass

        try:
            (response, content) = http.request("https://notthere.bitworking.org", "GET")
        except httplib2.ServerNotFoundError:
            self.assertEqual(http.connections["https:notthere.bitworking.org"].key_file, None)
            self.assertEqual(http.connections["https:notthere.bitworking.org"].cert_file, None)
        except IOError:
            # Skip on 3.2
            pass

    def testSslCertValidation(self):
        # Test that we get an IOError when specifying a non-existent CA
        # certs file.
        http = httplib2.Http(ca_certs='/nosuchfile')
        self.assertRaises(IOError,
                          http.request, "https://www.google.com/", "GET")

        # Test that we get an ssl.SSLError if we try to access
        # https://www.google.com, using a CA cert file that doesn't contain
        # the CA Google uses (i.e., simulating a cert that's not signed by a
        # trusted CA).
        other_ca_certs = os.path.join(
            os.path.dirname(os.path.abspath(httplib2.__file__)),
            "test", "other_cacerts.txt")
        http = httplib2.Http(ca_certs=other_ca_certs)
        self.assertRaises(ssl.SSLError,
                          http.request, "https://www.google.com/", "GET")

    def testSniHostnameValidation(self):
        # The request completing without an exception is the assertion here:
        # the certificate must validate against the SNI hostname.
        self.http.request("https://google.com/", method="GET")

    def testGet303(self):
        # Do a follow-up GET on a Location: header
        # returned from a POST that gave a 303.
        uri = urllib.parse.urljoin(base, "303/303.cgi")
        (response, content) = self.http.request(uri, "POST", " ")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 303)

    def testGet303NoRedirect(self):
        # With follow_redirects off, the 303 from the POST
        # is returned as-is instead of being followed.
        self.http.follow_redirects = False
        uri = urllib.parse.urljoin(base, "303/303.cgi")
        (response, content) = self.http.request(uri, "POST", " ")
        self.assertEqual(response.status, 303)

    def test303ForDifferentMethods(self):
        # Test that all methods can be used
        uri = urllib.parse.urljoin(base, "303/redirect-to-reflector.cgi")
        for (method, method_on_303) in [("PUT", "GET"), ("DELETE", "GET"), ("POST", "GET"), ("GET", "GET"), ("HEAD", "GET")]:
            (response, content) = self.http.request(uri, method, body=b" ")
            self.assertEqual(response['x-method'], method_on_303)

    def testGet304(self):
        # Test that we use ETags properly to validate our cache
        uri = urllib.parse.urljoin(base, "304/test_etag.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")

        (response, content) = self.http.request(uri, "GET")
        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'must-revalidate'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        cache_file_name = os.path.join(cacheDirName, httplib2.safename(httplib2.urlnorm(uri)[-1]))
        f = open(cache_file_name, "r")
        status_line = f.readline()
        f.close()

        self.assertTrue(status_line.startswith("status:"))

        (response, content) = self.http.request(uri, "HEAD")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        (response, content) = self.http.request(uri, "GET", headers={'range': 'bytes=0-0'})
        self.assertEqual(response.status, 206)
        self.assertEqual(response.fromcache, False)

    def testGetIgnoreEtag(self):
        # Test that we can forcibly ignore ETags
        uri = urllib.parse.urljoin(base, "reflector/reflector.cgi")
        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")

        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'max-age=0'})
        d = self.reflector(content)
        self.assertTrue('HTTP_IF_NONE_MATCH' in d)

        self.http.ignore_etag = True
        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'max-age=0'})
        d = self.reflector(content)
        self.assertEqual(response.fromcache, False)
        self.assertFalse('HTTP_IF_NONE_MATCH' in d)

    def testOverrideEtag(self):
        # Test that an explicit if-none-match header overrides the cached ETag
        uri = urllib.parse.urljoin(base, "reflector/reflector.cgi")
        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")

        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'max-age=0'})
        d = self.reflector(content)
        self.assertTrue('HTTP_IF_NONE_MATCH' in d)
        self.assertNotEqual(d['HTTP_IF_NONE_MATCH'], "fred")

        (response, content) = self.http.request(uri, "GET", headers={'cache-control': 'max-age=0', 'if-none-match': 'fred'})
        d = self.reflector(content)
        self.assertTrue('HTTP_IF_NONE_MATCH' in d)
        self.assertEqual(d['HTTP_IF_NONE_MATCH'], "fred")

    # MAP - commented this out because it consistently fails
    # def testGet304EndToEnd(self):
    #     # Test that end to end headers get overwritten in the cache
    #     uri = urllib.parse.urljoin(base, "304/end2end.cgi")
    #     (response, content) = self.http.request(uri, "GET")
    #     self.assertNotEqual(response['etag'], "")
    #     old_date = response['date']
    #     time.sleep(2)
    #
    #     (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'max-age=0'})
    #     # The response should be from the cache, but the Date: header should be updated.
    #     new_date = response['date']
    #     self.assertNotEqual(new_date, old_date)
    #     self.assertEqual(response.status, 200)
    #     self.assertEqual(response.fromcache, True)

    def testGet304LastModified(self):
        # Test that we can still handle a 304
        # by only using the last-modified cache validator.
        uri = urllib.parse.urljoin(base, "304/last-modified-only/last-modified-only.txt")
        (response, content) = self.http.request(uri, "GET")

        self.assertNotEqual(response['last-modified'], "")
        (response, content) = self.http.request(uri, "GET")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

    def testGet307(self):
        # Test that we do follow 307 redirects but
        # do not cache the 307
        uri = urllib.parse.urljoin(base, "307/onestep.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 307)
        self.assertEqual(response.previous.fromcache, False)

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.assertEqual(content, b"This is the final destination.\n")
        self.assertEqual(response.previous.status, 307)
        self.assertEqual(response.previous.fromcache, False)

    def testGet410(self):
        # Test that we pass 410's through
        uri = urllib.parse.urljoin(base, "410/410.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 410)

    def testVaryHeaderSimple(self):
        """
        RFC 2616 13.6
        When the cache receives a subsequent request whose Request-URI
        specifies one or more cache entries including a Vary header field,
        the cache MUST NOT use such a cache entry to construct a response
        to the new request unless all of the selecting request-headers
        present in the new request match the corresponding stored
        request-headers in the original request.
        """
        # test that the vary header is sent
        uri = urllib.parse.urljoin(base, "vary/accept.asis")
        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        self.assertEqual(response.status, 200)
        self.assertTrue('vary' in response)

        # get the resource again, from the cache since the Accept header in
        # this request is the same as in the original request
        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True, msg="Should be from cache")

        # get the resource again, not from cache since the Accept header does not match
        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/html'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False, msg="Should not be from cache")

        # get the resource again, without any Accept header, so again no match
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False, msg="Should not be from cache")

    def testNoVary(self):
        pass
        # When there is no Vary header, a different Accept header (e.g.) should
        # not affect whether the cache is used.
        # test that the vary header is not sent
        # uri = urllib.parse.urljoin(base, "vary/no-vary.asis")
        # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        # self.assertEqual(response.status, 200)
        # self.assertFalse('vary' in response)
        #
        # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        # self.assertEqual(response.status, 200)
        # self.assertEqual(response.fromcache, True, msg="Should be from cache")
        #
        # (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/html'})
        # self.assertEqual(response.status, 200)
        # self.assertEqual(response.fromcache, True, msg="Should be from cache")

    def testVaryHeaderDouble(self):
        uri = urllib.parse.urljoin(base, "vary/accept-double.asis")
        (response, content) = self.http.request(uri, "GET", headers={
            'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
        self.assertEqual(response.status, 200)
        self.assertTrue('vary' in response)

        # we are from cache
        (response, content) = self.http.request(uri, "GET", headers={
            'Accept': 'text/plain', 'Accept-Language': 'da, en-gb;q=0.8, en;q=0.7'})
        self.assertEqual(response.fromcache, True, msg="Should be from cache")

        (response, content) = self.http.request(uri, "GET", headers={'Accept': 'text/plain'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

        # get the resource again, not from cache, the varied headers don't match exactly
        (response, content) = self.http.request(uri, "GET", headers={'Accept-Language': 'da'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False, msg="Should not be from cache")

    def testVaryUnusedHeader(self):
        # A header's value is not considered to vary if it's not used at all.
        uri = urllib.parse.urljoin(base, "vary/unused-header.asis")
        (response, content) = self.http.request(uri, "GET", headers={
            'Accept': 'text/plain'})
        self.assertEqual(response.status, 200)
        self.assertTrue('vary' in response)

        # we are from cache
        (response, content) = self.http.request(uri, "GET", headers={
            'Accept': 'text/plain'})
        self.assertEqual(response.fromcache, True, msg="Should be from cache")

    def testHeadGZip(self):
        # Test that we don't try to decompress a HEAD response
        uri = urllib.parse.urljoin(base, "gzip/final-destination.txt")
        (response, content) = self.http.request(uri, "HEAD")
        self.assertEqual(response.status, 200)
        self.assertNotEqual(int(response['content-length']), 0)
        self.assertEqual(content, b"")

    def testGetGZip(self):
        # Test that we support gzip compression
        uri = urllib.parse.urljoin(base, "gzip/final-destination.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertFalse('content-encoding' in response)
        self.assertTrue('-content-encoding' in response)
        self.assertEqual(int(response['content-length']), len(b"This is the final destination.\n"))
        self.assertEqual(content, b"This is the final destination.\n")

    def testPostAndGZipResponse(self):
        # Test that a gzip-encoded response to a POST is decompressed as well
        uri = urllib.parse.urljoin(base, "gzip/post.cgi")
        (response, content) = self.http.request(uri, "POST", body=" ")
        self.assertEqual(response.status, 200)
        self.assertFalse('content-encoding' in response)
        self.assertTrue('-content-encoding' in response)

    def testGetGZipFailure(self):
        # Test that we raise a good exception when gzip decompression fails
        self.http.force_exception_to_status_code = False
        uri = urllib.parse.urljoin(base, "gzip/failed-compression.asis")
        try:
            (response, content) = self.http.request(uri, "GET")
            self.fail("Should never reach here")
        except httplib2.FailedToDecompressContent:
            pass
        except Exception:
            self.fail("Threw wrong kind of exception")

        # Re-run the test with exceptions turned off
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 500)
        self.assertTrue(response.reason.startswith("Content purported"))

    def testIndividualTimeout(self):
        # Test that a per-Http timeout is reported as a 408 when
        # exceptions are converted to status codes
        uri = urllib.parse.urljoin(base, "timeout/timeout.cgi")
        http = httplib2.Http(timeout=1)
        http.force_exception_to_status_code = True

        (response, content) = http.request(uri)
        self.assertEqual(response.status, 408)
        self.assertTrue(response.reason.startswith("Request Timeout"))
        self.assertTrue(content.startswith(b"Request Timeout"))

    def testGetDeflate(self):
        # Test that we support deflate compression
        uri = urllib.parse.urljoin(base, "deflate/deflated.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertFalse('content-encoding' in response)
        self.assertEqual(int(response['content-length']), len("This is the final destination."))
        self.assertEqual(content, b"This is the final destination.")

    def testGetDeflateFailure(self):
        # Test that we raise a good exception when deflate decompression fails
        self.http.force_exception_to_status_code = False

        uri = urllib.parse.urljoin(base, "deflate/failed-compression.asis")
        try:
            (response, content) = self.http.request(uri, "GET")
            self.fail("Should never reach here")
        except httplib2.FailedToDecompressContent:
            pass
        except Exception:
            self.fail("Threw wrong kind of exception")

        # Re-run the test with exceptions turned off
        self.http.force_exception_to_status_code = True

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 500)
        self.assertTrue(response.reason.startswith("Content purported"))

    def testGetDuplicateHeaders(self):
        # Test that duplicate headers get concatenated via ','
        uri = urllib.parse.urljoin(base, "duplicate-headers/multilink.asis")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(content, b"This is content\n")
        self.assertEqual(response['link'].split(",")[0], '<http://bitworking.org>; rel="home"; title="BitWorking"')

    def testGetCacheControlNoCache(self):
        # Test Cache-Control: no-cache on requests
        uri = urllib.parse.urljoin(base, "304/test_etag.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-cache'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlPragmaNoCache(self):
        # Test Pragma: no-cache on requests
        uri = urllib.parse.urljoin(base, "304/test_etag.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertNotEqual(response['etag'], "")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)

        (response, content) = self.http.request(uri, "GET", headers={'Pragma': 'no-cache'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlNoStoreRequest(self):
        # A no-store request means that the response should not be stored.
        uri = urllib.parse.urljoin(base, "304/test_etag.txt")

        (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

        (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlNoStoreResponse(self):
        # A no-store response means that the response should not be stored.
        uri = urllib.parse.urljoin(base, "no-store/no-store.asis")

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testGetCacheControlNoCacheNoStoreRequest(self):
        # Test that a no-store, no-cache clears the entry from the cache
        # even if it was cached previously.
        uri = urllib.parse.urljoin(base, "304/test_etag.txt")

        (response, content) = self.http.request(uri, "GET")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.fromcache, True)
        (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'})
        (response, content) = self.http.request(uri, "GET", headers={'Cache-Control': 'no-store, no-cache'})
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)

    def testUpdateInvalidatesCache(self):
        # Test that calling PUT or DELETE on a
        # URI that is cached invalidates that cache entry.
        uri = urllib.parse.urljoin(base, "304/test_etag.txt")

        (response, content) = self.http.request(uri, "GET")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.fromcache, True)
        (response, content) = self.http.request(uri, "DELETE")
        self.assertEqual(response.status, 405)

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.fromcache, False)

    def testUpdateUsesCachedETag(self):
        # Test that we natively support http://www.w3.org/1999/04/Editing/
        uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        (response, content) = self.http.request(uri, "PUT", body="foo")
        self.assertEqual(response.status, 200)
        (response, content) = self.http.request(uri, "PUT", body="foo")
        self.assertEqual(response.status, 412)

    def testUpdatePatchUsesCachedETag(self):
        # Test that we natively support http://www.w3.org/1999/04/Editing/
        uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        (response, content) = self.http.request(uri, "PATCH", body="foo")
        self.assertEqual(response.status, 200)
        (response, content) = self.http.request(uri, "PATCH", body="foo")
        self.assertEqual(response.status, 412)

    def testUpdateUsesCachedETagAndOCMethod(self):
        # Test that we natively support http://www.w3.org/1999/04/Editing/
        uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        self.http.optimistic_concurrency_methods.append("DELETE")
        (response, content) = self.http.request(uri, "DELETE")
        self.assertEqual(response.status, 200)

    def testUpdateUsesCachedETagOverridden(self):
        # Test that we natively support http://www.w3.org/1999/04/Editing/
        uri = urllib.parse.urljoin(base, "conditional-updates/test.cgi")

        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, False)
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)
        self.assertEqual(response.fromcache, True)
        (response, content) = self.http.request(uri, "PUT", body="foo", headers={'if-match': 'fred'})
        self.assertEqual(response.status, 412)

| 945 | def testBasicAuth(self):
|
| 946 | # Test Basic Authentication
|
| 947 | uri = urllib.parse.urljoin(base, "basic/file.txt")
|
| 948 | (response, content) = self.http.request(uri, "GET")
|
| 949 | self.assertEqual(response.status, 401)
|
| 950 |
|
| 951 | uri = urllib.parse.urljoin(base, "basic/")
|
| 952 | (response, content) = self.http.request(uri, "GET")
|
| 953 | self.assertEqual(response.status, 401)
|
| 954 |
|
| 955 | self.http.add_credentials('joe', 'password')
|
| 956 | (response, content) = self.http.request(uri, "GET")
|
| 957 | self.assertEqual(response.status, 200)
|
| 958 |
|
| 959 | uri = urllib.parse.urljoin(base, "basic/file.txt")
|
| 960 | (response, content) = self.http.request(uri, "GET")
|
| 961 | self.assertEqual(response.status, 200)
|
| 962 |
|
    def testBasicAuthWithDomain(self):
        # Test Basic Authentication when credentials are restricted to a domain
        uri = urllib.parse.urljoin(base, "basic/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urllib.parse.urljoin(base, "basic/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('joe', 'password', "example.org")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urllib.parse.urljoin(base, "basic/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        domain = urllib.parse.urlparse(base)[1]
        self.http.add_credentials('joe', 'password', domain)
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urllib.parse.urljoin(base, "basic/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testBasicAuthTwoDifferentCredentials(self):
        # Test Basic Authentication with multiple sets of credentials
        uri = urllib.parse.urljoin(base, "basic2/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urllib.parse.urljoin(base, "basic2/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('fred', 'barney')
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urllib.parse.urljoin(base, "basic2/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testBasicAuthNested(self):
        # Test Basic Authentication with resources that are nested
        uri = urllib.parse.urljoin(base, "basic-nested/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        uri = urllib.parse.urljoin(base, "basic-nested/subdir")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        # Now add in credentials one at a time and test.
        self.http.add_credentials('joe', 'password')

        uri = urllib.parse.urljoin(base, "basic-nested/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urllib.parse.urljoin(base, "basic-nested/subdir")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('fred', 'barney')

        uri = urllib.parse.urljoin(base, "basic-nested/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urllib.parse.urljoin(base, "basic-nested/subdir")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testDigestAuth(self):
        # Test that we support Digest Authentication
        uri = urllib.parse.urljoin(base, "digest/")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 401)

        self.http.add_credentials('joe', 'password')
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

        uri = urllib.parse.urljoin(base, "digest/file.txt")
        (response, content) = self.http.request(uri, "GET")
        self.assertEqual(response.status, 200)

    def testDigestAuthNextNonceAndNC(self):
        # Test that if the server sets nextnonce that we reset
        # the nonce count back to 1
        uri = urllib.parse.urljoin(base, "digest/file.txt")
        self.http.add_credentials('joe', 'password')
        (response, content) = self.http.request(uri, "GET", headers={"cache-control": "no-cache"})
        info = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)
        (response, content) = self.http.request(uri, "GET", headers={"cache-control": "no-cache"})
        info2 = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

        if 'nextnonce' in info:
            self.assertEqual(info2['nc'], 1)

    def testDigestAuthStale(self):
        # Test that we can handle a nonce becoming stale
        uri = urllib.parse.urljoin(base, "digest-expire/file.txt")
        self.http.add_credentials('joe', 'password')
        (response, content) = self.http.request(uri, "GET", headers={"cache-control": "no-cache"})
        info = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

        # Sleep long enough that the nonce becomes stale
        time.sleep(3)

        (response, content) = self.http.request(uri, "GET", headers={"cache-control": "no-cache"})
        self.assertFalse(response.fromcache)
        self.assertTrue(response._stale_digest)
        info3 = httplib2._parse_www_authenticate(response, 'authentication-info')
        self.assertEqual(response.status, 200)

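    # The reflector CGI echoes its CGI environment back as key=value lines;
    # reflector() parses that body into a dict for the assertion below.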
    def reflector(self, content):
        return dict([tuple(x.split("=", 1)) for x in content.decode('utf-8').strip().split("\n")])

    def testReflector(self):
        uri = urllib.parse.urljoin(base, "reflector/reflector.cgi")
        (response, content) = self.http.request(uri, "GET")
        d = self.reflector(content)
        self.assertTrue('HTTP_USER_AGENT' in d)

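    # After a normal request the pooled connection keeps its socket open; a
    # 'connection: close' request header should make httplib2 close it.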
    def testConnectionClose(self):
        uri = "http://www.google.com/"
        (response, content) = self.http.request(uri, "GET")
        for c in self.http.connections.values():
            self.assertNotEqual(None, c.sock)
        (response, content) = self.http.request(uri, "GET", headers={"connection": "close"})
        for c in self.http.connections.values():
            self.assertEqual(None, c.sock)

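    # An Http object should survive a pickle round trip with its state intact.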
    def testPickleHttp(self):
        pickled_http = pickle.dumps(self.http)
        new_http = pickle.loads(pickled_http)

        self.assertEqual(sorted(new_http.__dict__.keys()),
                         sorted(self.http.__dict__.keys()))
        for key in new_http.__dict__:
            if key in ('certificates', 'credentials'):
                self.assertEqual(new_http.__dict__[key].credentials,
                                 self.http.__dict__[key].credentials)
            elif key == 'cache':
                self.assertEqual(new_http.__dict__[key].cache,
                                 self.http.__dict__[key].cache)
            else:
                self.assertEqual(new_http.__dict__[key],
                                 self.http.__dict__[key])

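    # Open connections are not picklable, so they are dropped from the pickled copy.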
    def testPickleHttpWithConnection(self):
        self.http.request('http://bitworking.org',
                          connection_type=_MyHTTPConnection)
        pickled_http = pickle.dumps(self.http)
        new_http = pickle.loads(pickled_http)

        self.assertEqual(list(self.http.connections.keys()),
                         ['http:bitworking.org'])
        self.assertEqual(new_http.connections, {})

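    # A monkey-patched request method must not be baked into the pickle.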
    def testPickleCustomRequestHttp(self):
        def dummy_request(*args, **kwargs):
            # dummy_request is never actually called in this test, so
            # new_request does not need to exist; only the pickling matters.
            return new_request(*args, **kwargs)
        dummy_request.dummy_attr = 'dummy_value'

        self.http.request = dummy_request
        pickled_http = pickle.dumps(self.http)
        self.assertFalse(b"S'request'" in pickled_http)

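# Re-run the whole HttpTest suite against a memcached-backed cache, but only
# when the python memcache client library is available.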
try:
    import memcache

    class HttpTestMemCached(HttpTest):
        def setUp(self):
            self.cache = memcache.Client(['127.0.0.1:11211'], debug=0)
            #self.cache = memcache.Client(['10.0.0.4:11211'], debug=1)
            self.http = httplib2.Http(self.cache)
            self.cache.flush_all()
            # Not exactly sure why the sleep is needed here, but
            # if not present then some unit tests that rely on caching
            # fail. Memcached seems to lose some sets immediately
            # after a flush_all if the set is to a value that
            # was previously cached. (Maybe the flush is handled async?)
            time.sleep(1)
            self.http.clear_credentials()
except ImportError:
    pass


# ------------------------------------------------------------------------

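# Tests for httplib2's internal helper functions. These run entirely offline
# and make no network requests.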
class HttpPrivateTest(unittest.TestCase):

    def testParseCacheControl(self):
        # Test that we can parse the Cache-Control header
        self.assertEqual({}, httplib2._parse_cache_control({}))
        self.assertEqual({'no-cache': 1}, httplib2._parse_cache_control({'cache-control': ' no-cache'}))
        cc = httplib2._parse_cache_control({'cache-control': ' no-cache, max-age = 7200'})
        self.assertEqual(cc['no-cache'], 1)
        self.assertEqual(cc['max-age'], '7200')
        cc = httplib2._parse_cache_control({'cache-control': ' , '})
        self.assertEqual(cc[''], 1)

        try:
            cc = httplib2._parse_cache_control({'cache-control': 'Max-age=3600;post-check=1800,pre-check=3600'})
            self.assertTrue("max-age" in cc)
        except Exception:
            self.fail("Should not throw exception")

    def testNormalizeHeaders(self):
        # Test that we normalize headers to lowercase
        h = httplib2._normalize_headers({'Cache-Control': 'no-cache', 'Other': 'Stuff'})
        self.assertTrue('cache-control' in h)
        self.assertTrue('other' in h)
        self.assertEqual('Stuff', h['other'])

    def testExpirationModelTransparent(self):
        # Test that no-cache makes our request TRANSPARENT
        response_headers = {
            'cache-control': 'max-age=7200'
        }
        request_headers = {
            'cache-control': 'no-cache'
        }
        self.assertEqual("TRANSPARENT", httplib2._entry_disposition(response_headers, request_headers))

    def testMaxAgeNonNumeric(self):
        # Test that a non-numeric max-age makes the entry STALE
        response_headers = {
            'cache-control': 'max-age=fred, min-fresh=barney'
        }
        request_headers = {
        }
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelNoCacheResponse(self):
        # The date and expires point to an entry that should be
        # FRESH, but the no-cache overrides that.
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)),
            'cache-control': 'no-cache'
        }
        request_headers = {
        }
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelStaleRequestMustReval(self):
        # must-revalidate forces STALE
        self.assertEqual("STALE", httplib2._entry_disposition({}, {'cache-control': 'must-revalidate'}))

    def testExpirationModelStaleResponseMustReval(self):
        # must-revalidate forces STALE
        self.assertEqual("STALE", httplib2._entry_disposition({'cache-control': 'must-revalidate'}, {}))

    def testExpirationModelFresh(self):
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
            'cache-control': 'max-age=2'
        }
        request_headers = {
        }
        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
        time.sleep(3)
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationMaxAge0(self):
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime()),
            'cache-control': 'max-age=0'
        }
        request_headers = {
        }
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelDateAndExpires(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)),
        }
        request_headers = {
        }
        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))
        time.sleep(3)
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpiresZero(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'expires': "0",
        }
        request_headers = {
        }
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelDateOnly(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+3)),
        }
        request_headers = {
        }
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelOnlyIfCached(self):
        response_headers = {
        }
        request_headers = {
            'cache-control': 'only-if-cached',
        }
        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelMaxAgeBoth(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'cache-control': 'max-age=2'
        }
        request_headers = {
            'cache-control': 'max-age=0'
        }
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelDateAndExpiresMinFresh1(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+2)),
        }
        request_headers = {
            'cache-control': 'min-fresh=2'
        }
        self.assertEqual("STALE", httplib2._entry_disposition(response_headers, request_headers))

    def testExpirationModelDateAndExpiresMinFresh2(self):
        now = time.time()
        response_headers = {
            'date': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now)),
            'expires': time.strftime("%a, %d %b %Y %H:%M:%S GMT", time.gmtime(now+4)),
        }
        request_headers = {
            'cache-control': 'min-fresh=2'
        }
        self.assertEqual("FRESH", httplib2._entry_disposition(response_headers, request_headers))

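    # _parse_www_authenticate returns an empty dict when no challenge header is present.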
    def testParseWWWAuthenticateEmpty(self):
        res = httplib2._parse_www_authenticate({})
        self.assertEqual(len(list(res.keys())), 0)

    def testParseWWWAuthenticate(self):
        # different uses of spaces around commas
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Test realm="test realm" , foo=foo ,bar="bar", baz=baz,qux=qux'})
        self.assertEqual(len(list(res.keys())), 1)
        self.assertEqual(len(list(res['test'].keys())), 5)

        # tokens with non-alphanum
        res = httplib2._parse_www_authenticate({'www-authenticate': 'T*!%#st realm=to*!%#en, to*!%#en="quoted string"'})
        self.assertEqual(len(list(res.keys())), 1)
        self.assertEqual(len(list(res['t*!%#st'].keys())), 2)

        # quoted string with quoted pairs
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Test realm="a \\"test\\" realm"'})
        self.assertEqual(len(list(res.keys())), 1)
        self.assertEqual(res['test']['realm'], 'a "test" realm')

    def testParseWWWAuthenticateStrict(self):
        httplib2.USE_WWW_AUTH_STRICT_PARSING = 1
        self.testParseWWWAuthenticate()
        httplib2.USE_WWW_AUTH_STRICT_PARSING = 0

    def testParseWWWAuthenticateBasic(self):
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Basic realm="me"'})
        basic = res['basic']
        self.assertEqual('me', basic['realm'])

        res = httplib2._parse_www_authenticate({'www-authenticate': 'Basic realm="me", algorithm="MD5"'})
        basic = res['basic']
        self.assertEqual('me', basic['realm'])
        self.assertEqual('MD5', basic['algorithm'])

        res = httplib2._parse_www_authenticate({'www-authenticate': 'Basic realm="me", algorithm=MD5'})
        basic = res['basic']
        self.assertEqual('me', basic['realm'])
        self.assertEqual('MD5', basic['algorithm'])

    def testParseWWWAuthenticateBasic2(self):
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Basic realm="me",other="fred" '})
        basic = res['basic']
        self.assertEqual('me', basic['realm'])
        self.assertEqual('fred', basic['other'])

    def testParseWWWAuthenticateBasic3(self):
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Basic REAlm="me" '})
        basic = res['basic']
        self.assertEqual('me', basic['realm'])

    def testParseWWWAuthenticateDigest(self):
        res = httplib2._parse_www_authenticate({'www-authenticate':
                'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41"'})
        digest = res['digest']
        self.assertEqual('testrealm@host.com', digest['realm'])
        self.assertEqual('auth,auth-int', digest['qop'])

    def testParseWWWAuthenticateMultiple(self):
        res = httplib2._parse_www_authenticate({'www-authenticate':
                'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41" Basic REAlm="me" '})
        digest = res['digest']
        self.assertEqual('testrealm@host.com', digest['realm'])
        self.assertEqual('auth,auth-int', digest['qop'])
        self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
        self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
        basic = res['basic']
        self.assertEqual('me', basic['realm'])

    def testParseWWWAuthenticateMultiple2(self):
        # Handle an added comma between challenges, which might get thrown in if the challenges were
        # originally sent in separate www-authenticate headers.
        res = httplib2._parse_www_authenticate({'www-authenticate':
                'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me" '})
        digest = res['digest']
        self.assertEqual('testrealm@host.com', digest['realm'])
        self.assertEqual('auth,auth-int', digest['qop'])
        self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
        self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
        basic = res['basic']
        self.assertEqual('me', basic['realm'])

    def testParseWWWAuthenticateMultiple3(self):
        # Handle an added comma between challenges, which might get thrown in if the challenges were
        # originally sent in separate www-authenticate headers.
        res = httplib2._parse_www_authenticate({'www-authenticate':
                'Digest realm="testrealm@host.com", qop="auth,auth-int", nonce="dcd98b7102dd2f0e8b11d0f600bfb0c093", opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'})
        digest = res['digest']
        self.assertEqual('testrealm@host.com', digest['realm'])
        self.assertEqual('auth,auth-int', digest['qop'])
        self.assertEqual('dcd98b7102dd2f0e8b11d0f600bfb0c093', digest['nonce'])
        self.assertEqual('5ccc069c403ebaf9f0171e9517f40e41', digest['opaque'])
        basic = res['basic']
        self.assertEqual('me', basic['realm'])
        wsse = res['wsse']
        self.assertEqual('foo', wsse['realm'])
        self.assertEqual('UsernameToken', wsse['profile'])

    def testParseWWWAuthenticateMultiple4(self):
        res = httplib2._parse_www_authenticate({'www-authenticate':
                'Digest realm="test-real.m@host.com", qop \t=\t"\tauth,auth-int", nonce="(*)&^&$%#",opaque="5ccc069c403ebaf9f0171e9517f40e41", Basic REAlm="me", WSSE realm="foo", profile="UsernameToken"'})
        digest = res['digest']
        self.assertEqual('test-real.m@host.com', digest['realm'])
        self.assertEqual('\tauth,auth-int', digest['qop'])
        self.assertEqual('(*)&^&$%#', digest['nonce'])

    def testParseWWWAuthenticateMoreQuoteCombos(self):
        res = httplib2._parse_www_authenticate({'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'})
        digest = res['digest']
        self.assertEqual('myrealm', digest['realm'])

    def testParseWWWAuthenticateMalformed(self):
        try:
            res = httplib2._parse_www_authenticate({'www-authenticate': 'OAuth "Facebook Platform" "invalid_token" "Invalid OAuth access token."'})
            self.fail("should raise an exception")
        except httplib2.MalformedHeader:
            pass

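    # Build a Digest Authorization header with a fixed cnonce and compare it
    # against a known-good value.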
    def testDigestObject(self):
        credentials = ('joe', 'password')
        host = None
        request_uri = '/projects/httplib2/test/digest/'
        headers = {}
        response = {
            'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth"'
        }
        content = b""

        d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
        d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46")
        our_request = "authorization: %s" % headers['authorization']
        working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46"'
        self.assertEqual(our_request, working_request)

    def testDigestObjectWithOpaque(self):
        credentials = ('joe', 'password')
        host = None
        request_uri = '/projects/httplib2/test/digest/'
        headers = {}
        response = {
            'www-authenticate': 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", opaque="atestopaque"'
        }
        content = ""

        d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
        d.request("GET", request_uri, headers, content, cnonce="33033375ec278a46")
        our_request = "authorization: %s" % headers['authorization']
        working_request = 'authorization: Digest username="joe", realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", uri="/projects/httplib2/test/digest/", algorithm=MD5, response="97ed129401f7cdc60e5db58a80f3ea8b", qop=auth, nc=00000001, cnonce="33033375ec278a46", opaque="atestopaque"'
        self.assertEqual(our_request, working_request)

    def testDigestObjectStale(self):
        credentials = ('joe', 'password')
        host = None
        request_uri = '/projects/httplib2/test/digest/'
        headers = {}
        response = httplib2.Response({})
        response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'
        response.status = 401
        content = b""
        d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
        # Returns true to force a retry
        self.assertTrue(d.response(response, content))

    def testDigestObjectAuthInfo(self):
        credentials = ('joe', 'password')
        host = None
        request_uri = '/projects/httplib2/test/digest/'
        headers = {}
        response = httplib2.Response({})
        response['www-authenticate'] = 'Digest realm="myrealm", nonce="Ygk86AsKBAA=3516200d37f9a3230352fde99977bd6d472d4306", algorithm=MD5, qop="auth", stale=true'
        response['authentication-info'] = 'nextnonce="fred"'
        content = b""
        d = httplib2.DigestAuthentication(credentials, host, request_uri, headers, response, content, None)
        # Returns false (no retry); the nextnonce from authentication-info
        # updates the challenge and resets the nonce count.
        self.assertFalse(d.response(response, content))
        self.assertEqual('fred', d.challenge['nonce'])
        self.assertEqual(1, d.challenge['nc'])

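    # Check the WSSE username token digest against a known test vector.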
    def testWsseAlgorithm(self):
        digest = httplib2._wsse_username_token("d36e316282959a9ed4c89851497a717f", "2003-12-15T14:43:07Z", "taadtaadpstcsm")
        expected = b"quR/EWLAV4xLf9Zqyw4pDmfV9OY="
        self.assertEqual(expected, digest)

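    # _get_end2end_headers should drop hop-by-hop headers and any header named
    # in the Connection header.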
    def testEnd2End(self):
        # one end to end header
        response = {'content-type': 'application/atom+xml', 'te': 'deflate'}
        end2end = httplib2._get_end2end_headers(response)
        self.assertTrue('content-type' in end2end)
        self.assertTrue('te' not in end2end)
        self.assertTrue('connection' not in end2end)

        # one end to end header that gets eliminated
        response = {'connection': 'content-type', 'content-type': 'application/atom+xml', 'te': 'deflate'}
        end2end = httplib2._get_end2end_headers(response)
        self.assertTrue('content-type' not in end2end)
        self.assertTrue('te' not in end2end)
        self.assertTrue('connection' not in end2end)

        # Degenerate case of no headers
        response = {}
        end2end = httplib2._get_end2end_headers(response)
        self.assertEqual(0, len(end2end))

        # Degenerate case of connection referring to a header not passed in
        response = {'connection': 'content-type'}
        end2end = httplib2._get_end2end_headers(response)
        self.assertEqual(0, len(end2end))

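# Tests for proxy configuration parsed from URLs and from environment variables.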
class TestProxyInfo(unittest.TestCase):
    def setUp(self):
        self.orig_env = dict(os.environ)

    def tearDown(self):
        os.environ.clear()
        os.environ.update(self.orig_env)

    def test_from_url(self):
        pi = httplib2.proxy_info_from_url('http://myproxy.example.com')
        self.assertEqual(pi.proxy_host, 'myproxy.example.com')
        self.assertEqual(pi.proxy_port, 80)
        self.assertEqual(pi.proxy_user, None)

    def test_from_url_ident(self):
        pi = httplib2.proxy_info_from_url('http://zoidberg:fish@someproxy:99')
        self.assertEqual(pi.proxy_host, 'someproxy')
        self.assertEqual(pi.proxy_port, 99)
        self.assertEqual(pi.proxy_user, 'zoidberg')
        self.assertEqual(pi.proxy_pass, 'fish')

    def test_from_env(self):
        os.environ['http_proxy'] = 'http://myproxy.example.com:8080'
        pi = httplib2.proxy_info_from_environment()
        self.assertEqual(pi.proxy_host, 'myproxy.example.com')
        self.assertEqual(pi.proxy_port, 8080)

    def test_from_env_no_proxy(self):
        os.environ['http_proxy'] = 'http://myproxy.example.com:80'
        os.environ['https_proxy'] = 'http://myproxy.example.com:81'
        pi = httplib2.proxy_info_from_environment('https')
        self.assertEqual(pi.proxy_host, 'myproxy.example.com')
        self.assertEqual(pi.proxy_port, 81)

    def test_from_env_none(self):
        os.environ.clear()
        pi = httplib2.proxy_info_from_environment()
        self.assertEqual(pi, None)


if __name__ == '__main__':
    unittest.main()