"""An extensible library for opening URLs using a variety of protocols

The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below).  It opens the URL and returns the results as a file-like
object; the returned object has some extra methods described below.

The OpenerDirector manages a collection of Handler objects that do
all the actual work.  Each Handler implements a particular protocol or
option.  The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL.  For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns.  The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
deals with digest authentication.

urlopen(url, data=None) -- Basic usage is the same as the original
urllib.  Pass the URL and optionally data to post to an HTTP URL, and
get a file-like object back.  One difference is that you can also pass
a Request instance instead of a URL.  Raises a URLError (subclass of
IOError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response (see the error-handling example below).

build_opener -- Function that creates a new OpenerDirector instance.
Will install the default handlers.  Accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate.  If one of the arguments is a subclass of a default
handler, the argument will be installed instead of the default.

install_opener -- Installs a new opener as the default opener.

objects of interest:

OpenerDirector -- Sets up the User Agent as the Python-urllib client and
manages the Handler classes, while dealing with requests and responses.

Request -- An object that encapsulates the state of a request.  The
state can be as simple as the URL.  It can also include extra HTTP
headers, e.g. a User-Agent.

BaseHandler -- Base class for all Handlers; concrete handlers derive
from it and are registered with an OpenerDirector.

internals:
BaseHandler and parent
_call_chain conventions

Example usage:

import urllib.request

# set up authentication info
authinfo = urllib.request.HTTPBasicAuthHandler()
authinfo.add_password(realm='PDQ Application',
                      uri='https://mahler:8092/site-updates.py',
                      user='klem',
                      passwd='geheim$parole')

proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"})

# build a new opener that adds authentication and caching FTP handlers
opener = urllib.request.build_opener(proxy_support, authinfo,
                                     urllib.request.CacheFTPHandler)

# install it
urllib.request.install_opener(opener)

f = urllib.request.urlopen('http://www.python.org/')
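
# A minimal error-handling sketch (the URL below is a placeholder, not a
# real endpoint).  HTTPError exposes the status code and can also be read
# like a regular response; URLError covers failures to reach the server.
import urllib.error

try:
    f = urllib.request.urlopen('http://www.example.com/protected/')
except urllib.error.HTTPError as e:
    print('HTTP error:', e.code)
except urllib.error.URLError as e:
    print('failed to reach the server:', e.reason)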
"""

# XXX issues:
# If an authentication error handler tries to perform authentication
# but fails, how should the error be signalled?  The client needs to
# know the HTTP error code.  But if the handler knows what the problem
# was, e.g., that it didn't recognize the hash algorithm requested in
# the challenge, it would be good to pass that information along to
# the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation

# Possible extensions:
# complex proxies  XXX not sure what exactly was meant by this
# abstract factory for opener

import base64
import bisect
import email
import hashlib
import http.client
import io
import os
import posixpath
import random
import re
import socket
import sys
import time
import collections

from urllib.error import URLError, HTTPError, ContentTooShortError
from urllib.parse import (
    urlparse, urlsplit, urljoin, unwrap, quote, unquote,
    splittype, splithost, splitport, splituser, splitpasswd,
    splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse)
from urllib.response import addinfourl, addclosehook

# check for SSL
try:
    import ssl
except ImportError:
    _have_ssl = False
else:
    _have_ssl = True

# used in User-Agent header sent
__version__ = sys.version[:3]

_opener = None
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
            *, cafile=None, capath=None):
    global _opener
    if cafile or capath:
        if not _have_ssl:
            raise ValueError('SSL support not available')
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        if cafile or capath:
            context.verify_mode = ssl.CERT_REQUIRED
            context.load_verify_locations(cafile, capath)
            check_hostname = True
        else:
            check_hostname = False
        https_handler = HTTPSHandler(context=context, check_hostname=check_hostname)
        opener = build_opener(https_handler)
    elif _opener is None:
        _opener = opener = build_opener()
    else:
        opener = _opener
    return opener.open(url, data, timeout)

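
# A minimal sketch of requesting a verified HTTPS URL through the cafile
# argument handled above (the URL and certificate path are placeholders,
# not real resources):
#
#     f = urlopen('https://www.example.com/',
#                 cafile='/path/to/ca-bundle.crt')
#     data = f.read()
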
def install_opener(opener):
    global _opener
    _opener = opener

# TODO(jhylton): Make this work with the same global opener.
_urlopener = None
def urlretrieve(url, filename=None, reporthook=None, data=None):
    global _urlopener
    if not _urlopener:
        _urlopener = FancyURLopener()
    return _urlopener.retrieve(url, filename, reporthook, data)

def urlcleanup():
    if _urlopener:
        _urlopener.cleanup()
    global _opener
    if _opener:
        _opener = None

# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$", re.ASCII)
def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.

    """
    url = request.full_url
    host = urlparse(url)[1]
    if host == "":
        host = request.get_header("Host", "")

    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()

class Request:

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.full_url = unwrap(url)
        self.full_url, self.fragment = splittag(self.full_url)
        self.data = data
        self.headers = {}
        self._tunnel_host = None
        for key, value in headers.items():
            self.add_header(key, value)
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable
        self._parse()

    def _parse(self):
        self.type, rest = splittype(self.full_url)
        if self.type is None:
            raise ValueError("unknown url type: %s" % self.full_url)
        self.host, self.selector = splithost(rest)
        if self.host:
            self.host = unquote(self.host)

    def get_method(self):
        if self.data is not None:
            return "POST"
        else:
            return "GET"

    # Begin deprecated methods

    def add_data(self, data):
        self.data = data

    def has_data(self):
        return self.data is not None

    def get_data(self):
        return self.data

    def get_full_url(self):
        if self.fragment:
            return '%s#%s' % (self.full_url, self.fragment)
        else:
            return self.full_url

    def get_type(self):
        return self.type

    def get_host(self):
        return self.host

    def get_selector(self):
        return self.selector

    def is_unverifiable(self):
        return self.unverifiable

    def get_origin_req_host(self):
        return self.origin_req_host

    # End deprecated methods

    def set_proxy(self, host, type):
        if self.type == 'https' and not self._tunnel_host:
            self._tunnel_host = self.host
        else:
            self.type = type
            self.selector = self.full_url
        self.host = host

    def has_proxy(self):
        return self.selector == self.full_url

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def header_items(self):
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return list(hdrs.items())

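
# A minimal sketch of direct Request construction (www.example.com is a
# placeholder host).  Header names are normalized with str.capitalize(),
# and get_method() is inferred from the presence of data:
#
#     req = Request('http://www.example.com/api', data=b'spam=1',
#                   headers={'User-Agent': 'example-client'})
#     req.get_method()              # 'POST', because data is not None
#     req.has_header('User-agent')  # True -- note the capitalization
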
class OpenerDirector:
    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # self.handlers is retained only for backward compatibility
        self.handlers = []
        # manage the individual handlers
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could.  Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)
            result = func(*args)
            if result is not None:
                return result

    def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        # accept a URL or a Request object
        if isinstance(fullurl, str):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.data = data

        req.timeout = timeout
        protocol = req.type

        # pre-process request
        meth_name = protocol+"_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)

        response = self._open(req, data)

        # post-process response
        meth_name = protocol+"_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)

        return response

    def _open(self, req, data=None):
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.type
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)

# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both

def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and, when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    def isclass(obj):
        return isinstance(obj, type) or hasattr(obj, "__bases__")

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(http.client, "HTTPSConnection"):
        default_classes.append(HTTPSHandler)
    skip = set()
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener

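
# A minimal sketch of the handler-method naming convention that
# add_handler() and _call_chain() rely on (the handler class below is
# hypothetical, not part of this module).  A method named
# <protocol>_open opens requests, <protocol>_error_<code> handles a
# specific HTTP status, and <protocol>_request/<protocol>_response pre-
# and post-process them; returning None passes control to the next
# handler in the chain, returning a response object stops it.
#
#     class ExampleNotFoundHandler(BaseHandler):
#         def http_error_404(self, req, fp, code, msg, hdrs):
#             # Returning None lets the next registered handler (or the
#             # default error handler) deal with the 404.
#             return None
#
#     opener = build_opener(ExampleNotFoundHandler())
#     # ExampleNotFoundHandler is not a subclass of any default handler,
#     # so the defaults above are still installed alongside it.
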
class BaseHandler:
    handler_order = 500

    def add_parent(self, parent):
        self.parent = parent

    def close(self):
        # Only exists for backwards compatibility
        pass

    def __lt__(self, other):
        if not hasattr(other, "handler_order"):
            # Try to preserve the old behavior of having custom classes
            # inserted after default ones (works only for custom user
            # classes which are not aware of handler_order).
            return True
        return self.handler_order < other.handler_order


class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses."""
    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        code, msg, hdrs = response.code, response.msg, response.info()

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if not (200 <= code < 300):
            response = self.parent.error(
                'http', request, response, code, msg, hdrs)

        return response

    https_response = http_response

class HTTPDefaultErrorHandler(BaseHandler):
    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.full_url, code, msg, hdrs, fp)

class HTTPRedirectHandler(BaseHandler):
    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST")):
            raise HTTPError(req.full_url, code, msg, headers, fp)

        # Strictly (according to RFC 2616), 301 or 302 in response to
        # a POST MUST NOT cause a redirection without confirmation
        # from the user (of urllib.request, in this case).  In practice,
        # essentially all clients do redirect in this case, so we do
        # the same.
        # be lenient with URIs containing a space
        newurl = newurl.replace(' ', '%20')
        CONTENT_HEADERS = ("content-length", "content-type")
        newheaders = dict((k, v) for k, v in req.headers.items()
                          if k.lower() not in CONTENT_HEADERS)
        return Request(newurl,
                       headers=newheaders,
                       origin_req_host=req.origin_req_host,
                       unverifiable=True)

    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if "location" in headers:
            newurl = headers["location"]
        elif "uri" in headers:
            newurl = headers["uri"]
        else:
            return

        # fix a possible malformed URL
        urlparts = urlparse(newurl)

        # For security reasons we don't allow redirection to anything other
        # than http, https or ftp.

        if urlparts.scheme not in ('http', 'https', 'ftp'):
            raise HTTPError(newurl, code,
                            msg +
                            " - Redirection to url '%s' is not allowed" %
                            newurl,
                            headers, fp)

        if not urlparts.path:
            urlparts = list(urlparts)
            urlparts[2] = "/"
        newurl = urlunparse(urlparts)

        newurl = urljoin(req.full_url, newurl)

        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return

        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.full_url, code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1

        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()

        return self.parent.open(new, timeout=req.timeout)

    http_error_301 = http_error_303 = http_error_307 = http_error_302

    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"

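# A minimal sketch of the redirect_request() contract documented above
# (the subclass is hypothetical): return a Request to follow the
# redirect, raise HTTPError to refuse it, or return None to let another
# handler try.
#
#     class NoRedirectHandler(HTTPRedirectHandler):
#         def redirect_request(self, req, fp, code, msg, headers, newurl):
#             raise HTTPError(req.full_url, code,
#                             "redirects are disabled", headers, fp)
#
#     opener = build_opener(NoRedirectHandler)
#     # Because NoRedirectHandler subclasses HTTPRedirectHandler, it
#     # replaces the default redirect handling in this opener.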

def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')

    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # authority
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.2.
        # and 3.3.), path is empty or starts with '/'
        end = r_scheme.find("/", 2)
        if end == -1:
            end = None
        authority = r_scheme[2:end]
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport

class ProxyHandler(BaseHandler):
    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None):
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        self.proxies = proxies
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                        meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        orig_type = req.type
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            proxy_type = orig_type

        if req.host and proxy_bypass(req.host):
            return None

        if user and password:
            user_pass = '%s:%s' % (unquote(user),
                                   unquote(password))
            creds = base64.b64encode(user_pass.encode()).decode("ascii")
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req, timeout=req.timeout)

class HTTPPasswordMgr:

    def __init__(self):
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, str):
            uri = [uri]
        if realm not in self.passwd:
            self.passwd[realm] = {}
        for default_port in True, False:
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.items():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False

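# A minimal sketch of how the password manager matches credentials (the
# realm, URL and credentials are placeholders).  add_password() stores
# the reduced (authority, path) forms produced by reduce_uri(), and
# find_user_password() returns the entry whose path is a prefix of the
# requested URI (see is_suburi()):
#
#     mgr = HTTPPasswordMgr()
#     mgr.add_password('Example Realm', 'http://www.example.com/api/',
#                      'klem', 'secret')
#     mgr.find_user_password('Example Realm',
#                            'http://www.example.com/api/data')
#     # -> ('klem', 'secret')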

class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):

    def find_user_password(self, realm, authuri):
        user, password = HTTPPasswordMgr.find_user_password(self, realm,
                                                            authuri)
        if user is not None:
            return user, password
        return HTTPPasswordMgr.find_user_password(self, None, authuri)


class AbstractBasicAuthHandler:

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # allow for double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild)
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\'])(.*?)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password
        self.retried = 0

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)

        if self.retried > 5:
            # retry sending the username:password 5 times before failing.
            raise HTTPError(req.get_full_url(), 401, "basic auth failed",
                            headers, None)
        else:
            self.retried += 1

        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() != 'basic':
                raise ValueError("AbstractBasicAuthHandler does not"
                                 " support the following scheme: '%s'" %
                                 scheme)
            else:
                mo = AbstractBasicAuthHandler.rx.search(authreq)
                if mo:
                    scheme, quote, realm = mo.groups()
                    if scheme.lower() == 'basic':
                        response = self.retry_http_basic_auth(host, req, realm)
                        if response and response.code != 401:
                            self.retried = 0
                        return response

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii")
            if req.headers.get(self.auth_header, None) == auth:
                return None
            req.add_unredirected_header(self.auth_header, auth)
            return self.parent.open(req, timeout=req.timeout)
        else:
            return None


class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        url = req.full_url
        response = self.http_error_auth_reqed('www-authenticate',
                                              url, req, headers)
        self.reset_retry_count()
        return response


class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component in
        # authority.  Assume there isn't one, since urllib.request does not (and
        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
        # userinfo.
        authority = req.host
        response = self.http_error_auth_reqed('proxy-authenticate',
                                              authority, req, headers)
        self.reset_retry_count()
        return response


def randombytes(n):
    """Return n random bytes."""
    return os.urandom(n)

class AbstractDigestAuthHandler:
    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX qop="auth-int" support is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.full_url, 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)
            elif scheme.lower() != 'basic':
                raise ValueError("AbstractDigestAuthHandler does not support"
                                 " the following scheme: '%s'" % scheme)

    def retry_http_digest_auth(self, req, auth):
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(filter(None, parse_http_list(challenge)))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req, timeout=req.timeout)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime())
        b = s.encode("ascii") + randombytes(8)
        dig = hashlib.sha1(b).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None

        user, pw = self.passwd.find_user_password(realm, req.full_url)
        if user is None:
            return None

        # XXX not implemented yet
        if req.data is not None:
            entdig = self.get_entity_digest(req.data, chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.selector)
        if qop == 'auth':
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)

        # XXX should the partial digests be encoded too?

        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.selector,
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest()
        # XXX MD5-sess
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None


class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        host = urlparse(req.full_url)[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry

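# A minimal sketch of wiring up digest authentication (the realm, URL
# and credentials are placeholders); usage mirrors HTTPBasicAuthHandler:
#
#     auth = HTTPDigestAuthHandler()
#     auth.add_password(realm='Example Realm',
#                       uri='http://www.example.com/protected/',
#                       user='klem', passwd='secret')
#     opener = build_opener(auth)
#     opener.open('http://www.example.com/protected/page')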

class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        host = req.host
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry

class AbstractHTTPHandler(BaseHandler):

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        self._debuglevel = level

    def do_request_(self, request):
        host = request.host
        if not host:
            raise URLError('no host given')

        if request.data is not None:  # POST
            data = request.data
            if isinstance(data, str):
                raise TypeError("POST data should be bytes"
                                " or an iterable of bytes. It cannot be str.")
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                try:
                    mv = memoryview(data)
                except TypeError:
                    if isinstance(data, collections.Iterable):
                        raise ValueError("Content-Length should be specified "
                                         "for iterable data of type %r %r" %
                                         (type(data), data))
                else:
                    request.add_unredirected_header(
                        'Content-length', '%d' % (len(mv) * mv.itemsize))

        sel_host = host
        if request.has_proxy():
            scheme, sel = splittype(request.selector)
            sel_host, sel_path = splithost(sel)
        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req, **http_conn_args):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)

        headers = dict(req.unredirected_hdrs)
        headers.update(dict((k, v) for k, v in req.headers.items()
                            if k not in headers))

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict((name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            h.request(req.get_method(), req.selector, req.data, headers)
        except socket.error as err:  # timeout error
            h.close()
            raise URLError(err)
        else:
            r = h.getresponse()

        r.url = req.get_full_url()
        # This line replaces the .msg attribute of the HTTPResponse
        # (which normally duplicates .headers) with the reason phrase,
        # because urllib clients expect the response to have the reason
        # in .msg.  It would be good to mark this attribute as
        # deprecated and get them to use info() or .headers.
        r.msg = r.reason
        return r


class HTTPHandler(AbstractHTTPHandler):

    def http_open(self, req):
        return self.do_open(http.client.HTTPConnection, req)

    http_request = AbstractHTTPHandler.do_request_

if hasattr(http.client, 'HTTPSConnection'):
    import ssl

    class HTTPSHandler(AbstractHTTPHandler):

        def __init__(self, debuglevel=0, context=None, check_hostname=None):
            AbstractHTTPHandler.__init__(self, debuglevel)
            self._context = context
            self._check_hostname = check_hostname

        def https_open(self, req):
            return self.do_open(http.client.HTTPSConnection, req,
                context=self._context, check_hostname=self._check_hostname)

        https_request = AbstractHTTPHandler.do_request_

class HTTPCookieProcessor(BaseHandler):
    def __init__(self, cookiejar=None):
        import http.cookiejar
        if cookiejar is None:
            cookiejar = http.cookiejar.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response

class UnknownHandler(BaseHandler):
    def unknown_open(self, req):
        type = req.type
        raise URLError('unknown url type: %s' % type)

def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated."""
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        if v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed

def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.
    Only double-quotes count, not single-quotes.
    """
    res = []
    part = ''

    escape = quote = False
    for cur in s:
        if escape:
            part += cur
            escape = False
            continue
        if quote:
            if cur == '\\':
                escape = True
                continue
            elif cur == '"':
                quote = False
            part += cur
            continue

        if cur == ',':
            res.append(part)
            part = ''
            continue

        if cur == '"':
            quote = True

        part += cur

    # append last part
    if part:
        res.append(part)

    return [part.strip() for part in res]

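# A minimal sketch of the two challenge parsers above (the header value
# is a made-up example):
#
#     parse_http_list('realm="x, y", nonce="abc", qop=auth')
#     # -> ['realm="x, y"', 'nonce="abc"', 'qop=auth']
#     parse_keqv_list(['realm="x, y"', 'nonce="abc"', 'qop=auth'])
#     # -> {'realm': 'x, y', 'nonce': 'abc', 'qop': 'auth'}
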
class FileHandler(BaseHandler):
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.selector
        if url[:2] == '//' and url[2:3] != '/' and (req.host and
                req.host != 'localhost'):
            if req.host not in self.get_names():
                raise URLError("file:// scheme is supported only on localhost")
        else:
            return self.open_local_file(req)

1272 # names for the localhost
1273 names = None
1274 def get_names(self):
1275 if FileHandler.names is None:
1276 try:
Senthil Kumaran99b2c8f2009-12-27 10:13:39 +00001277 FileHandler.names = tuple(
1278 socket.gethostbyname_ex('localhost')[2] +
1279 socket.gethostbyname_ex(socket.gethostname())[2])
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001280 except socket.gaierror:
1281 FileHandler.names = (socket.gethostbyname('localhost'),)
1282 return FileHandler.names
1283
1284 # not entirely sure what the rules are here
1285 def open_local_file(self, req):
1286 import email.utils
1287 import mimetypes
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001288 host = req.host
Senthil Kumaran06f5a532010-05-08 05:12:05 +00001289 filename = req.selector
1290 localfile = url2pathname(filename)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001291 try:
1292 stats = os.stat(localfile)
1293 size = stats.st_size
1294 modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
Senthil Kumaran06f5a532010-05-08 05:12:05 +00001295 mtype = mimetypes.guess_type(filename)[0]
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001296 headers = email.message_from_string(
1297 'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
1298 (mtype or 'text/plain', size, modified))
1299 if host:
Georg Brandl13e89462008-07-01 19:56:00 +00001300 host, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001301 if not host or \
1302 (not port and _safe_gethostbyname(host) in self.get_names()):
Senthil Kumaran06f5a532010-05-08 05:12:05 +00001303 if host:
1304 origurl = 'file://' + host + filename
1305 else:
1306 origurl = 'file://' + filename
1307 return addinfourl(open(localfile, 'rb'), headers, origurl)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001308 except OSError as msg:
Georg Brandl029986a2008-06-23 11:44:14 +00001309 # users shouldn't expect OSErrors coming from urlopen()
Georg Brandl13e89462008-07-01 19:56:00 +00001310 raise URLError(msg)
1311 raise URLError('file not on local host')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001312
1313def _safe_gethostbyname(host):
1314 try:
1315 return socket.gethostbyname(host)
1316 except socket.gaierror:
1317 return None
1318
1319class FTPHandler(BaseHandler):
1320 def ftp_open(self, req):
1321 import ftplib
1322 import mimetypes
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001323 host = req.host
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001324 if not host:
Georg Brandl13e89462008-07-01 19:56:00 +00001325 raise URLError('ftp error: no host given')
1326 host, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001327 if port is None:
1328 port = ftplib.FTP_PORT
1329 else:
1330 port = int(port)
1331
1332 # username/password handling
Georg Brandl13e89462008-07-01 19:56:00 +00001333 user, host = splituser(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001334 if user:
Georg Brandl13e89462008-07-01 19:56:00 +00001335 user, passwd = splitpasswd(user)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001336 else:
1337 passwd = None
Georg Brandl13e89462008-07-01 19:56:00 +00001338 host = unquote(host)
Senthil Kumarandaa29d02010-11-18 15:36:41 +00001339 user = user or ''
1340 passwd = passwd or ''
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001341
1342 try:
1343 host = socket.gethostbyname(host)
1344 except socket.error as msg:
Georg Brandl13e89462008-07-01 19:56:00 +00001345 raise URLError(msg)
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001346 path, attrs = splitattr(req.selector)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001347 dirs = path.split('/')
Georg Brandl13e89462008-07-01 19:56:00 +00001348 dirs = list(map(unquote, dirs))
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001349 dirs, file = dirs[:-1], dirs[-1]
1350 if dirs and not dirs[0]:
1351 dirs = dirs[1:]
1352 try:
1353 fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
1354 type = file and 'I' or 'D'
1355 for attr in attrs:
Georg Brandl13e89462008-07-01 19:56:00 +00001356 attr, value = splitvalue(attr)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001357 if attr.lower() == 'type' and \
1358 value in ('a', 'A', 'i', 'I', 'd', 'D'):
1359 type = value.upper()
1360 fp, retrlen = fw.retrfile(file, type)
1361 headers = ""
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001362 mtype = mimetypes.guess_type(req.full_url)[0]
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001363 if mtype:
1364 headers += "Content-type: %s\n" % mtype
1365 if retrlen is not None and retrlen >= 0:
1366 headers += "Content-length: %d\n" % retrlen
1367 headers = email.message_from_string(headers)
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001368 return addinfourl(fp, headers, req.full_url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001369 except ftplib.all_errors as msg:
Georg Brandl13e89462008-07-01 19:56:00 +00001370 exc = URLError('ftp error: %s' % msg)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001371 raise exc.with_traceback(sys.exc_info()[2])

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        return ftpwrapper(user, passwd, host, port, dirs, timeout,
                          persistent=False)

class CacheFTPHandler(FTPHandler):
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe
    def __init__(self):
        self.cache = {}
        self.timeout = {}
        self.soonest = 0
        self.delay = 60
        self.max_conns = 16

    def setTimeout(self, t):
        self.delay = t

    def setMaxConns(self, m):
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        key = user, host, port, '/'.join(dirs), timeout
        if key in self.cache:
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port,
                                         dirs, timeout)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            for k, v in list(self.timeout.items()):
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
            self.soonest = min(list(self.timeout.values()))

        # then check the size
        if len(self.cache) == self.max_conns:
            for k, v in list(self.timeout.items()):
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            self.soonest = min(list(self.timeout.values()))

    def clear_cache(self):
        for conn in self.cache.values():
            conn.close()
        self.cache.clear()
        self.timeout.clear()

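# Illustrative sketch (not part of the original module): CacheFTPHandler is
# normally combined with build_opener(), e.g.
#
#   import urllib.request
#   handler = urllib.request.CacheFTPHandler()
#   handler.setTimeout(30)        # keep idle FTP connections for 30 seconds
#   handler.setMaxConns(4)        # cache at most four connections
#   opener = urllib.request.build_opener(handler)
#   data = opener.open('ftp://ftp.example.com/pub/README').read()
#
# The host name above is hypothetical; connections are keyed on
# (user, host, port, path, timeout) and reused until they expire.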

# Code moved from the old urllib module

MAXFTPCACHE = 10    # Trim the ftp cache beyond this size

# Helper for non-unix systems
if os.name == 'nt':
    from nturl2path import url2pathname, pathname2url
else:
    def url2pathname(pathname):
        """OS-specific conversion from a relative URL of the 'file' scheme
        to a file system path; not recommended for general use."""
        return unquote(pathname)

    def pathname2url(pathname):
        """OS-specific conversion from a file system path to a relative URL
        of the 'file' scheme; not recommended for general use."""
        return quote(pathname)
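    # Illustrative example (not part of the original module): on these
    # platforms the conversion is plain percent-quoting, e.g.
    #   pathname2url('/tmp/some file')   -> '/tmp/some%20file'
    #   url2pathname('/tmp/some%20file') -> '/tmp/some file'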

# This really consists of two pieces:
# (1) a class which handles opening of all sorts of URLs
#     (plus assorted utilities etc.)
# (2) a set of functions for parsing URLs
# XXX Should these be separated out into different modules?


ftpcache = {}
class URLopener:
    """Class to open URLs.
    This is a class rather than just a subroutine because we may need
    more than one set of global protocol-specific options.
    Note -- this is a base class for those who don't want the
    automatic handling of errors type 302 (relocated) and 401
    (authorization needed)."""
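    # Illustrative sketch (not part of the original class): typical direct
    # use is simply
    #
    #   opener = URLopener()
    #   f = opener.open('http://www.python.org/')
    #   print(f.read())
    #
    # FancyURLopener, defined further below, layers on the automatic redirect
    # and authentication handling that this base class deliberately omits.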

    __tempfiles = None

    version = "Python-urllib/%s" % __version__

    # Constructor
    def __init__(self, proxies=None, **x509):
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        self.proxies = proxies
        self.key_file = x509.get('key_file')
        self.cert_file = x509.get('cert_file')
        self.addheaders = [('User-Agent', self.version)]
        self.__tempfiles = []
        self.__unlink = os.unlink  # See cleanup()
        self.tempcache = None
        # Undocumented feature: if you assign {} to tempcache,
        # it is used to cache files retrieved with
        # self.retrieve().  This is not enabled by default
        # since it does not work for changing documents (and I
        # haven't got the logic to check expiration headers
        # yet).
        self.ftpcache = ftpcache
        # Undocumented feature: you can use a different
        # ftp cache by assigning to the .ftpcache member;
        # in case you want logically independent URL openers
        # XXX This is not threadsafe.  Bah.

    def __del__(self):
        self.close()

    def close(self):
        self.cleanup()

    def cleanup(self):
        # This code sometimes runs when the rest of this module
        # has already been deleted, so it can't use any globals
        # or import anything.
        if self.__tempfiles:
            for file in self.__tempfiles:
                try:
                    self.__unlink(file)
                except OSError:
                    pass
            del self.__tempfiles[:]
        if self.tempcache:
            self.tempcache.clear()

    def addheader(self, *args):
        """Add a header to be used by the HTTP interface only
        e.g. u.addheader('Accept', 'sound/basic')"""
        self.addheaders.append(args)

    # External interface
    def open(self, fullurl, data=None):
        """Use URLopener().open(file) instead of open(file, 'r')."""
        fullurl = unwrap(to_bytes(fullurl))
        fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
        if self.tempcache and fullurl in self.tempcache:
            filename, headers = self.tempcache[fullurl]
            fp = open(filename, 'rb')
            return addinfourl(fp, headers, fullurl)
        urltype, url = splittype(fullurl)
        if not urltype:
            urltype = 'file'
        if urltype in self.proxies:
            proxy = self.proxies[urltype]
            urltype, proxyhost = splittype(proxy)
            host, selector = splithost(proxyhost)
            url = (host, fullurl)  # Signal special case to open_*()
        else:
            proxy = None
        name = 'open_' + urltype
        self.type = urltype
        name = name.replace('-', '_')
        if not hasattr(self, name):
            if proxy:
                return self.open_unknown_proxy(proxy, fullurl, data)
            else:
                return self.open_unknown(fullurl, data)
        try:
            if data is None:
                return getattr(self, name)(url)
            else:
                return getattr(self, name)(url, data)
        except socket.error as msg:
            raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])

    def open_unknown(self, fullurl, data=None):
        """Overridable interface to open unknown URL type."""
        type, url = splittype(fullurl)
        raise IOError('url error', 'unknown url type', type)

    def open_unknown_proxy(self, proxy, fullurl, data=None):
        """Overridable interface to open unknown URL type."""
        type, url = splittype(fullurl)
        raise IOError('url error', 'invalid proxy for %s' % type, proxy)

    # External interface
    def retrieve(self, url, filename=None, reporthook=None, data=None):
        """retrieve(url) returns (filename, headers) for a local object
        or (tempfilename, headers) for a remote object."""
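        # Illustrative note (not part of the original docstring): reporthook,
        # if given, is called as reporthook(blocknum, blocksize, totalsize)
        # after each block is read, e.g.
        #
        #   def progress(blocknum, blocksize, totalsize):
        #       print(blocknum * blocksize, 'of', totalsize, 'bytes')
        #
        #   URLopener().retrieve('http://www.python.org/', '/tmp/index.html',
        #                        reporthook=progress)
        #
        # 'progress' and the target path are hypothetical; totalsize is -1
        # when the server sends no Content-Length header.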
        url = unwrap(to_bytes(url))
        if self.tempcache and url in self.tempcache:
            return self.tempcache[url]
        type, url1 = splittype(url)
        if filename is None and (not type or type == 'file'):
            try:
                fp = self.open_local_file(url1)
                hdrs = fp.info()
                fp.close()
                return url2pathname(splithost(url1)[1]), hdrs
            except IOError as msg:
                pass
        fp = self.open(url, data)
        try:
            headers = fp.info()
            if filename:
                tfp = open(filename, 'wb')
            else:
                import tempfile
                garbage, path = splittype(url)
                garbage, path = splithost(path or "")
                path, garbage = splitquery(path or "")
                path, garbage = splitattr(path or "")
                suffix = os.path.splitext(path)[1]
                (fd, filename) = tempfile.mkstemp(suffix)
                self.__tempfiles.append(filename)
                tfp = os.fdopen(fd, 'wb')
            try:
                result = filename, headers
                if self.tempcache is not None:
                    self.tempcache[url] = result
                bs = 1024*8
                size = -1
                read = 0
                blocknum = 0
                if reporthook:
                    if "content-length" in headers:
                        size = int(headers["Content-Length"])
                    reporthook(blocknum, bs, size)
                while 1:
                    block = fp.read(bs)
                    if not block:
                        break
                    read += len(block)
                    tfp.write(block)
                    blocknum += 1
                    if reporthook:
                        reporthook(blocknum, bs, size)
            finally:
                tfp.close()
        finally:
            fp.close()

        # raise exception if actual size does not match content-length header
        if size >= 0 and read < size:
            raise ContentTooShortError(
                "retrieval incomplete: got only %i out of %i bytes"
                % (read, size), result)

        return result

    # Each method named open_<type> knows how to open that type of URL

    def _open_generic_http(self, connection_factory, url, data):
        """Make an HTTP connection using connection_factory.

        This is an internal method that should be called from
        open_http() or open_https().

        Arguments:
        - connection_factory should take a host name and return an
          HTTPConnection instance.
        - url is the URL to retrieve, or a (host, relative-path) pair.
        - data is the payload for a POST request, or None.
        """

        user_passwd = None
        proxy_passwd = None
        if isinstance(url, str):
            host, selector = splithost(url)
            if host:
                user_passwd, host = splituser(host)
                host = unquote(host)
            realhost = host
        else:
            host, selector = url
            # check whether the proxy contains authorization information
            proxy_passwd, host = splituser(host)
            # now we proceed with the url we want to obtain
            urltype, rest = splittype(selector)
            url = rest
            user_passwd = None
            if urltype.lower() != 'http':
                realhost = None
            else:
                realhost, rest = splithost(rest)
                if realhost:
                    user_passwd, realhost = splituser(realhost)
                if user_passwd:
                    selector = "%s://%s%s" % (urltype, realhost, rest)
                if proxy_bypass(realhost):
                    host = realhost

            #print "proxy via http:", host, selector
        if not host: raise IOError('http error', 'no host given')

        if proxy_passwd:
            import base64
            proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii')
        else:
            proxy_auth = None

        if user_passwd:
            import base64
            auth = base64.b64encode(user_passwd.encode()).decode('ascii')
        else:
            auth = None
        http_conn = connection_factory(host)
        headers = {}
        if proxy_auth:
            headers["Proxy-Authorization"] = "Basic %s" % proxy_auth
        if auth:
            headers["Authorization"] = "Basic %s" % auth
        if realhost:
            headers["Host"] = realhost

        # Add Connection:close as we don't support persistent connections yet.
        # This helps in closing the socket and avoiding ResourceWarning

        headers["Connection"] = "close"

        for header, value in self.addheaders:
            headers[header] = value

        if data is not None:
            headers["Content-Type"] = "application/x-www-form-urlencoded"
            http_conn.request("POST", selector, data, headers)
        else:
            http_conn.request("GET", selector, headers=headers)

        try:
            response = http_conn.getresponse()
        except http.client.BadStatusLine:
            # something went wrong with the HTTP status line
            raise URLError("http protocol error: bad status line")

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if 200 <= response.status < 300:
            return addinfourl(response, response.msg, "http:" + url,
                              response.status)
        else:
            return self.http_error(
                url, response.fp,
                response.status, response.reason, response.msg, data)

    def open_http(self, url, data=None):
        """Use HTTP protocol."""
        return self._open_generic_http(http.client.HTTPConnection, url, data)

    def http_error(self, url, fp, errcode, errmsg, headers, data=None):
        """Handle http errors.

        Derived class can override this, or provide specific handlers
        named http_error_DDD where DDD is the 3-digit error code."""
        # First check if there's a specific handler for this error
        name = 'http_error_%d' % errcode
        if hasattr(self, name):
            method = getattr(self, name)
            if data is None:
                result = method(url, fp, errcode, errmsg, headers)
            else:
                result = method(url, fp, errcode, errmsg, headers, data)
            if result: return result
        return self.http_error_default(url, fp, errcode, errmsg, headers)
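        # Illustrative sketch (not part of the original module): a subclass
        # can handle a single status code instead of overriding http_error(),
        # e.g.
        #
        #   class MyOpener(URLopener):
        #       def http_error_404(self, url, fp, errcode, errmsg, headers):
        #           fp.close()
        #           return None     # treat "not found" as "no result"
        #
        # MyOpener is a hypothetical name; any http_error_DDD method with the
        # signature shown is picked up automatically by the dispatch above.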

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handler: close the connection and raise IOError."""
        void = fp.read()
        fp.close()
        raise HTTPError(url, errcode, errmsg, headers, None)

    if _have_ssl:
        def _https_connection(self, host):
            return http.client.HTTPSConnection(host,
                                               key_file=self.key_file,
                                               cert_file=self.cert_file)

        def open_https(self, url, data=None):
            """Use HTTPS protocol."""
            return self._open_generic_http(self._https_connection, url, data)

    def open_file(self, url):
        """Use local file or FTP depending on form of URL."""
        if not isinstance(url, str):
            raise URLError('file error', 'proxy support for file protocol currently not implemented')
        if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
            raise ValueError("file:// scheme is supported only on localhost")
        else:
            return self.open_local_file(url)

    def open_local_file(self, url):
        """Use local file."""
        import mimetypes, email.utils
        from io import StringIO
        host, file = splithost(url)
        localname = url2pathname(file)
        try:
            stats = os.stat(localname)
        except OSError as e:
            # URLError only accepts (reason[, filename]); wrap the OSError
            # accordingly instead of passing three positional arguments
            raise URLError(e.strerror, e.filename)
        size = stats.st_size
        modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
        mtype = mimetypes.guess_type(url)[0]
        headers = email.message_from_string(
            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified))
        if not host:
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            return addinfourl(open(localname, 'rb'), headers, urlfile)
        host, port = splitport(host)
        # localhost() returns a single address string, thishost() a tuple of
        # address strings, so build a tuple before testing membership
        if (not port
           and socket.gethostbyname(host) in ((localhost(),) + thishost())):
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            return addinfourl(open(localname, 'rb'), headers, urlfile)
        raise URLError('local file error', 'not on local host')

    def open_ftp(self, url):
        """Use FTP protocol."""
        if not isinstance(url, str):
            raise URLError('ftp error', 'proxy support for ftp protocol currently not implemented')
        import mimetypes
        from io import StringIO
        host, path = splithost(url)
        if not host: raise URLError('ftp error', 'no host given')
        host, port = splitport(host)
        user, host = splituser(host)
        if user: user, passwd = splitpasswd(user)
        else: passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')
        host = socket.gethostbyname(host)
        if not port:
            import ftplib
            port = ftplib.FTP_PORT
        else:
            port = int(port)
        path, attrs = splitattr(path)
        path = unquote(path)
        dirs = path.split('/')
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]: dirs = dirs[1:]
        if dirs and not dirs[0]: dirs[0] = '/'
        key = user, host, port, '/'.join(dirs)
        # XXX thread unsafe!
        if len(self.ftpcache) > MAXFTPCACHE:
            # Prune the cache, rather arbitrarily
            # (iterate over a copy of the keys: entries are deleted below)
            for k in list(self.ftpcache.keys()):
                if k != key:
                    v = self.ftpcache[k]
                    del self.ftpcache[k]
                    v.close()
        try:
            if not key in self.ftpcache:
                self.ftpcache[key] = \
                    ftpwrapper(user, passwd, host, port, dirs)
            if not file: type = 'D'
            else: type = 'I'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
            mtype = mimetypes.guess_type("ftp:" + url)[0]
            headers = ""
            if mtype:
                headers += "Content-Type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-Length: %d\n" % retrlen
            headers = email.message_from_string(headers)
            return addinfourl(fp, headers, "ftp:" + url)
        except ftperrors() as msg:
            raise URLError('ftp error', msg).with_traceback(sys.exc_info()[2])

    def open_data(self, url, data=None):
        """Use "data" URL."""
        if not isinstance(url, str):
            raise URLError('data error', 'proxy support for data protocol currently not implemented')
        # ignore POSTed data
        #
        # syntax of data URLs:
        # dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
        # mediatype := [ type "/" subtype ] *( ";" parameter )
        # data      := *urlchar
        # parameter := attribute "=" value
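        # Illustrative example (not part of the original module): opening
        #   data:text/plain;charset=UTF-8,Hello%20world
        # produces a response whose Content-type header is
        # "text/plain;charset=UTF-8" and whose body is "Hello world";
        # with a ";base64" parameter the data part is base64-decoded
        # instead of unquoted.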
        try:
            [type, data] = url.split(',', 1)
        except ValueError:
            raise IOError('data error', 'bad data URL')
        if not type:
            type = 'text/plain;charset=US-ASCII'
        semi = type.rfind(';')
        if semi >= 0 and '=' not in type[semi:]:
            encoding = type[semi+1:]
            type = type[:semi]
        else:
            encoding = ''
        msg = []
        msg.append('Date: %s' % time.strftime('%a, %d %b %Y %H:%M:%S GMT',
                                              time.gmtime(time.time())))
        msg.append('Content-type: %s' % type)
        if encoding == 'base64':
            import base64
            # XXX is this encoding/decoding ok?
            data = base64.decodebytes(data.encode('ascii')).decode('latin-1')
        else:
            data = unquote(data)
        msg.append('Content-Length: %d' % len(data))
        msg.append('')
        msg.append(data)
        msg = '\n'.join(msg)
        headers = email.message_from_string(msg)
        f = io.StringIO(msg)
        #f.fileno = None     # needed for addinfourl
        return addinfourl(f, headers, url)


class FancyURLopener(URLopener):
    """Derived class with handlers for errors we can handle (perhaps)."""

    def __init__(self, *args, **kwargs):
        URLopener.__init__(self, *args, **kwargs)
        self.auth_cache = {}
        self.tries = 0
        self.maxtries = 10

    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handling -- don't raise an exception."""
        return addinfourl(fp, headers, "http:" + url, errcode)

    def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 302 -- relocated (temporarily)."""
        self.tries += 1
        if self.maxtries and self.tries >= self.maxtries:
            if hasattr(self, "http_error_500"):
                meth = self.http_error_500
            else:
                meth = self.http_error_default
            self.tries = 0
            return meth(url, fp, 500,
                        "Internal Server Error: Redirect Recursion", headers)
        result = self.redirect_internal(url, fp, errcode, errmsg, headers,
                                        data)
        self.tries = 0
        return result

    def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
        if 'location' in headers:
            newurl = headers['location']
        elif 'uri' in headers:
            newurl = headers['uri']
        else:
            return
        void = fp.read()
        fp.close()

        # In case the server sent a relative URL, join with original:
        newurl = urljoin(self.type + ":" + url, newurl)

        urlparts = urlparse(newurl)

        # For security reasons, we don't allow redirection to anything other
        # than http, https and ftp.

        # We are using the newer HTTPError with the older redirect_internal
        # method; this older method will be deprecated in 3.3.

        if not urlparts.scheme in ('http', 'https', 'ftp'):
            raise HTTPError(newurl, errcode,
                            errmsg +
                            " Redirection to url '%s' is not allowed." % newurl,
                            headers, fp)

        return self.open(newurl)

    def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 301 -- also relocated (permanently)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 303 -- also relocated (essentially identical to 302)."""
        return self.http_error_302(url, fp, errcode, errmsg, headers, data)

    def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
        """Error 307 -- relocated, but turn POST into error."""
        if data is None:
            return self.http_error_302(url, fp, errcode, errmsg, headers, data)
        else:
            return self.http_error_default(url, fp, errcode, errmsg, headers)

    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None,
                       retry=False):
        """Error 401 -- authentication required.
        This function supports Basic authentication only."""
        if not 'www-authenticate' in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['www-authenticate']
        import re
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        if not retry:
            URLopener.http_error_default(self, url, fp, errcode, errmsg,
                                         headers)
        name = 'retry_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self,name)(url, realm)
        else:
            return getattr(self,name)(url, realm, data)

    def http_error_407(self, url, fp, errcode, errmsg, headers, data=None,
                       retry=False):
        """Error 407 -- proxy authentication required.
        This function supports Basic authentication only."""
        if not 'proxy-authenticate' in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['proxy-authenticate']
        import re
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        if not retry:
            URLopener.http_error_default(self, url, fp, errcode, errmsg,
                                         headers)
        name = 'retry_proxy_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self,name)(url, realm)
        else:
            return getattr(self,name)(url, realm, data)

    def retry_proxy_http_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        newurl = 'http://' + host + selector
        proxy = self.proxies['http']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
                                  quote(passwd, safe=''), proxyhost)
        self.proxies['http'] = 'http://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_proxy_https_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        newurl = 'https://' + host + selector
        proxy = self.proxies['https']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
                                  quote(passwd, safe=''), proxyhost)
        self.proxies['https'] = 'https://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_http_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = "%s:%s@%s" % (quote(user, safe=''),
                             quote(passwd, safe=''), host)
        newurl = 'http://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_https_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = "%s:%s@%s" % (quote(user, safe=''),
                             quote(passwd, safe=''), host)
        newurl = 'https://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def get_user_passwd(self, host, realm, clear_cache=0):
        key = realm + '@' + host.lower()
        if key in self.auth_cache:
            if clear_cache:
                del self.auth_cache[key]
            else:
                return self.auth_cache[key]
        user, passwd = self.prompt_user_passwd(host, realm)
        if user or passwd: self.auth_cache[key] = (user, passwd)
        return user, passwd

    def prompt_user_passwd(self, host, realm):
        """Override this in a GUI environment!"""
        import getpass
        try:
            user = input("Enter username for %s at %s: " % (realm, host))
            passwd = getpass.getpass("Enter password for %s in %s at %s: " %
                                     (user, realm, host))
            return user, passwd
        except KeyboardInterrupt:
            print()
            return None, None

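# Illustrative sketch (not part of the original module): a GUI program would
# typically subclass FancyURLopener and replace the console prompt, e.g.
#
#   class MyOpener(FancyURLopener):
#       def prompt_user_passwd(self, host, realm):
#           # pop up a dialog instead of reading from stdin
#           return my_dialog(host, realm)
#
# MyOpener and my_dialog are hypothetical names; returning (None, None)
# abandons the authentication retry.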

# Utility functions

_localhost = None
def localhost():
    """Return the IP address of the magic hostname 'localhost'."""
    global _localhost
    if _localhost is None:
        _localhost = socket.gethostbyname('localhost')
    return _localhost

_thishost = None
def thishost():
    """Return the IP addresses of the current host."""
    global _thishost
    if _thishost is None:
        # gethostbyname_ex() returns (hostname, aliaslist, ipaddrlist);
        # index the result, not the hostname string, to get the address list
        _thishost = tuple(socket.gethostbyname_ex(socket.gethostname())[2])
    return _thishost

_ftperrors = None
def ftperrors():
    """Return the set of errors raised by the FTP class."""
    global _ftperrors
    if _ftperrors is None:
        import ftplib
        _ftperrors = ftplib.all_errors
    return _ftperrors

_noheaders = None
def noheaders():
    """Return an empty email Message object."""
    global _noheaders
    if _noheaders is None:
        _noheaders = email.message_from_string("")
    return _noheaders


# Utility classes

class ftpwrapper:
    """Class used by open_ftp() for cache of open FTP connections."""

    def __init__(self, user, passwd, host, port, dirs, timeout=None,
                 persistent=True):
        self.user = user
        self.passwd = passwd
        self.host = host
        self.port = port
        self.dirs = dirs
        self.timeout = timeout
        self.refcount = 0
        self.keepalive = persistent
        self.init()

    def init(self):
        import ftplib
        self.busy = 0
        self.ftp = ftplib.FTP()
        self.ftp.connect(self.host, self.port, self.timeout)
        self.ftp.login(self.user, self.passwd)
        for dir in self.dirs:
            self.ftp.cwd(dir)

    def retrfile(self, file, type):
        import ftplib
        self.endtransfer()
        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
        else: cmd = 'TYPE ' + type; isdir = 0
        try:
            self.ftp.voidcmd(cmd)
        except ftplib.all_errors:
            self.init()
            self.ftp.voidcmd(cmd)
        conn = None
        if file and not isdir:
            # Try to retrieve as a file
            try:
                cmd = 'RETR ' + file
                conn, retrlen = self.ftp.ntransfercmd(cmd)
            except ftplib.error_perm as reason:
                if str(reason)[:3] != '550':
                    raise URLError('ftp error', reason).with_traceback(
                        sys.exc_info()[2])
        if not conn:
            # Set transfer mode to ASCII!
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing. Verify that directory exists.
            if file:
                pwd = self.ftp.pwd()
                try:
                    try:
                        self.ftp.cwd(file)
                    except ftplib.error_perm as reason:
                        raise URLError('ftp error', reason) from reason
                finally:
                    self.ftp.cwd(pwd)
                cmd = 'LIST ' + file
            else:
                cmd = 'LIST'
            conn, retrlen = self.ftp.ntransfercmd(cmd)
        self.busy = 1

        ftpobj = addclosehook(conn.makefile('rb'), self.file_close)
        self.refcount += 1
        conn.close()
        # Pass back both a suitably decorated object and a retrieval length
        return (ftpobj, retrlen)

    def endtransfer(self):
        if not self.busy:
            return
        self.busy = 0
        try:
            self.ftp.voidresp()
        except ftperrors():
            pass

    def close(self):
        self.keepalive = False
        if self.refcount <= 0:
            self.real_close()

    def file_close(self):
        self.endtransfer()
        self.refcount -= 1
        if self.refcount <= 0 and not self.keepalive:
            self.real_close()

    def real_close(self):
        self.endtransfer()
        try:
            self.ftp.close()
        except ftperrors():
            pass

# Proxy handling
def getproxies_environment():
    """Return a dictionary of scheme -> proxy server URL mappings.

    Scan the environment for variables named <scheme>_proxy;
    this seems to be the standard convention.  If you need a
    different way, you can pass a proxies dictionary to the
    [Fancy]URLopener constructor.

    """
    proxies = {}
    for name, value in os.environ.items():
        name = name.lower()
        if value and name[-6:] == '_proxy':
            proxies[name[:-6]] = value
    return proxies
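# Illustrative example (not part of the original module): with the environment
# variable http_proxy set to "http://proxy.example.com:3128/" (a hypothetical
# host), getproxies_environment() returns
# {'http': 'http://proxy.example.com:3128/'}.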

def proxy_bypass_environment(host):
    """Test if proxies should not be used for a particular host.

    Checks the environment for a variable named no_proxy, which should
    be a list of DNS suffixes separated by commas, or '*' for all hosts.
    """
    no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
    # '*' is a special case meaning "always bypass"
    if no_proxy == '*':
        return 1
    # strip port off host
    hostonly, port = splitport(host)
    # check if the host ends with any of the DNS suffixes
    no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')]
    for name in no_proxy_list:
        if name and (hostonly.endswith(name) or host.endswith(name)):
            return 1
    # otherwise, don't bypass
    return 0
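# Illustrative example (not part of the original module): with
# no_proxy="localhost,.example.com" in the environment,
# proxy_bypass_environment('www.example.com:8080') returns 1 (bypass the
# proxy) while proxy_bypass_environment('python.org') returns 0.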


# This code tests an OSX-specific data structure but is testable on all
# platforms
def _proxy_bypass_macosx_sysconf(host, proxy_settings):
    """
    Return True iff this host shouldn't be accessed using a proxy

    This function uses the MacOSX framework SystemConfiguration
    to fetch the proxy information.

    proxy_settings come from _scproxy._get_proxy_settings or are mocked, e.g.:
    { 'exclude_simple': bool,
      'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16']
    }
    """
    import re
    import socket
    from fnmatch import fnmatch

    hostonly, port = splitport(host)

    def ip2num(ipAddr):
        parts = ipAddr.split('.')
        parts = list(map(int, parts))
        if len(parts) != 4:
            parts = (parts + [0, 0, 0, 0])[:4]
        return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]
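    # Illustrative examples (not part of the original module):
    #   ip2num('10.1.2.3') -> 0x0A010203 (167838211)
    #   ip2num('169.254')  -> 0xA9FE0000, i.e. missing octets pad with zeros,
    # which is what lets prefix entries such as '169.254/16' below be compared
    # numerically against the resolved host address.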

    # Check for simple host names:
    if '.' not in host:
        if proxy_settings['exclude_simple']:
            return True

    hostIP = None

    for value in proxy_settings.get('exceptions', ()):
        # Items in the list are strings like these: *.local, 169.254/16
        if not value: continue

        m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
        if m is not None:
            if hostIP is None:
                try:
                    hostIP = socket.gethostbyname(hostonly)
                    hostIP = ip2num(hostIP)
                except socket.error:
                    continue

            base = ip2num(m.group(1))
            mask = m.group(2)
            if mask is None:
                mask = 8 * (m.group(1).count('.') + 1)
            else:
                mask = int(mask[1:])
            mask = 32 - mask

            if (hostIP >> mask) == (base >> mask):
                return True

        elif fnmatch(host, value):
            return True

    return False


if sys.platform == 'darwin':
    from _scproxy import _get_proxy_settings, _get_proxies

    def proxy_bypass_macosx_sysconf(host):
        proxy_settings = _get_proxy_settings()
        return _proxy_bypass_macosx_sysconf(host, proxy_settings)

    def getproxies_macosx_sysconf():
        """Return a dictionary of scheme -> proxy server URL mappings.

        This function uses the MacOSX framework SystemConfiguration
        to fetch the proxy information.
        """
        return _get_proxies()

    def proxy_bypass(host):
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_macosx_sysconf(host)

    def getproxies():
        return getproxies_environment() or getproxies_macosx_sysconf()


elif os.name == 'nt':
    def getproxies_registry():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Win32 uses the registry to store proxies.

        """
        proxies = {}
        try:
            import winreg
        except ImportError:
            # Std module, so should be around - but you never know!
            return proxies
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            if proxyEnable:
                # Returned as Unicode, but causes problems if not converted
                # to ASCII
                proxyServer = str(winreg.QueryValueEx(internetSettings,
                                                      'ProxyServer')[0])
                if '=' in proxyServer:
                    # Per-protocol settings
                    for p in proxyServer.split(';'):
                        protocol, address = p.split('=', 1)
                        # See if address has a type:// prefix
                        import re
                        if not re.match('^([^/:]+)://', address):
                            address = '%s://%s' % (protocol, address)
                        proxies[protocol] = address
                else:
                    # Use one setting for all protocols
                    if proxyServer[:5] == 'http:':
                        proxies['http'] = proxyServer
                    else:
                        proxies['http'] = 'http://%s' % proxyServer
                        proxies['https'] = 'https://%s' % proxyServer
                        proxies['ftp'] = 'ftp://%s' % proxyServer
            internetSettings.Close()
        except (WindowsError, ValueError, TypeError):
            # Either registry key not found etc, or the value in an
            # unexpected format.
            # proxies already set up to be empty so nothing to do
            pass
        return proxies

    def getproxies():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Returns settings gathered from the environment, if specified,
        or the registry.

        """
        return getproxies_environment() or getproxies_registry()

    def proxy_bypass_registry(host):
        try:
            import winreg
            import re
        except ImportError:
            # Std modules, so should be around - but you never know!
            return 0
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            proxyOverride = str(winreg.QueryValueEx(internetSettings,
                                                    'ProxyOverride')[0])
            # ^^^^ Returned as Unicode, but causes problems if not converted
            # to ASCII
        except WindowsError:
            return 0
        if not proxyEnable or not proxyOverride:
            return 0
        # try to make a host list from name and IP address.
        rawHost, port = splitport(host)
        host = [rawHost]
        try:
            addr = socket.gethostbyname(rawHost)
            if addr != rawHost:
                host.append(addr)
        except socket.error:
            pass
        try:
            fqdn = socket.getfqdn(rawHost)
            if fqdn != rawHost:
                host.append(fqdn)
        except socket.error:
            pass
        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                if '.' not in rawHost:
                    return 1
            test = test.replace(".", r"\.")     # mask dots
            test = test.replace("*", r".*")     # change glob sequence
            test = test.replace("?", r".")      # change glob char
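            # Illustrative example (not part of the original module): an
            # override entry such as "*.example.com" becomes the regular
            # expression r".*\.example\.com", which is then matched
            # case-insensitively against the host name, its IP address and
            # its FQDN in the loop below.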
            for val in host:
                # print "%s <--> %s" %( test, val )
                if re.match(test, val, re.I):
                    return 1
        return 0

    def proxy_bypass(host):
        """Return 1 if the host should be contacted directly, bypassing
        any proxy.

        Uses the no_proxy rules from the environment if proxy settings are
        found there, and the Windows registry's ProxyOverride list otherwise.
        """
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_registry(host)

else:
    # By default use environment variables
    getproxies = getproxies_environment
    proxy_bypass = proxy_bypass_environment