"""An extensible library for opening URLs using a variety of protocols

The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below).  It opens the URL and returns the result as a file-like
object; the returned object has some extra methods described below.

The OpenerDirector manages a collection of Handler objects that do
all the actual work.  Each Handler implements a particular protocol or
option.  The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL.  For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns.  The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
deals with digest authentication.

urlopen(url, data=None) -- Basic usage is the same as original
urllib.  Pass the URL and optionally the data to post to an HTTP URL, and
get a file-like object back.  One difference is that you can also pass
a Request instance instead of a URL.  Raises a URLError (subclass of
IOError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.

build_opener -- Function that creates a new OpenerDirector instance.
Will install the default handlers.  Accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate.  If one of the arguments is a subclass of a default
handler, the argument will be installed instead of the default.

install_opener -- Installs a new opener as the default opener.

objects of interest:

OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages
the Handler classes, while dealing with requests and responses.

Request -- An object that encapsulates the state of a request.  The
state can be as simple as the URL.  It can also include extra HTTP
headers, e.g. a User-Agent.

BaseHandler --

internals:
BaseHandler and parent
_call_chain conventions

Example usage:

import urllib.request

# set up authentication info
authinfo = urllib.request.HTTPBasicAuthHandler()
authinfo.add_password(realm='PDQ Application',
                      uri='https://mahler:8092/site-updates.py',
                      user='klem',
                      passwd='geheim$parole')

proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"})

# build a new opener that adds authentication and caching FTP handlers
opener = urllib.request.build_opener(proxy_support, authinfo,
                                     urllib.request.CacheFTPHandler)

# install it
urllib.request.install_opener(opener)

f = urllib.request.urlopen('http://www.python.org/')
"""
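
# Illustrative sketch (not part of the original module): error handling with
# urlopen.  URLError signals transport-level failures, while HTTPError is both
# an exception and a response-like object, so its status code and headers can
# still be read.  The URL below is only a placeholder.
#
#   import urllib.request
#   from urllib.error import HTTPError, URLError
#
#   try:
#       f = urllib.request.urlopen('http://www.example.com/')
#       body = f.read()
#       f.close()
#   except HTTPError as e:      # non-2xx response; still carries code and headers
#       print(e.code, e.info().get('Content-Type'))
#   except URLError as e:       # e.g. DNS failure or refused connection
#       print(e.reason)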

# XXX issues:
# If an authentication error handler that tries to perform
# authentication for some reason but fails, how should the error be
# signalled?  The client needs to know the HTTP error code.  But if
# the handler knows that the problem was, e.g., that it didn't know
# the hash algorithm requested in the challenge, it would be good to
# pass that information along to the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation

# Possible extensions:
# complex proxies  XXX not sure what exactly was meant by this
# abstract factory for opener

import base64
import bisect
import email
import hashlib
import http.client
import io
import os
import posixpath
import random
import re
import socket
import sys
import time
import collections

from urllib.error import URLError, HTTPError, ContentTooShortError
from urllib.parse import (
    urlparse, urlsplit, urljoin, unwrap, quote, unquote,
    splittype, splithost, splitport, splituser, splitpasswd,
    splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse)
from urllib.response import addinfourl, addclosehook

# check for SSL
try:
    import ssl
except ImportError:
    _have_ssl = False
else:
    _have_ssl = True

# used in User-Agent header sent
__version__ = sys.version[:3]

_opener = None
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
            *, cafile=None, capath=None):
    global _opener
    if cafile or capath:
        if not _have_ssl:
            raise ValueError('SSL support not available')
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        if cafile or capath:
            context.verify_mode = ssl.CERT_REQUIRED
            context.load_verify_locations(cafile, capath)
            check_hostname = True
        else:
            check_hostname = False
        https_handler = HTTPSHandler(context=context, check_hostname=check_hostname)
        opener = build_opener(https_handler)
    elif _opener is None:
        _opener = opener = build_opener()
    else:
        opener = _opener
    return opener.open(url, data, timeout)

def install_opener(opener):
    global _opener
    _opener = opener

# TODO(jhylton): Make this work with the same global opener.
_urlopener = None
def urlretrieve(url, filename=None, reporthook=None, data=None):
    global _urlopener
    if not _urlopener:
        _urlopener = FancyURLopener()
    return _urlopener.retrieve(url, filename, reporthook, data)

def urlcleanup():
    if _urlopener:
        _urlopener.cleanup()
    global _opener
    if _opener:
        _opener = None

# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$", re.ASCII)
def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.

    """
    url = request.full_url
    host = urlparse(url)[1]
    if host == "":
        host = request.get_header("Host", "")

    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()

class Request:

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.full_url = unwrap(url)
        self.full_url, self.fragment = splittag(self.full_url)
        self.data = data
        self.headers = {}
        self._tunnel_host = None
        for key, value in headers.items():
            self.add_header(key, value)
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable
        self._parse()

    def _parse(self):
        self.type, rest = splittype(self.full_url)
        if self.type is None:
            raise ValueError("unknown url type: %s" % self.full_url)
        self.host, self.selector = splithost(rest)
        if self.host:
            self.host = unquote(self.host)

    def get_method(self):
        if self.data is not None:
            return "POST"
        else:
            return "GET"

    # Begin deprecated methods

    def add_data(self, data):
        self.data = data

    def has_data(self):
        return self.data is not None

    def get_data(self):
        return self.data

    def get_full_url(self):
        if self.fragment:
            return '%s#%s' % (self.full_url, self.fragment)
        else:
            return self.full_url

    def get_type(self):
        return self.type

    def get_host(self):
        return self.host

    def get_selector(self):
        return self.selector

    def is_unverifiable(self):
        return self.unverifiable

    def get_origin_req_host(self):
        return self.origin_req_host

    # End deprecated methods

    def set_proxy(self, host, type):
        if self.type == 'https' and not self._tunnel_host:
            self._tunnel_host = self.host
        else:
            self.type = type
            self.selector = self.full_url
        self.host = host

    def has_proxy(self):
        return self.selector == self.full_url

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def header_items(self):
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return list(hdrs.items())

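# Illustrative sketch (not part of the original module): building a Request by
# hand.  The URL, body and header values are placeholders.
#
#   req = Request('http://www.example.com/api',
#                 data=b'name=value',          # a bytes body makes this a POST
#                 headers={'User-agent': 'example-client/1.0'})
#   req.add_unredirected_header('Accept', 'text/html')
#   assert req.get_method() == 'POST'
#   # urlopen() and OpenerDirector.open() accept a Request wherever a URL
#   # string is accepted.
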
class OpenerDirector:
    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # self.handlers is retained only for backward compatibility
        self.handlers = []
        # manage the individual handlers
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could.  Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)
            result = func(*args)
            if result is not None:
                return result

    def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        # accept a URL or a Request object
        if isinstance(fullurl, str):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.data = data

        req.timeout = timeout
        protocol = req.type

        # pre-process request
        meth_name = protocol+"_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)

        response = self._open(req, data)

        # post-process response
        meth_name = protocol+"_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)

        return response

    def _open(self, req, data=None):
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.type
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http']  # https is no different from http here
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)

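# Illustrative sketch (not part of the original module): the _call_chain
# convention as seen from a handler.  A protocol_open method returns None to
# let the next handler try, returns a response object to stop the chain, or
# raises to abort.  The handler class below is hypothetical example code.
#
#   class BlockExampleComHandler(BaseHandler):
#       handler_order = 400              # run before HTTPHandler (order 500)
#
#       def http_open(self, req):
#           if req.host == 'www.example.com':
#               raise URLError('blocked by local policy')
#           return None                  # decline; let HTTPHandler handle it
#
#   opener = build_opener(BlockExampleComHandler)
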
# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both

def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and, when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    def isclass(obj):
        return isinstance(obj, type) or hasattr(obj, "__bases__")

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(http.client, "HTTPSConnection"):
        default_classes.append(HTTPSHandler)
    skip = set()
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener

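# Illustrative sketch (not part of the original module): how build_opener()
# mixes extra handlers with the defaults.  HTTPCookieProcessor is not a
# default handler, so it is simply added, while a subclass of a default
# handler (here HTTPRedirectHandler) replaces the default instance.  The
# LoggingRedirectHandler class is hypothetical.
#
#   class LoggingRedirectHandler(HTTPRedirectHandler):
#       def http_error_302(self, req, fp, code, msg, headers):
#           print('redirected to:', headers.get('Location'))
#           return HTTPRedirectHandler.http_error_302(
#               self, req, fp, code, msg, headers)
#
#   opener = build_opener(HTTPCookieProcessor(), LoggingRedirectHandler)
#   install_opener(opener)               # make it the opener urlopen() uses
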
class BaseHandler:
    handler_order = 500

    def add_parent(self, parent):
        self.parent = parent

    def close(self):
        # Only exists for backwards compatibility
        pass

    def __lt__(self, other):
        if not hasattr(other, "handler_order"):
            # Try to preserve the old behavior of having custom classes
            # inserted after default ones (works only for custom user
            # classes which are not aware of handler_order).
            return True
        return self.handler_order < other.handler_order


class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses."""
    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        code, msg, hdrs = response.code, response.msg, response.info()

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if not (200 <= code < 300):
            response = self.parent.error(
                'http', request, response, code, msg, hdrs)

        return response

    https_response = http_response

class HTTPDefaultErrorHandler(BaseHandler):
    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.full_url, code, msg, hdrs, fp)

class HTTPRedirectHandler(BaseHandler):
    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST")):
            raise HTTPError(req.full_url, code, msg, headers, fp)

        # Strictly (according to RFC 2616), 301 or 302 in response to
        # a POST MUST NOT cause a redirection without confirmation
        # from the user (of urllib.request, in this case).  In practice,
        # essentially all clients do redirect in this case, so we do
        # the same.
        # be lenient with URIs containing a space
        newurl = newurl.replace(' ', '%20')
        CONTENT_HEADERS = ("content-length", "content-type")
        newheaders = dict((k, v) for k, v in req.headers.items()
                          if k.lower() not in CONTENT_HEADERS)
        return Request(newurl,
                       headers=newheaders,
                       origin_req_host=req.origin_req_host,
                       unverifiable=True)

    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if "location" in headers:
            newurl = headers["location"]
        elif "uri" in headers:
            newurl = headers["uri"]
        else:
            return

        # fix a possible malformed URL
        urlparts = urlparse(newurl)

        # For security reasons we don't allow redirection to anything other
        # than http, https or ftp.

        if urlparts.scheme not in ('http', 'https', 'ftp'):
            raise HTTPError(newurl, code,
                            msg +
                            " - Redirection to url '%s' is not allowed" %
                            newurl,
                            headers, fp)

        if not urlparts.path:
            urlparts = list(urlparts)
            urlparts[2] = "/"
            newurl = urlunparse(urlparts)

        newurl = urljoin(req.full_url, newurl)

        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return

        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.full_url, code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1

        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()

        return self.parent.open(new, timeout=req.timeout)

    http_error_301 = http_error_303 = http_error_307 = http_error_302

    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"


def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')

    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # authority
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.
        # and 3.3.), path is empty or starts with '/'
        end = r_scheme.find("/", 2)
        if end == -1:
            end = None
        authority = r_scheme[2:end]
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport

class ProxyHandler(BaseHandler):
    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None):
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        self.proxies = proxies
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                        meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        orig_type = req.type
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            proxy_type = orig_type

        if req.host and proxy_bypass(req.host):
            return None

        if user and password:
            user_pass = '%s:%s' % (unquote(user),
                                   unquote(password))
            creds = base64.b64encode(user_pass.encode()).decode("ascii")
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req, timeout=req.timeout)

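# Illustrative sketch (not part of the original module): an explicit proxy map
# for ProxyHandler.  The proxy host and port are placeholders; called with no
# argument, ProxyHandler() falls back to getproxies(), i.e. the *_proxy
# environment variables.
#
#   proxy_handler = ProxyHandler({'http': 'http://proxy.example.com:3128',
#                                 'ftp': 'http://proxy.example.com:3128'})
#   opener = build_opener(proxy_handler)
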
class HTTPPasswordMgr:

    def __init__(self):
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, str):
            uri = [uri]
        if realm not in self.passwd:
            self.passwd[realm] = {}
        for default_port in True, False:
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.items():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False

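# Illustrative sketch (not part of the original module): how a password manager
# feeds the Basic and Digest handlers.  Realm, URI and credentials are
# placeholders.
#
#   mgr = HTTPPasswordMgrWithDefaultRealm()
#   # None as the realm makes these credentials the fallback for any realm.
#   mgr.add_password(None, 'http://www.example.com/private/', 'klem', 'secret')
#   opener = build_opener(HTTPBasicAuthHandler(mgr))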

class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):

    def find_user_password(self, realm, authuri):
        user, password = HTTPPasswordMgr.find_user_password(self, realm,
                                                            authuri)
        if user is not None:
            return user, password
        return HTTPPasswordMgr.find_user_password(self, None, authuri)


class AbstractBasicAuthHandler:

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # allow for double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild)
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\'])(.*?)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password
        self.retried = 0

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)

        if self.retried > 5:
            # retry sending the username:password 5 times before failing.
            raise HTTPError(req.get_full_url(), 401, "basic auth failed",
                            headers, None)
        else:
            self.retried += 1

        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() != 'basic':
                raise ValueError("AbstractBasicAuthHandler does not"
                                 " support the following scheme: '%s'" %
                                 scheme)
            else:
                mo = AbstractBasicAuthHandler.rx.search(authreq)
                if mo:
                    scheme, quote, realm = mo.groups()
                    if scheme.lower() == 'basic':
                        response = self.retry_http_basic_auth(host, req, realm)
                        if response and response.code != 401:
                            self.retried = 0
                        return response

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii")
            if req.headers.get(self.auth_header, None) == auth:
                return None
            req.add_unredirected_header(self.auth_header, auth)
            return self.parent.open(req, timeout=req.timeout)
        else:
            return None


class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        url = req.full_url
        response = self.http_error_auth_reqed('www-authenticate',
                                               url, req, headers)
        self.reset_retry_count()
        return response


class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component in
        # authority.  Assume there isn't one, since urllib.request does not (and
        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
        # userinfo.
        authority = req.host
        response = self.http_error_auth_reqed('proxy-authenticate',
                                               authority, req, headers)
        self.reset_retry_count()
        return response


def randombytes(n):
    """Return n random bytes."""
    return os.urandom(n)

class AbstractDigestAuthHandler:
    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX qop="auth-int" support is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time.  Hm.  Unless the Password Manager is
            # prompting for the information.  Crap.  This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.full_url, 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)
            elif scheme.lower() != 'basic':
                raise ValueError("AbstractDigestAuthHandler does not support"
                                 " the following scheme: '%s'" % scheme)

    def retry_http_digest_auth(self, req, auth):
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(filter(None, parse_http_list(challenge)))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req, timeout=req.timeout)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime())
        b = s.encode("ascii") + randombytes(8)
        dig = hashlib.sha1(b).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None

        user, pw = self.passwd.find_user_password(realm, req.full_url)
        if user is None:
            return None

        # XXX not implemented yet
        if req.data is not None:
            entdig = self.get_entity_digest(req.data, chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.selector)
        if qop == 'auth':
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)

        # XXX should the partial digests be encoded too?

        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.selector,
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest()
        # XXX MD5-sess
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None

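# Illustrative sketch (not part of the original module): the core of the
# RFC 2617 qop="auth" computation performed by get_authorization() above,
# spelled out with hashlib for a single hypothetical challenge.  Every value
# below is a placeholder, not a real exchange.
#
#   import hashlib
#   H = lambda x: hashlib.md5(x.encode('ascii')).hexdigest()
#   KD = lambda secret, data: H('%s:%s' % (secret, data))
#   A1 = 'klem:PDQ Application:secret'        # user:realm:password
#   A2 = 'GET:/private/index.html'            # method:selector
#   respdig = KD(H(A1), 'nonce:00000001:cnonce:auth:' + H(A2))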

class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        host = urlparse(req.full_url)[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry


class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        host = req.host
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry

class AbstractHTTPHandler(BaseHandler):

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        self._debuglevel = level

    def do_request_(self, request):
        host = request.host
        if not host:
            raise URLError('no host given')

        if request.data is not None:  # POST
            data = request.data
            if isinstance(data, str):
                raise TypeError("POST data should be bytes"
                                " or an iterable of bytes. It cannot be str.")
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                try:
                    mv = memoryview(data)
                except TypeError:
                    if isinstance(data, collections.Iterable):
                        raise ValueError("Content-Length should be specified "
                                         "for iterable data of type %r %r" %
                                         (type(data), data))
                else:
                    request.add_unredirected_header(
                        'Content-length', '%d' % (len(mv) * mv.itemsize))

        sel_host = host
        if request.has_proxy():
            scheme, sel = splittype(request.selector)
            sel_host, sel_path = splithost(sel)
        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req, **http_conn_args):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)

        headers = dict(req.unredirected_hdrs)
        headers.update(dict((k, v) for k, v in req.headers.items()
                            if k not in headers))

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict((name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            h.request(req.get_method(), req.selector, req.data, headers)
            r = h.getresponse()  # an HTTPResponse instance
        except socket.error as err:
            raise URLError(err)

        r.url = req.get_full_url()
        # This line replaces the .msg attribute of the HTTPResponse
        # with .headers, because urllib clients expect the response to
        # have the reason in .msg.  It would be good to mark this
        # attribute as deprecated and get them to use info() or
        # .headers.
        r.msg = r.reason
        return r


class HTTPHandler(AbstractHTTPHandler):

    def http_open(self, req):
        return self.do_open(http.client.HTTPConnection, req)

    http_request = AbstractHTTPHandler.do_request_

if hasattr(http.client, 'HTTPSConnection'):
    import ssl

    class HTTPSHandler(AbstractHTTPHandler):

        def __init__(self, debuglevel=0, context=None, check_hostname=None):
            AbstractHTTPHandler.__init__(self, debuglevel)
            self._context = context
            self._check_hostname = check_hostname

        def https_open(self, req):
            return self.do_open(http.client.HTTPSConnection, req,
                context=self._context, check_hostname=self._check_hostname)

        https_request = AbstractHTTPHandler.do_request_

class HTTPCookieProcessor(BaseHandler):
    def __init__(self, cookiejar=None):
        import http.cookiejar
        if cookiejar is None:
            cookiejar = http.cookiejar.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response

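# Illustrative sketch (not part of the original module): sharing one CookieJar
# across requests so cookies set by earlier responses are sent back later.
# The URLs are placeholders.
#
#   import http.cookiejar
#   jar = http.cookiejar.CookieJar()
#   opener = build_opener(HTTPCookieProcessor(jar))
#   opener.open('http://www.example.com/login')    # response cookies land in jar
#   opener.open('http://www.example.com/account')  # and are replayed here
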
class UnknownHandler(BaseHandler):
    def unknown_open(self, req):
        type = req.type
        raise URLError('unknown url type: %s' % type)

def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated."""
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        if v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed

def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.
    Only double-quotes count, not single-quotes.
    """
    res = []
    part = ''

    escape = quote = False
    for cur in s:
        if escape:
            part += cur
            escape = False
            continue
        if quote:
            if cur == '\\':
                escape = True
                continue
            elif cur == '"':
                quote = False
            part += cur
            continue

        if cur == ',':
            res.append(part)
            part = ''
            continue

        if cur == '"':
            quote = True

        part += cur

    # append last part
    if part:
        res.append(part)

    return [part.strip() for part in res]

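# Illustrative sketch (not part of the original module): how the two parsers
# above cooperate on a Digest challenge.  The header value is made up.
#
#   challenge = 'realm="me@example.com", qop="auth, auth-int", nonce="abc"'
#   parse_http_list(challenge)
#   # -> ['realm="me@example.com"', 'qop="auth, auth-int"', 'nonce="abc"']
#   parse_keqv_list(parse_http_list(challenge))
#   # -> {'realm': 'me@example.com', 'qop': 'auth, auth-int', 'nonce': 'abc'}
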
1259class FileHandler(BaseHandler):
1260 # Use local file or FTP depending on form of URL
1261 def file_open(self, req):
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001262 url = req.selector
Senthil Kumaran2ef16322010-07-11 03:12:43 +00001263 if url[:2] == '//' and url[2:3] != '/' and (req.host and
1264 req.host != 'localhost'):
Senthil Kumaran383c32d2010-10-14 11:57:35 +00001265 if not req.host is self.get_names():
1266 raise URLError("file:// scheme is supported only on localhost")
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001267 else:
1268 return self.open_local_file(req)
1269
1270 # names for the localhost
1271 names = None
1272 def get_names(self):
1273 if FileHandler.names is None:
1274 try:
Senthil Kumaran99b2c8f2009-12-27 10:13:39 +00001275 FileHandler.names = tuple(
1276 socket.gethostbyname_ex('localhost')[2] +
1277 socket.gethostbyname_ex(socket.gethostname())[2])
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001278 except socket.gaierror:
1279 FileHandler.names = (socket.gethostbyname('localhost'),)
1280 return FileHandler.names
1281
1282 # not entirely sure what the rules are here
1283 def open_local_file(self, req):
1284 import email.utils
1285 import mimetypes
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001286 host = req.host
Senthil Kumaran06f5a532010-05-08 05:12:05 +00001287 filename = req.selector
1288 localfile = url2pathname(filename)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001289 try:
1290 stats = os.stat(localfile)
1291 size = stats.st_size
1292 modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
Senthil Kumaran06f5a532010-05-08 05:12:05 +00001293 mtype = mimetypes.guess_type(filename)[0]
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001294 headers = email.message_from_string(
1295 'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
1296 (mtype or 'text/plain', size, modified))
1297 if host:
Georg Brandl13e89462008-07-01 19:56:00 +00001298 host, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001299 if not host or \
1300 (not port and _safe_gethostbyname(host) in self.get_names()):
Senthil Kumaran06f5a532010-05-08 05:12:05 +00001301 if host:
1302 origurl = 'file://' + host + filename
1303 else:
1304 origurl = 'file://' + filename
1305 return addinfourl(open(localfile, 'rb'), headers, origurl)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001306 except OSError as msg:
Georg Brandl029986a2008-06-23 11:44:14 +00001307 # users shouldn't expect OSErrors coming from urlopen()
Georg Brandl13e89462008-07-01 19:56:00 +00001308 raise URLError(msg)
1309 raise URLError('file not on local host')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001310
1311def _safe_gethostbyname(host):
1312 try:
1313 return socket.gethostbyname(host)
1314 except socket.gaierror:
1315 return None
1316
1317class FTPHandler(BaseHandler):
1318 def ftp_open(self, req):
1319 import ftplib
1320 import mimetypes
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001321 host = req.host
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001322 if not host:
Georg Brandl13e89462008-07-01 19:56:00 +00001323 raise URLError('ftp error: no host given')
1324 host, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001325 if port is None:
1326 port = ftplib.FTP_PORT
1327 else:
1328 port = int(port)
1329
1330 # username/password handling
Georg Brandl13e89462008-07-01 19:56:00 +00001331 user, host = splituser(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001332 if user:
Georg Brandl13e89462008-07-01 19:56:00 +00001333 user, passwd = splitpasswd(user)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001334 else:
1335 passwd = None
Georg Brandl13e89462008-07-01 19:56:00 +00001336 host = unquote(host)
Senthil Kumarandaa29d02010-11-18 15:36:41 +00001337 user = user or ''
1338 passwd = passwd or ''
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001339
1340 try:
1341 host = socket.gethostbyname(host)
1342 except socket.error as msg:
Georg Brandl13e89462008-07-01 19:56:00 +00001343 raise URLError(msg)
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001344 path, attrs = splitattr(req.selector)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001345 dirs = path.split('/')
Georg Brandl13e89462008-07-01 19:56:00 +00001346 dirs = list(map(unquote, dirs))
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001347 dirs, file = dirs[:-1], dirs[-1]
1348 if dirs and not dirs[0]:
1349 dirs = dirs[1:]
1350 try:
1351 fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
1352 type = file and 'I' or 'D'
1353 for attr in attrs:
Georg Brandl13e89462008-07-01 19:56:00 +00001354 attr, value = splitvalue(attr)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001355 if attr.lower() == 'type' and \
1356 value in ('a', 'A', 'i', 'I', 'd', 'D'):
1357 type = value.upper()
1358 fp, retrlen = fw.retrfile(file, type)
1359 headers = ""
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001360 mtype = mimetypes.guess_type(req.full_url)[0]
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001361 if mtype:
1362 headers += "Content-type: %s\n" % mtype
1363 if retrlen is not None and retrlen >= 0:
1364 headers += "Content-length: %d\n" % retrlen
1365 headers = email.message_from_string(headers)
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001366 return addinfourl(fp, headers, req.full_url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001367 except ftplib.all_errors as msg:
Georg Brandl13e89462008-07-01 19:56:00 +00001368 exc = URLError('ftp error: %s' % msg)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001369 raise exc.with_traceback(sys.exc_info()[2])
1370
1371 def connect_ftp(self, user, passwd, host, port, dirs, timeout):
1372 fw = ftpwrapper(user, passwd, host, port, dirs, timeout)
1373 return fw
1374
1375class CacheFTPHandler(FTPHandler):
1376 # XXX would be nice to have pluggable cache strategies
1377 # XXX this stuff is definitely not thread safe
1378 def __init__(self):
1379 self.cache = {}
1380 self.timeout = {}
1381 self.soonest = 0
1382 self.delay = 60
1383 self.max_conns = 16
1384
1385 def setTimeout(self, t):
1386 self.delay = t
1387
1388 def setMaxConns(self, m):
1389 self.max_conns = m
1390
1391 def connect_ftp(self, user, passwd, host, port, dirs, timeout):
1392 key = user, host, port, '/'.join(dirs), timeout
1393 if key in self.cache:
1394 self.timeout[key] = time.time() + self.delay
1395 else:
1396 self.cache[key] = ftpwrapper(user, passwd, host, port,
1397 dirs, timeout)
1398 self.timeout[key] = time.time() + self.delay
1399 self.check_cache()
1400 return self.cache[key]
1401
1402 def check_cache(self):
1403 # first check for old ones
1404 t = time.time()
1405 if self.soonest <= t:
1406 for k, v in list(self.timeout.items()):
1407 if v < t:
1408 self.cache[k].close()
1409 del self.cache[k]
1410 del self.timeout[k]
1411 self.soonest = min(list(self.timeout.values()))
1412
1413 # then check the size
1414 if len(self.cache) == self.max_conns:
1415 for k, v in list(self.timeout.items()):
1416 if v == self.soonest:
1417 del self.cache[k]
1418 del self.timeout[k]
1419 break
1420 self.soonest = min(list(self.timeout.values())) if self.timeout else 0
1421
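# Illustrative sketch (hedged): one way CacheFTPHandler might be wired into
# an opener so repeated requests to the same FTP server reuse one cached
# ftpwrapper connection.  The URL below is a placeholder; build_opener() is
# defined earlier in this module.
def _example_cached_ftp():
    handler = CacheFTPHandler()
    handler.setTimeout(30)      # keep idle connections around for 30 seconds
    handler.setMaxConns(4)      # cap the number of cached connections
    opener = build_opener(handler)
    f = opener.open('ftp://ftp.example.com/pub/README')
    try:
        return f.read(100)
    finally:
        f.close()
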
1422# Code moved from the old urllib module
1423
1424MAXFTPCACHE = 10 # Trim the ftp cache beyond this size
1425
1426# Helper for non-unix systems
Ronald Oussoren94f25282010-05-05 19:11:21 +00001427if os.name == 'nt':
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001428 from nturl2path import url2pathname, pathname2url
1429else:
1430 def url2pathname(pathname):
1431 """OS-specific conversion from a relative URL of the 'file' scheme
1432 to a file system path; not recommended for general use."""
Georg Brandl13e89462008-07-01 19:56:00 +00001433 return unquote(pathname)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001434
1435 def pathname2url(pathname):
1436 """OS-specific conversion from a file system path to a relative URL
1437 of the 'file' scheme; not recommended for general use."""
Georg Brandl13e89462008-07-01 19:56:00 +00001438 return quote(pathname)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001439
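# Illustrative sketch (hedged): on POSIX the fallbacks above are thin wrappers
# around quote()/unquote(), so a local path survives the round trip.  (On
# Windows the nturl2path versions imported above are used instead.)
def _example_path_url_roundtrip():
    url = pathname2url('/tmp/hello world.txt')   # '/tmp/hello%20world.txt'
    return url2pathname(url)                     # '/tmp/hello world.txt'
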
1440# This really consists of two pieces:
1441# (1) a class which handles opening of all sorts of URLs
1442# (plus assorted utilities etc.)
1443# (2) a set of functions for parsing URLs
1444# XXX Should these be separated out into different modules?
1445
1446
1447ftpcache = {}
1448class URLopener:
1449 """Class to open URLs.
1450 This is a class rather than just a subroutine because we may need
1451 more than one set of global protocol-specific options.
1452 Note -- this is a base class for those who don't want the
1453 automatic handling of error types 302 (relocated) and 401
1454 (authorization needed)."""
1455
1456 __tempfiles = None
1457
1458 version = "Python-urllib/%s" % __version__
1459
1460 # Constructor
1461 def __init__(self, proxies=None, **x509):
1462 if proxies is None:
1463 proxies = getproxies()
1464 assert hasattr(proxies, 'keys'), "proxies must be a mapping"
1465 self.proxies = proxies
1466 self.key_file = x509.get('key_file')
1467 self.cert_file = x509.get('cert_file')
1468 self.addheaders = [('User-Agent', self.version)]
1469 self.__tempfiles = []
1470 self.__unlink = os.unlink # See cleanup()
1471 self.tempcache = None
1472 # Undocumented feature: if you assign {} to tempcache,
1473 # it is used to cache files retrieved with
1474 # self.retrieve(). This is not enabled by default
1475 # since it does not work for changing documents (and I
1476 # haven't got the logic to check expiration headers
1477 # yet).
1478 self.ftpcache = ftpcache
1479 # Undocumented feature: you can use a different
1480 # ftp cache by assigning to the .ftpcache member;
1481 # in case you want logically independent URL openers
1482 # XXX This is not threadsafe. Bah.
1483
1484 def __del__(self):
1485 self.close()
1486
1487 def close(self):
1488 self.cleanup()
1489
1490 def cleanup(self):
1491 # This code sometimes runs when the rest of this module
1492 # has already been deleted, so it can't use any globals
1493 # or import anything.
1494 if self.__tempfiles:
1495 for file in self.__tempfiles:
1496 try:
1497 self.__unlink(file)
1498 except OSError:
1499 pass
1500 del self.__tempfiles[:]
1501 if self.tempcache:
1502 self.tempcache.clear()
1503
1504 def addheader(self, *args):
1505 """Add a header to be used by the HTTP interface only
1506 e.g. u.addheader('Accept', 'sound/basic')"""
1507 self.addheaders.append(args)
1508
1509 # External interface
1510 def open(self, fullurl, data=None):
1511 """Use URLopener().open(file) instead of open(file, 'r')."""
Georg Brandl13e89462008-07-01 19:56:00 +00001512 fullurl = unwrap(to_bytes(fullurl))
Senthil Kumaran734f0592010-02-20 22:19:04 +00001513 fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001514 if self.tempcache and fullurl in self.tempcache:
1515 filename, headers = self.tempcache[fullurl]
1516 fp = open(filename, 'rb')
Georg Brandl13e89462008-07-01 19:56:00 +00001517 return addinfourl(fp, headers, fullurl)
1518 urltype, url = splittype(fullurl)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001519 if not urltype:
1520 urltype = 'file'
1521 if urltype in self.proxies:
1522 proxy = self.proxies[urltype]
Georg Brandl13e89462008-07-01 19:56:00 +00001523 urltype, proxyhost = splittype(proxy)
1524 host, selector = splithost(proxyhost)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001525 url = (host, fullurl) # Signal special case to open_*()
1526 else:
1527 proxy = None
1528 name = 'open_' + urltype
1529 self.type = urltype
1530 name = name.replace('-', '_')
1531 if not hasattr(self, name):
1532 if proxy:
1533 return self.open_unknown_proxy(proxy, fullurl, data)
1534 else:
1535 return self.open_unknown(fullurl, data)
1536 try:
1537 if data is None:
1538 return getattr(self, name)(url)
1539 else:
1540 return getattr(self, name)(url, data)
1541 except socket.error as msg:
1542 raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
1543
1544 def open_unknown(self, fullurl, data=None):
1545 """Overridable interface to open unknown URL type."""
Georg Brandl13e89462008-07-01 19:56:00 +00001546 type, url = splittype(fullurl)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001547 raise IOError('url error', 'unknown url type', type)
1548
1549 def open_unknown_proxy(self, proxy, fullurl, data=None):
1550 """Overridable interface to open unknown URL type."""
Georg Brandl13e89462008-07-01 19:56:00 +00001551 type, url = splittype(fullurl)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001552 raise IOError('url error', 'invalid proxy for %s' % type, proxy)
1553
1554 # External interface
1555 def retrieve(self, url, filename=None, reporthook=None, data=None):
1556 """retrieve(url) returns (filename, headers) for a local object
1557 or (tempfilename, headers) for a remote object."""
Georg Brandl13e89462008-07-01 19:56:00 +00001558 url = unwrap(to_bytes(url))
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001559 if self.tempcache and url in self.tempcache:
1560 return self.tempcache[url]
Georg Brandl13e89462008-07-01 19:56:00 +00001561 type, url1 = splittype(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001562 if filename is None and (not type or type == 'file'):
1563 try:
1564 fp = self.open_local_file(url1)
1565 hdrs = fp.info()
Philip Jenveycb134d72009-12-03 02:45:01 +00001566 fp.close()
Georg Brandl13e89462008-07-01 19:56:00 +00001567 return url2pathname(splithost(url1)[1]), hdrs
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001568 except IOError as msg:
1569 pass
1570 fp = self.open(url, data)
Benjamin Peterson5f28b7b2009-03-26 21:49:58 +00001571 try:
1572 headers = fp.info()
1573 if filename:
1574 tfp = open(filename, 'wb')
1575 else:
1576 import tempfile
1577 garbage, path = splittype(url)
1578 garbage, path = splithost(path or "")
1579 path, garbage = splitquery(path or "")
1580 path, garbage = splitattr(path or "")
1581 suffix = os.path.splitext(path)[1]
1582 (fd, filename) = tempfile.mkstemp(suffix)
1583 self.__tempfiles.append(filename)
1584 tfp = os.fdopen(fd, 'wb')
1585 try:
1586 result = filename, headers
1587 if self.tempcache is not None:
1588 self.tempcache[url] = result
1589 bs = 1024*8
1590 size = -1
1591 read = 0
1592 blocknum = 0
1593 if reporthook:
1594 if "content-length" in headers:
1595 size = int(headers["Content-Length"])
1596 reporthook(blocknum, bs, size)
1597 while 1:
1598 block = fp.read(bs)
1599 if not block:
1600 break
1601 read += len(block)
1602 tfp.write(block)
1603 blocknum += 1
1604 if reporthook:
1605 reporthook(blocknum, bs, size)
1606 finally:
1607 tfp.close()
1608 finally:
1609 fp.close()
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001610
1611 # raise exception if actual size does not match content-length header
1612 if size >= 0 and read < size:
Georg Brandl13e89462008-07-01 19:56:00 +00001613 raise ContentTooShortError(
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001614 "retrieval incomplete: got only %i out of %i bytes"
1615 % (read, size), result)
1616
1617 return result
1618
1619 # Each method named open_<type> knows how to open that type of URL
1620
1621 def _open_generic_http(self, connection_factory, url, data):
1622 """Make an HTTP connection using connection_class.
1623
1624 This is an internal method that should be called from
1625 open_http() or open_https().
1626
1627 Arguments:
1628 - connection_factory should take a host name and return an
1629 HTTPConnection instance.
1630 - url is the URL to retrieve or a (host, relative-path) pair.
1631 - data is payload for a POST request or None.
1632 """
1633
1634 user_passwd = None
1635 proxy_passwd= None
1636 if isinstance(url, str):
Georg Brandl13e89462008-07-01 19:56:00 +00001637 host, selector = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001638 if host:
Georg Brandl13e89462008-07-01 19:56:00 +00001639 user_passwd, host = splituser(host)
1640 host = unquote(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001641 realhost = host
1642 else:
1643 host, selector = url
1644 # check whether the proxy contains authorization information
Georg Brandl13e89462008-07-01 19:56:00 +00001645 proxy_passwd, host = splituser(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001646 # now we proceed with the url we want to obtain
Georg Brandl13e89462008-07-01 19:56:00 +00001647 urltype, rest = splittype(selector)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001648 url = rest
1649 user_passwd = None
1650 if urltype.lower() != 'http':
1651 realhost = None
1652 else:
Georg Brandl13e89462008-07-01 19:56:00 +00001653 realhost, rest = splithost(rest)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001654 if realhost:
Georg Brandl13e89462008-07-01 19:56:00 +00001655 user_passwd, realhost = splituser(realhost)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001656 if user_passwd:
1657 selector = "%s://%s%s" % (urltype, realhost, rest)
1658 if proxy_bypass(realhost):
1659 host = realhost
1660
1661 #print "proxy via http:", host, selector
1662 if not host: raise IOError('http error', 'no host given')
1663
1664 if proxy_passwd:
1665 import base64
Senthil Kumaran5626eec2010-08-04 17:46:23 +00001666 proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001667 else:
1668 proxy_auth = None
1669
1670 if user_passwd:
1671 import base64
Senthil Kumaran5626eec2010-08-04 17:46:23 +00001672 auth = base64.b64encode(user_passwd.encode()).decode('ascii')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001673 else:
1674 auth = None
1675 http_conn = connection_factory(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001676 headers = {}
1677 if proxy_auth:
1678 headers["Proxy-Authorization"] = "Basic %s" % proxy_auth
1679 if auth:
1680 headers["Authorization"] = "Basic %s" % auth
1681 if realhost:
1682 headers["Host"] = realhost
Senthil Kumarand91ffca2011-03-19 17:25:27 +08001683
1684 # Add Connection: close, as we don't support persistent connections yet.
1685 # This helps close the socket promptly and avoid a ResourceWarning.
1686
1687 headers["Connection"] = "close"
1688
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001689 for header, value in self.addheaders:
1690 headers[header] = value
1691
1692 if data is not None:
1693 headers["Content-Type"] = "application/x-www-form-urlencoded"
1694 http_conn.request("POST", selector, data, headers)
1695 else:
1696 http_conn.request("GET", selector, headers=headers)
1697
1698 try:
1699 response = http_conn.getresponse()
1700 except http.client.BadStatusLine:
1701 # something went wrong with the HTTP status line
Georg Brandl13e89462008-07-01 19:56:00 +00001702 raise URLError("http protocol error: bad status line")
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001703
1704 # According to RFC 2616, "2xx" code indicates that the client's
1705 # request was successfully received, understood, and accepted.
1706 if 200 <= response.status < 300:
Antoine Pitroub353c122009-02-11 00:39:14 +00001707 return addinfourl(response, response.msg, "http:" + url,
Georg Brandl13e89462008-07-01 19:56:00 +00001708 response.status)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001709 else:
1710 return self.http_error(
1711 url, response.fp,
1712 response.status, response.reason, response.msg, data)
1713
1714 def open_http(self, url, data=None):
1715 """Use HTTP protocol."""
1716 return self._open_generic_http(http.client.HTTPConnection, url, data)
1717
1718 def http_error(self, url, fp, errcode, errmsg, headers, data=None):
1719 """Handle http errors.
1720
1721 Derived class can override this, or provide specific handlers
1722 named http_error_DDD where DDD is the 3-digit error code."""
1723 # First check if there's a specific handler for this error
1724 name = 'http_error_%d' % errcode
1725 if hasattr(self, name):
1726 method = getattr(self, name)
1727 if data is None:
1728 result = method(url, fp, errcode, errmsg, headers)
1729 else:
1730 result = method(url, fp, errcode, errmsg, headers, data)
1731 if result: return result
1732 return self.http_error_default(url, fp, errcode, errmsg, headers)
1733
1734 def http_error_default(self, url, fp, errcode, errmsg, headers):
1735 """Default error handler: close the connection and raise IOError."""
1736 void = fp.read()
1737 fp.close()
Georg Brandl13e89462008-07-01 19:56:00 +00001738 raise HTTPError(url, errcode, errmsg, headers, None)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001739
1740 if _have_ssl:
1741 def _https_connection(self, host):
1742 return http.client.HTTPSConnection(host,
1743 key_file=self.key_file,
1744 cert_file=self.cert_file)
1745
1746 def open_https(self, url, data=None):
1747 """Use HTTPS protocol."""
1748 return self._open_generic_http(self._https_connection, url, data)
1749
1750 def open_file(self, url):
1751 """Use local file or FTP depending on form of URL."""
1752 if not isinstance(url, str):
1753 raise URLError('file error', 'proxy support for file protocol currently not implemented')
1754 if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
Senthil Kumaran383c32d2010-10-14 11:57:35 +00001755 raise ValueError("file:// scheme is supported only on localhost")
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001756 else:
1757 return self.open_local_file(url)
1758
1759 def open_local_file(self, url):
1760 """Use local file."""
1761 import mimetypes, email.utils
1762 from io import StringIO
Georg Brandl13e89462008-07-01 19:56:00 +00001763 host, file = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001764 localname = url2pathname(file)
1765 try:
1766 stats = os.stat(localname)
1767 except OSError as e:
1768 raise URLError(e.strerror, e.filename)
1769 size = stats.st_size
1770 modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
1771 mtype = mimetypes.guess_type(url)[0]
1772 headers = email.message_from_string(
1773 'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
1774 (mtype or 'text/plain', size, modified))
1775 if not host:
1776 urlfile = file
1777 if file[:1] == '/':
1778 urlfile = 'file://' + file
Georg Brandl13e89462008-07-01 19:56:00 +00001779 return addinfourl(open(localname, 'rb'), headers, urlfile)
1780 host, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001781 if (not port
Senthil Kumaran99b2c8f2009-12-27 10:13:39 +00001782 and socket.gethostbyname(host) in ((localhost(),) + thishost())):
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001783 urlfile = file
1784 if file[:1] == '/':
1785 urlfile = 'file://' + file
Georg Brandl13e89462008-07-01 19:56:00 +00001786 return addinfourl(open(localname, 'rb'), headers, urlfile)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001787 raise URLError('local file error', 'not on local host')
1788
1789 def open_ftp(self, url):
1790 """Use FTP protocol."""
1791 if not isinstance(url, str):
1792 raise URLError('ftp error', 'proxy support for ftp protocol currently not implemented')
1793 import mimetypes
1794 from io import StringIO
Georg Brandl13e89462008-07-01 19:56:00 +00001795 host, path = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001796 if not host: raise URLError('ftp error', 'no host given')
Georg Brandl13e89462008-07-01 19:56:00 +00001797 host, port = splitport(host)
1798 user, host = splituser(host)
1799 if user: user, passwd = splitpasswd(user)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001800 else: passwd = None
Georg Brandl13e89462008-07-01 19:56:00 +00001801 host = unquote(host)
1802 user = unquote(user or '')
1803 passwd = unquote(passwd or '')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001804 host = socket.gethostbyname(host)
1805 if not port:
1806 import ftplib
1807 port = ftplib.FTP_PORT
1808 else:
1809 port = int(port)
Georg Brandl13e89462008-07-01 19:56:00 +00001810 path, attrs = splitattr(path)
1811 path = unquote(path)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001812 dirs = path.split('/')
1813 dirs, file = dirs[:-1], dirs[-1]
1814 if dirs and not dirs[0]: dirs = dirs[1:]
1815 if dirs and not dirs[0]: dirs[0] = '/'
1816 key = user, host, port, '/'.join(dirs)
1817 # XXX thread unsafe!
1818 if len(self.ftpcache) > MAXFTPCACHE:
1819 # Prune the cache, rather arbitrarily
1820 for k in list(self.ftpcache):
1821 if k != key:
1822 v = self.ftpcache[k]
1823 del self.ftpcache[k]
1824 v.close()
1825 try:
1826 if not key in self.ftpcache:
1827 self.ftpcache[key] = \
1828 ftpwrapper(user, passwd, host, port, dirs)
1829 if not file: type = 'D'
1830 else: type = 'I'
1831 for attr in attrs:
Georg Brandl13e89462008-07-01 19:56:00 +00001832 attr, value = splitvalue(attr)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001833 if attr.lower() == 'type' and \
1834 value in ('a', 'A', 'i', 'I', 'd', 'D'):
1835 type = value.upper()
1836 (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
1837 mtype = mimetypes.guess_type("ftp:" + url)[0]
1838 headers = ""
1839 if mtype:
1840 headers += "Content-Type: %s\n" % mtype
1841 if retrlen is not None and retrlen >= 0:
1842 headers += "Content-Length: %d\n" % retrlen
1843 headers = email.message_from_string(headers)
Georg Brandl13e89462008-07-01 19:56:00 +00001844 return addinfourl(fp, headers, "ftp:" + url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001845 except ftperrors() as msg:
1846 raise URLError('ftp error', msg).with_traceback(sys.exc_info()[2])
1847
1848 def open_data(self, url, data=None):
1849 """Use "data" URL."""
1850 if not isinstance(url, str):
1851 raise URLError('data error', 'proxy support for data protocol currently not implemented')
1852 # ignore POSTed data
1853 #
1854 # syntax of data URLs:
1855 # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
1856 # mediatype := [ type "/" subtype ] *( ";" parameter )
1857 # data := *urlchar
1858 # parameter := attribute "=" value
1859 try:
1860 [type, data] = url.split(',', 1)
1861 except ValueError:
1862 raise IOError('data error', 'bad data URL')
1863 if not type:
1864 type = 'text/plain;charset=US-ASCII'
1865 semi = type.rfind(';')
1866 if semi >= 0 and '=' not in type[semi:]:
1867 encoding = type[semi+1:]
1868 type = type[:semi]
1869 else:
1870 encoding = ''
1871 msg = []
Senthil Kumaranf6c456d2010-05-01 08:29:18 +00001872 msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT',
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001873 time.gmtime(time.time())))
1874 msg.append('Content-type: %s' % type)
1875 if encoding == 'base64':
1876 import base64
Georg Brandl706824f2009-06-04 09:42:55 +00001877 # XXX is this encoding/decoding ok?
Marc-André Lemburg8f36af72011-02-25 15:42:01 +00001878 data = base64.decodebytes(data.encode('ascii')).decode('latin-1')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001879 else:
Georg Brandl13e89462008-07-01 19:56:00 +00001880 data = unquote(data)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001881 msg.append('Content-Length: %d' % len(data))
1882 msg.append('')
1883 msg.append(data)
1884 msg = '\n'.join(msg)
Georg Brandl13e89462008-07-01 19:56:00 +00001885 headers = email.message_from_string(msg)
1886 f = io.StringIO(msg)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001887 #f.fileno = None # needed for addinfourl
Georg Brandl13e89462008-07-01 19:56:00 +00001888 return addinfourl(f, headers, url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001889
1890
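# Illustrative sketch (hedged): exercising the legacy URLopener defined above
# with a data: URL, which keeps the example self-contained.  open() dispatches
# to open_data() through the open_<scheme> naming convention used in open().
def _example_urlopener_data_url():
    opener = URLopener()
    f = opener.open('data:text/plain;charset=US-ASCII,Hello%2C%20world')
    try:
        # Note: because open_data() wraps the whole message in a StringIO,
        # read() returns the synthesized headers followed by the decoded body.
        return f.info().get_content_type(), f.read()
    finally:
        f.close()
# For remote resources, URLopener().retrieve(url) downloads to a (temporary)
# file and returns (filename, headers); an optional reporthook callback is
# invoked as (block_number, block_size, total_size) while blocks arrive.
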
1891class FancyURLopener(URLopener):
1892 """Derived class with handlers for errors we can handle (perhaps)."""
1893
1894 def __init__(self, *args, **kwargs):
1895 URLopener.__init__(self, *args, **kwargs)
1896 self.auth_cache = {}
1897 self.tries = 0
1898 self.maxtries = 10
1899
1900 def http_error_default(self, url, fp, errcode, errmsg, headers):
1901 """Default error handling -- don't raise an exception."""
Georg Brandl13e89462008-07-01 19:56:00 +00001902 return addinfourl(fp, headers, "http:" + url, errcode)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001903
1904 def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
1905 """Error 302 -- relocated (temporarily)."""
1906 self.tries += 1
1907 if self.maxtries and self.tries >= self.maxtries:
1908 if hasattr(self, "http_error_500"):
1909 meth = self.http_error_500
1910 else:
1911 meth = self.http_error_default
1912 self.tries = 0
1913 return meth(url, fp, 500,
1914 "Internal Server Error: Redirect Recursion", headers)
1915 result = self.redirect_internal(url, fp, errcode, errmsg, headers,
1916 data)
1917 self.tries = 0
1918 return result
1919
1920 def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
1921 if 'location' in headers:
1922 newurl = headers['location']
1923 elif 'uri' in headers:
1924 newurl = headers['uri']
1925 else:
1926 return
1927 void = fp.read()
1928 fp.close()
guido@google.coma119df92011-03-29 11:41:02 -07001929
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001930 # In case the server sent a relative URL, join with original:
Georg Brandl13e89462008-07-01 19:56:00 +00001931 newurl = urljoin(self.type + ":" + url, newurl)
guido@google.coma119df92011-03-29 11:41:02 -07001932
1933 urlparts = urlparse(newurl)
1934
1935 # For security reasons, we don't allow redirection to anything other
1936 # than http, https and ftp.
1937
1938 # We are using the newer HTTPError with the older redirect_internal method;
1939 # this older method will be deprecated in 3.3.
1940
1941 if not urlparts.scheme in ('http', 'https', 'ftp'):
1942 raise HTTPError(newurl, errcode,
1943 errmsg +
1944 " Redirection to url '%s' is not allowed." % newurl,
1945 headers, fp)
1946
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001947 return self.open(newurl)
1948
1949 def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
1950 """Error 301 -- also relocated (permanently)."""
1951 return self.http_error_302(url, fp, errcode, errmsg, headers, data)
1952
1953 def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
1954 """Error 303 -- also relocated (essentially identical to 302)."""
1955 return self.http_error_302(url, fp, errcode, errmsg, headers, data)
1956
1957 def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
1958 """Error 307 -- relocated, but turn POST into error."""
1959 if data is None:
1960 return self.http_error_302(url, fp, errcode, errmsg, headers, data)
1961 else:
1962 return self.http_error_default(url, fp, errcode, errmsg, headers)
1963
Senthil Kumaran80f1b052010-06-18 15:08:18 +00001964 def http_error_401(self, url, fp, errcode, errmsg, headers, data=None,
1965 retry=False):
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001966 """Error 401 -- authentication required.
1967 This function supports Basic authentication only."""
1968 if not 'www-authenticate' in headers:
1969 URLopener.http_error_default(self, url, fp,
1970 errcode, errmsg, headers)
1971 stuff = headers['www-authenticate']
1972 import re
1973 match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
1974 if not match:
1975 URLopener.http_error_default(self, url, fp,
1976 errcode, errmsg, headers)
1977 scheme, realm = match.groups()
1978 if scheme.lower() != 'basic':
1979 URLopener.http_error_default(self, url, fp,
1980 errcode, errmsg, headers)
Senthil Kumaran80f1b052010-06-18 15:08:18 +00001981 if not retry:
1982 URLopener.http_error_default(self, url, fp, errcode, errmsg,
1983 headers)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001984 name = 'retry_' + self.type + '_basic_auth'
1985 if data is None:
1986 return getattr(self,name)(url, realm)
1987 else:
1988 return getattr(self,name)(url, realm, data)
1989
Senthil Kumaran80f1b052010-06-18 15:08:18 +00001990 def http_error_407(self, url, fp, errcode, errmsg, headers, data=None,
1991 retry=False):
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001992 """Error 407 -- proxy authentication required.
1993 This function supports Basic authentication only."""
1994 if not 'proxy-authenticate' in headers:
1995 URLopener.http_error_default(self, url, fp,
1996 errcode, errmsg, headers)
1997 stuff = headers['proxy-authenticate']
1998 import re
1999 match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
2000 if not match:
2001 URLopener.http_error_default(self, url, fp,
2002 errcode, errmsg, headers)
2003 scheme, realm = match.groups()
2004 if scheme.lower() != 'basic':
2005 URLopener.http_error_default(self, url, fp,
2006 errcode, errmsg, headers)
Senthil Kumaran80f1b052010-06-18 15:08:18 +00002007 if not retry:
2008 URLopener.http_error_default(self, url, fp, errcode, errmsg,
2009 headers)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002010 name = 'retry_proxy_' + self.type + '_basic_auth'
2011 if data is None:
2012 return getattr(self,name)(url, realm)
2013 else:
2014 return getattr(self,name)(url, realm, data)
2015
2016 def retry_proxy_http_basic_auth(self, url, realm, data=None):
Georg Brandl13e89462008-07-01 19:56:00 +00002017 host, selector = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002018 newurl = 'http://' + host + selector
2019 proxy = self.proxies['http']
Georg Brandl13e89462008-07-01 19:56:00 +00002020 urltype, proxyhost = splittype(proxy)
2021 proxyhost, proxyselector = splithost(proxyhost)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002022 i = proxyhost.find('@') + 1
2023 proxyhost = proxyhost[i:]
2024 user, passwd = self.get_user_passwd(proxyhost, realm, i)
2025 if not (user or passwd): return None
Georg Brandl13e89462008-07-01 19:56:00 +00002026 proxyhost = "%s:%s@%s" % (quote(user, safe=''),
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002027 quote(passwd, safe=''), proxyhost)
2028 self.proxies['http'] = 'http://' + proxyhost + proxyselector
2029 if data is None:
2030 return self.open(newurl)
2031 else:
2032 return self.open(newurl, data)
2033
2034 def retry_proxy_https_basic_auth(self, url, realm, data=None):
Georg Brandl13e89462008-07-01 19:56:00 +00002035 host, selector = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002036 newurl = 'https://' + host + selector
2037 proxy = self.proxies['https']
Georg Brandl13e89462008-07-01 19:56:00 +00002038 urltype, proxyhost = splittype(proxy)
2039 proxyhost, proxyselector = splithost(proxyhost)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002040 i = proxyhost.find('@') + 1
2041 proxyhost = proxyhost[i:]
2042 user, passwd = self.get_user_passwd(proxyhost, realm, i)
2043 if not (user or passwd): return None
Georg Brandl13e89462008-07-01 19:56:00 +00002044 proxyhost = "%s:%s@%s" % (quote(user, safe=''),
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002045 quote(passwd, safe=''), proxyhost)
2046 self.proxies['https'] = 'https://' + proxyhost + proxyselector
2047 if data is None:
2048 return self.open(newurl)
2049 else:
2050 return self.open(newurl, data)
2051
2052 def retry_http_basic_auth(self, url, realm, data=None):
Georg Brandl13e89462008-07-01 19:56:00 +00002053 host, selector = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002054 i = host.find('@') + 1
2055 host = host[i:]
2056 user, passwd = self.get_user_passwd(host, realm, i)
2057 if not (user or passwd): return None
Georg Brandl13e89462008-07-01 19:56:00 +00002058 host = "%s:%s@%s" % (quote(user, safe=''),
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002059 quote(passwd, safe=''), host)
2060 newurl = 'http://' + host + selector
2061 if data is None:
2062 return self.open(newurl)
2063 else:
2064 return self.open(newurl, data)
2065
2066 def retry_https_basic_auth(self, url, realm, data=None):
Georg Brandl13e89462008-07-01 19:56:00 +00002067 host, selector = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002068 i = host.find('@') + 1
2069 host = host[i:]
2070 user, passwd = self.get_user_passwd(host, realm, i)
2071 if not (user or passwd): return None
Georg Brandl13e89462008-07-01 19:56:00 +00002072 host = "%s:%s@%s" % (quote(user, safe=''),
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002073 quote(passwd, safe=''), host)
2074 newurl = 'https://' + host + selector
2075 if data is None:
2076 return self.open(newurl)
2077 else:
2078 return self.open(newurl, data)
2079
Florent Xicluna757445b2010-05-17 17:24:07 +00002080 def get_user_passwd(self, host, realm, clear_cache=0):
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002081 key = realm + '@' + host.lower()
2082 if key in self.auth_cache:
2083 if clear_cache:
2084 del self.auth_cache[key]
2085 else:
2086 return self.auth_cache[key]
2087 user, passwd = self.prompt_user_passwd(host, realm)
2088 if user or passwd: self.auth_cache[key] = (user, passwd)
2089 return user, passwd
2090
2091 def prompt_user_passwd(self, host, realm):
2092 """Override this in a GUI environment!"""
2093 import getpass
2094 try:
2095 user = input("Enter username for %s at %s: " % (realm, host))
2096 passwd = getpass.getpass("Enter password for %s in %s at %s: " %
2097 (user, realm, host))
2098 return user, passwd
2099 except KeyboardInterrupt:
2100 print()
2101 return None, None
2102
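# Illustrative sketch (hedged): FancyURLopener resolves Basic-auth credentials
# through get_user_passwd(), which caches per realm/host and falls back to
# prompt_user_passwd(); a subclass can supply credentials non-interactively.
# The host, realm and credentials below are placeholders.
def _example_fancy_credentials():
    class _CannedAuthOpener(FancyURLopener):
        def prompt_user_passwd(self, host, realm):
            return 'klem', 'geheim$parole'
    opener = _CannedAuthOpener()
    # The result is remembered in opener.auth_cache for later requests.
    return opener.get_user_passwd('mahler:8092', 'PDQ Application')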
2103
2104# Utility functions
2105
2106_localhost = None
2107def localhost():
2108 """Return the IP address of the magic hostname 'localhost'."""
2109 global _localhost
2110 if _localhost is None:
2111 _localhost = socket.gethostbyname('localhost')
2112 return _localhost
2113
2114_thishost = None
2115def thishost():
Senthil Kumaran99b2c8f2009-12-27 10:13:39 +00002116 """Return the IP addresses of the current host."""
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002117 global _thishost
2118 if _thishost is None:
Senthil Kumaran99b2c8f2009-12-27 10:13:39 +00002119 _thishost = tuple(socket.gethostbyname_ex(socket.gethostname())[2])
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002120 return _thishost
2121
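# Illustrative sketch (hedged): both helpers above compute their value once
# and cache it in a module-level global.
def _example_host_addresses():
    # e.g. ('127.0.0.1', ('192.0.2.10',)) -- actual values depend on the host
    return localhost(), thishost()
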
2122_ftperrors = None
2123def ftperrors():
2124 """Return the set of errors raised by the FTP class."""
2125 global _ftperrors
2126 if _ftperrors is None:
2127 import ftplib
2128 _ftperrors = ftplib.all_errors
2129 return _ftperrors
2130
2131_noheaders = None
2132def noheaders():
Georg Brandl13e89462008-07-01 19:56:00 +00002133 """Return an empty email Message object."""
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002134 global _noheaders
2135 if _noheaders is None:
Georg Brandl13e89462008-07-01 19:56:00 +00002136 _noheaders = email.message_from_string("")
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002137 return _noheaders
2138
2139
2140# Utility classes
2141
2142class ftpwrapper:
2143 """Class used by open_ftp() for cache of open FTP connections."""
2144
2145 def __init__(self, user, passwd, host, port, dirs, timeout=None):
2146 self.user = user
2147 self.passwd = passwd
2148 self.host = host
2149 self.port = port
2150 self.dirs = dirs
2151 self.timeout = timeout
2152 self.init()
2153
2154 def init(self):
2155 import ftplib
2156 self.busy = 0
2157 self.ftp = ftplib.FTP()
2158 self.ftp.connect(self.host, self.port, self.timeout)
2159 self.ftp.login(self.user, self.passwd)
2160 for dir in self.dirs:
2161 self.ftp.cwd(dir)
2162
2163 def retrfile(self, file, type):
2164 import ftplib
2165 self.endtransfer()
2166 if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
2167 else: cmd = 'TYPE ' + type; isdir = 0
2168 try:
2169 self.ftp.voidcmd(cmd)
2170 except ftplib.all_errors:
2171 self.init()
2172 self.ftp.voidcmd(cmd)
2173 conn = None
2174 if file and not isdir:
2175 # Try to retrieve as a file
2176 try:
2177 cmd = 'RETR ' + file
Senthil Kumaran2024acd2011-03-24 11:46:19 +08002178 conn, retrlen = self.ftp.ntransfercmd(cmd)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002179 except ftplib.error_perm as reason:
2180 if str(reason)[:3] != '550':
Georg Brandl13e89462008-07-01 19:56:00 +00002181 raise URLError('ftp error', reason).with_traceback(
2182 sys.exc_info()[2])
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002183 if not conn:
2184 # Set transfer mode to ASCII!
2185 self.ftp.voidcmd('TYPE A')
2186 # Try a directory listing. Verify that directory exists.
2187 if file:
2188 pwd = self.ftp.pwd()
2189 try:
2190 try:
2191 self.ftp.cwd(file)
2192 except ftplib.error_perm as reason:
Georg Brandl13e89462008-07-01 19:56:00 +00002193 raise URLError('ftp error', reason) from reason
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002194 finally:
2195 self.ftp.cwd(pwd)
2196 cmd = 'LIST ' + file
2197 else:
2198 cmd = 'LIST'
Senthil Kumaran2024acd2011-03-24 11:46:19 +08002199 conn, retrlen = self.ftp.ntransfercmd(cmd)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002200 self.busy = 1
Senthil Kumaran2024acd2011-03-24 11:46:19 +08002201
2202 ftpobj = addclosehook(conn.makefile('rb'), self.endtransfer)
2203 conn.close()
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002204 # Pass back both a suitably decorated object and a retrieval length
Senthil Kumaran2024acd2011-03-24 11:46:19 +08002205 return (ftpobj, retrlen)
2206
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002207 def endtransfer(self):
2208 if not self.busy:
2209 return
2210 self.busy = 0
2211 try:
2212 self.ftp.voidresp()
2213 except ftperrors():
2214 pass
2215
2216 def close(self):
2217 self.endtransfer()
2218 try:
2219 self.ftp.close()
2220 except ftperrors():
2221 pass
2222
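# Illustrative sketch (hedged): driving ftpwrapper directly.  The constructor
# logs in and changes into the given directories; retrfile() returns a
# file-like object plus the length advertised by the server (or None).
# Host, directory, file name and credentials below are placeholders.
def _example_ftpwrapper():
    fw = ftpwrapper('anonymous', 'anonymous@', 'ftp.example.com', 21, ['pub'])
    try:
        fp, retrlen = fw.retrfile('README', 'I')   # 'I' = binary ("image")
        try:
            return fp.read(64), retrlen
        finally:
            fp.close()
    finally:
        fw.close()
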
2223# Proxy handling
2224def getproxies_environment():
2225 """Return a dictionary of scheme -> proxy server URL mappings.
2226
2227 Scan the environment for variables named <scheme>_proxy;
2228 this seems to be the standard convention. If you need a
2229 different way, you can pass a proxies dictionary to the
2230 [Fancy]URLopener constructor.
2231
2232 """
2233 proxies = {}
2234 for name, value in os.environ.items():
2235 name = name.lower()
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002236 if value and name[-6:] == '_proxy':
2237 proxies[name[:-6]] = value
2238 return proxies
2239
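# Illustrative sketch (hedged): the alternative mentioned in the docstring
# above -- pass an explicit scheme -> proxy URL mapping to the opener instead
# of relying on <scheme>_proxy environment variables.  The proxy host is a
# placeholder.
def _example_explicit_proxies():
    proxies = {'http': 'http://proxy.example.com:3128'}
    opener = FancyURLopener(proxies)
    return opener.proxies['http']
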
2240def proxy_bypass_environment(host):
2241 """Test if proxies should not be used for a particular host.
2242
2243 Checks the environment for a variable named no_proxy, which should
2244 be a list of DNS suffixes separated by commas, or '*' for all hosts.
2245 """
2246 no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
2247 # '*' is special case for always bypass
2248 if no_proxy == '*':
2249 return 1
2250 # strip port off host
Georg Brandl13e89462008-07-01 19:56:00 +00002251 hostonly, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002252 # check if the host ends with any of the DNS suffixes
2253 for name in no_proxy.split(','):
2254 if name and (hostonly.endswith(name) or host.endswith(name)):
2255 return 1
2256 # otherwise, don't bypass
2257 return 0
2258
2259
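# Illustrative sketch (hedged): how the no_proxy variable drives the check
# above.  The hosts are placeholders and the variable is restored afterwards.
def _example_no_proxy():
    saved = os.environ.get('no_proxy')
    os.environ['no_proxy'] = 'example.com,.internal'
    try:
        return (proxy_bypass_environment('www.example.com'),   # 1 (bypass)
                proxy_bypass_environment('www.python.org'))    # 0 (use proxy)
    finally:
        if saved is None:
            del os.environ['no_proxy']
        else:
            os.environ['no_proxy'] = saved
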
Ronald Oussorene72e1612011-03-14 18:15:25 -04002260# This code checks an OS X-specific data structure but can be tested on all
2261# platforms.
2262def _proxy_bypass_macosx_sysconf(host, proxy_settings):
2263 """
2264 Return True iff this host shouldn't be accessed using a proxy
2265
2266 This function uses the MacOSX framework SystemConfiguration
2267 to fetch the proxy information.
2268
2269 proxy_settings comes from _scproxy._get_proxy_settings, or can be mocked, e.g.:
2270 { 'exclude_simple': bool,
2271 'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16']
2272 }
2273 """
2274 import re
2275 import socket
2276 from fnmatch import fnmatch
2277
2278 hostonly, port = splitport(host)
2279
2280 def ip2num(ipAddr):
2281 parts = ipAddr.split('.')
2282 parts = list(map(int, parts))
2283 if len(parts) != 4:
2284 parts = (parts + [0, 0, 0, 0])[:4]
2285 return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]
2286
2287 # Check for simple host names:
2288 if '.' not in host:
2289 if proxy_settings['exclude_simple']:
2290 return True
2291
2292 hostIP = None
2293
2294 for value in proxy_settings.get('exceptions', ()):
2295 # Items in the list are strings like these: *.local, 169.254/16
2296 if not value: continue
2297
2298 m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
2299 if m is not None:
2300 if hostIP is None:
2301 try:
2302 hostIP = socket.gethostbyname(hostonly)
2303 hostIP = ip2num(hostIP)
2304 except socket.error:
2305 continue
2306
2307 base = ip2num(m.group(1))
2308 mask = m.group(2)
2309 if mask is None:
2310 mask = 8 * (m.group(1).count('.') + 1)
2311 else:
2312 mask = int(mask[1:])
2313 mask = 32 - mask
2314
2315 if (hostIP >> mask) == (base >> mask):
2316 return True
2317
2318 elif fnmatch(host, value):
2319 return True
2320
2321 return False
2322
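# Illustrative sketch (hedged): because the helper above only needs the
# settings dictionary, it can be exercised on any platform with a mocked
# value shaped like the one _scproxy._get_proxy_settings() returns.
def _example_macosx_bypass():
    settings = {'exclude_simple': True,
                'exceptions': ['*.local', '169.254/16']}
    # 'printer' has no dot, so exclude_simple applies; 'foo.local' matches
    # the '*.local' glob.  Both calls therefore return True.
    return (_proxy_bypass_macosx_sysconf('printer', settings),
            _proxy_bypass_macosx_sysconf('foo.local', settings))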
2323
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002324if sys.platform == 'darwin':
Ronald Oussoren84151202010-04-18 20:46:11 +00002325 from _scproxy import _get_proxy_settings, _get_proxies
2326
2327 def proxy_bypass_macosx_sysconf(host):
Ronald Oussoren84151202010-04-18 20:46:11 +00002328 proxy_settings = _get_proxy_settings()
Ronald Oussorene72e1612011-03-14 18:15:25 -04002329 return _proxy_bypass_macosx_sysconf(host, proxy_settings)
Ronald Oussoren84151202010-04-18 20:46:11 +00002330
2331 def getproxies_macosx_sysconf():
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002332 """Return a dictionary of scheme -> proxy server URL mappings.
2333
Ronald Oussoren84151202010-04-18 20:46:11 +00002334 This function uses the MacOSX framework SystemConfiguration
2335 to fetch the proxy information.
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002336 """
Ronald Oussoren84151202010-04-18 20:46:11 +00002337 return _get_proxies()
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002338
2341 def proxy_bypass(host):
2342 if getproxies_environment():
2343 return proxy_bypass_environment(host)
2344 else:
Ronald Oussoren84151202010-04-18 20:46:11 +00002345 return proxy_bypass_macosx_sysconf(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002346
2347 def getproxies():
Ronald Oussoren84151202010-04-18 20:46:11 +00002348 return getproxies_environment() or getproxies_macosx_sysconf()
2349
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002350
2351elif os.name == 'nt':
2352 def getproxies_registry():
2353 """Return a dictionary of scheme -> proxy server URL mappings.
2354
2355 Win32 uses the registry to store proxies.
2356
2357 """
2358 proxies = {}
2359 try:
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002360 import winreg
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002361 except ImportError:
2362 # Std module, so should be around - but you never know!
2363 return proxies
2364 try:
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002365 internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002366 r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002367 proxyEnable = winreg.QueryValueEx(internetSettings,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002368 'ProxyEnable')[0]
2369 if proxyEnable:
2370 # Returned as Unicode but causes problems if not converted to ASCII
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002371 proxyServer = str(winreg.QueryValueEx(internetSettings,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002372 'ProxyServer')[0])
2373 if '=' in proxyServer:
2374 # Per-protocol settings
2375 for p in proxyServer.split(';'):
2376 protocol, address = p.split('=', 1)
2377 # See if address has a type:// prefix
2378 import re
2379 if not re.match('^([^/:]+)://', address):
2380 address = '%s://%s' % (protocol, address)
2381 proxies[protocol] = address
2382 else:
2383 # Use one setting for all protocols
2384 if proxyServer[:5] == 'http:':
2385 proxies['http'] = proxyServer
2386 else:
2387 proxies['http'] = 'http://%s' % proxyServer
Senthil Kumaran04f31b82010-07-14 20:10:52 +00002388 proxies['https'] = 'https://%s' % proxyServer
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002389 proxies['ftp'] = 'ftp://%s' % proxyServer
2390 internetSettings.Close()
2391 except (WindowsError, ValueError, TypeError):
2392 # Either registry key not found etc, or the value in an
2393 # unexpected format.
2394 # proxies already set up to be empty so nothing to do
2395 pass
2396 return proxies
2397
2398 def getproxies():
2399 """Return a dictionary of scheme -> proxy server URL mappings.
2400
2401 Returns settings gathered from the environment, if specified,
2402 or the registry.
2403
2404 """
2405 return getproxies_environment() or getproxies_registry()
2406
2407 def proxy_bypass_registry(host):
2408 try:
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002409 import winreg
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002410 import re
2411 except ImportError:
2412 # Std modules, so should be around - but you never know!
2413 return 0
2414 try:
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002415 internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002416 r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002417 proxyEnable = winreg.QueryValueEx(internetSettings,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002418 'ProxyEnable')[0]
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002419 proxyOverride = str(winreg.QueryValueEx(internetSettings,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002420 'ProxyOverride')[0])
2421 # ^^^^ Returned as Unicode but causes problems if not converted to ASCII
2422 except WindowsError:
2423 return 0
2424 if not proxyEnable or not proxyOverride:
2425 return 0
2426 # try to make a host list from name and IP address.
Georg Brandl13e89462008-07-01 19:56:00 +00002427 rawHost, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002428 host = [rawHost]
2429 try:
2430 addr = socket.gethostbyname(rawHost)
2431 if addr != rawHost:
2432 host.append(addr)
2433 except socket.error:
2434 pass
2435 try:
2436 fqdn = socket.getfqdn(rawHost)
2437 if fqdn != rawHost:
2438 host.append(fqdn)
2439 except socket.error:
2440 pass
2441 # make a check value list from the registry entry: replace the
2442 # '<local>' string by the localhost entry and the corresponding
2443 # canonical entry.
2444 proxyOverride = proxyOverride.split(';')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002445 # now check if we match one of the registry values.
2446 for test in proxyOverride:
Senthil Kumaran49476062009-05-01 06:00:23 +00002447 if test == '<local>':
2448 if '.' not in rawHost:
2449 return 1
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002450 test = test.replace(".", r"\.") # mask dots
2451 test = test.replace("*", r".*") # change glob sequence
2452 test = test.replace("?", r".") # change glob char
2453 for val in host:
2454 # print "%s <--> %s" %( test, val )
2455 if re.match(test, val, re.I):
2456 return 1
2457 return 0
2458
2459 def proxy_bypass(host):
2460 """Return a dictionary of scheme -> proxy server URL mappings.
2461
2462 Returns settings gathered from the environment, if specified,
2463 or the registry.
2464
2465 """
2466 if getproxies_environment():
2467 return proxy_bypass_environment(host)
2468 else:
2469 return proxy_bypass_registry(host)
2470
2471else:
2472 # By default use environment variables
2473 getproxies = getproxies_environment
2474 proxy_bypass = proxy_bypass_environment
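
# Illustrative sketch (hedged): whichever platform branch above was selected,
# getproxies() and proxy_bypass() expose the same interface to callers such
# as URLopener.
def _example_proxy_lookup(host='www.python.org'):
    mapping = getproxies()        # e.g. {'http': 'http://proxy:3128', ...}
    if proxy_bypass(host):
        return None               # talk to the host directly
    return mapping.get('http')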