"""An extensible library for opening URLs using a variety of protocols

The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below).  It opens the URL and returns the result as a file-like
object; the returned object has some extra methods described below.

The OpenerDirector manages a collection of Handler objects that do
all the actual work.  Each Handler implements a particular protocol or
option.  The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL.  For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns.  The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
deals with digest authentication.

urlopen(url, data=None) -- Basic usage is the same as the original
urllib.  Pass the url and optionally the data to post to an HTTP URL, and
get a file-like object back.  One difference is that you can also pass
a Request instance instead of a URL.  Raises a URLError (subclass of
IOError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.

build_opener -- Function that creates a new OpenerDirector instance.
Will install the default handlers.  Accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate.  If one of the arguments is a subclass of a default
handler, the argument will be installed instead of the default.

install_opener -- Installs a new opener as the default opener.

objects of interest:

OpenerDirector -- Sets up the User Agent as the Python-urllib client and manages
the Handler classes, while dealing with requests and responses.

Request -- An object that encapsulates the state of a request.  The
state can be as simple as the URL.  It can also include extra HTTP
headers, e.g. a User-Agent.

BaseHandler --

internals:
BaseHandler and parent
_call_chain conventions

Example usage:

import urllib.request

# set up authentication info
authinfo = urllib.request.HTTPBasicAuthHandler()
authinfo.add_password(realm='PDQ Application',
                      uri='https://mahler:8092/site-updates.py',
                      user='klem',
                      passwd='geheim$parole')

proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"})

# build a new opener that adds authentication and caching FTP handlers
opener = urllib.request.build_opener(proxy_support, authinfo,
                                     urllib.request.CacheFTPHandler)

# install it
urllib.request.install_opener(opener)

f = urllib.request.urlopen('http://www.python.org/')
"""
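
# A minimal, illustrative sketch (not part of this module) of the error
# handling described in the docstring above: HTTPError is both an exception
# and a valid response object, so its code and headers can be inspected.
# The URL used here is only an example.
#
#   import urllib.request, urllib.error
#   try:
#       f = urllib.request.urlopen('http://www.python.org/nonexistent')
#   except urllib.error.HTTPError as e:
#       print(e.code, e.headers.get('Content-Type'))  # e behaves like a response
#   except urllib.error.URLError as e:
#       print('failed to reach the server:', e.reason)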

# XXX issues:
# If an authentication error handler tries to perform authentication
# but fails, how should the error be signalled?  The client needs to
# know the HTTP error code.  But if the handler knows that the problem
# was, e.g., that it didn't know the hash algorithm requested in the
# challenge, it would be good to pass that information along to the
# client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation

# Possible extensions:
# complex proxies  XXX not sure what exactly was meant by this
# abstract factory for opener

import base64
import bisect
import email
import hashlib
import http.client
import io
import os
import posixpath
import random
import re
import socket
import sys
import time
import collections

from urllib.error import URLError, HTTPError, ContentTooShortError
from urllib.parse import (
    urlparse, urlsplit, urljoin, unwrap, quote, unquote,
    splittype, splithost, splitport, splituser, splitpasswd,
    splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse)
from urllib.response import addinfourl, addclosehook

# check for SSL
try:
    import ssl
except ImportError:
    _have_ssl = False
else:
    _have_ssl = True

# used in User-Agent header sent
__version__ = sys.version[:3]

_opener = None
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
            *, cafile=None, capath=None):
    global _opener
    if cafile or capath:
        if not _have_ssl:
            raise ValueError('SSL support not available')
        context = ssl.SSLContext(ssl.PROTOCOL_SSLv23)
        context.options |= ssl.OP_NO_SSLv2
        if cafile or capath:
            context.verify_mode = ssl.CERT_REQUIRED
            context.load_verify_locations(cafile, capath)
            check_hostname = True
        else:
            check_hostname = False
        https_handler = HTTPSHandler(context=context, check_hostname=check_hostname)
        opener = build_opener(https_handler)
    elif _opener is None:
        _opener = opener = build_opener()
    else:
        opener = _opener
    return opener.open(url, data, timeout)

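# Illustrative sketch (not part of the module API): one way the keyword-only
# cafile argument above might be used to require certificate verification for
# an HTTPS fetch.  The URL and the CA bundle path are assumptions for the
# example; any verification failure surfaces as a URLError.
def _example_verified_urlopen(url="https://www.python.org/",
                              cafile="/etc/ssl/certs/ca-certificates.crt"):
    # With cafile set, urlopen() builds a dedicated HTTPSHandler whose
    # SSLContext uses CERT_REQUIRED, so the server certificate is checked.
    f = urlopen(url, cafile=cafile)
    try:
        return f.read()
    finally:
        f.close()
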
def install_opener(opener):
    global _opener
    _opener = opener

# TODO(jhylton): Make this work with the same global opener.
_urlopener = None
def urlretrieve(url, filename=None, reporthook=None, data=None):
    global _urlopener
    if not _urlopener:
        _urlopener = FancyURLopener()
    return _urlopener.retrieve(url, filename, reporthook, data)

def urlcleanup():
    if _urlopener:
        _urlopener.cleanup()
    global _opener
    if _opener:
        _opener = None

# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$", re.ASCII)
def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.

    """
    url = request.full_url
    host = urlparse(url)[1]
    if host == "":
        host = request.get_header("Host", "")

    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()

class Request:

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.full_url = unwrap(url)
        self.full_url, self.fragment = splittag(self.full_url)
        self.data = data
        self.headers = {}
        self._tunnel_host = None
        for key, value in headers.items():
            self.add_header(key, value)
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable
        self._parse()

    def _parse(self):
        self.type, rest = splittype(self.full_url)
        if self.type is None:
            raise ValueError("unknown url type: %s" % self.full_url)
        self.host, self.selector = splithost(rest)
        if self.host:
            self.host = unquote(self.host)

    def get_method(self):
        if self.data is not None:
            return "POST"
        else:
            return "GET"

    # Begin deprecated methods

    def add_data(self, data):
        self.data = data

    def has_data(self):
        return self.data is not None

    def get_data(self):
        return self.data

    def get_full_url(self):
        if self.fragment:
            return '%s#%s' % (self.full_url, self.fragment)
        else:
            return self.full_url

    def get_type(self):
        return self.type

    def get_host(self):
        return self.host

    def get_selector(self):
        return self.selector

    def is_unverifiable(self):
        return self.unverifiable

    def get_origin_req_host(self):
        return self.origin_req_host

    # End deprecated methods

    def set_proxy(self, host, type):
        if self.type == 'https' and not self._tunnel_host:
            self._tunnel_host = self.host
        else:
            self.type = type
            self.selector = self.full_url
        self.host = host

    def has_proxy(self):
        return self.selector == self.full_url

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def header_items(self):
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return list(hdrs.items())

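# Illustrative sketch (not part of the module API): constructing a Request by
# hand, as a user of this module would.  The URL, header value and POST body
# below are assumptions for the example.
def _example_request():
    req = Request("http://www.example.com/search?q=python",
                  headers={"User-Agent": "example-client/0.1"})
    # With no data the request is a GET; attaching a body switches it to POST.
    assert req.get_method() == "GET"
    req.data = b"q=python"
    assert req.get_method() == "POST"
    return req
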
class OpenerDirector:
    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # self.handlers is retained only for backward compatibility
        self.handlers = []
        # manage the individual handlers
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could.  Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)
            result = func(*args)
            if result is not None:
                return result

    def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        # accept a URL or a Request object
        if isinstance(fullurl, str):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.data = data

        req.timeout = timeout
        protocol = req.type

        # pre-process request
        meth_name = protocol+"_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)

        response = self._open(req, data)

        # post-process response
        meth_name = protocol+"_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)

        return response

    def _open(self, req, data=None):
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.type
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http'] # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)

# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both

def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and, when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    def isclass(obj):
        return isinstance(obj, type) or hasattr(obj, "__bases__")

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(http.client, "HTTPSConnection"):
        default_classes.append(HTTPSHandler)
    skip = set()
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener

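# Illustrative sketch (not part of the module API): how build_opener() might be
# combined with a customized default handler.  Because the argument is an
# instance of one of the default handler classes, it replaces the default
# HTTPHandler rather than adding a second copy.  The debug level is arbitrary.
def _example_build_opener():
    http_handler = HTTPHandler(debuglevel=1)   # replaces the default HTTPHandler
    opener = build_opener(http_handler)
    # The opener can be used directly, or installed globally for urlopen().
    return opener
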
class BaseHandler:
    handler_order = 500

    def add_parent(self, parent):
        self.parent = parent

    def close(self):
        # Only exists for backwards compatibility
        pass

    def __lt__(self, other):
        if not hasattr(other, "handler_order"):
            # Try to preserve the old behavior of having custom classes
            # inserted after default ones (works only for custom user
            # classes which are not aware of handler_order).
            return True
        return self.handler_order < other.handler_order


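# Illustrative sketch (not part of the module): a minimal custom handler.
# OpenerDirector.add_handler() dispatches on method names, so defining
# example_open() registers this handler for a hypothetical "example" URL
# scheme, and handler_order decides where it sorts in the handler chain.
class _ExampleSchemeHandler(BaseHandler):
    handler_order = 500   # the BaseHandler default position in the chain

    def example_open(self, req):
        # Returning None would let other handlers try; raising URLError
        # would stop the chain.  Here we just return a canned response.
        return addinfourl(io.BytesIO(b"hello from the example scheme"),
                          email.message_from_string("Content-type: text/plain\n"),
                          req.full_url)

# e.g. build_opener(_ExampleSchemeHandler()).open("example://anything") would
# return the canned response above.
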
class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses."""
    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        code, msg, hdrs = response.code, response.msg, response.info()

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if not (200 <= code < 300):
            response = self.parent.error(
                'http', request, response, code, msg, hdrs)

        return response

    https_response = http_response

class HTTPDefaultErrorHandler(BaseHandler):
    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.full_url, code, msg, hdrs, fp)

class HTTPRedirectHandler(BaseHandler):
    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
            or code in (301, 302, 303) and m == "POST")):
            raise HTTPError(req.full_url, code, msg, headers, fp)

        # Strictly (according to RFC 2616), 301 or 302 in response to
        # a POST MUST NOT cause a redirection without confirmation
        # from the user (of urllib.request, in this case).  In practice,
        # essentially all clients do redirect in this case, so we do
        # the same.
        # be conciliant with URIs containing a space
        newurl = newurl.replace(' ', '%20')
        CONTENT_HEADERS = ("content-length", "content-type")
        newheaders = dict((k, v) for k, v in req.headers.items()
                          if k.lower() not in CONTENT_HEADERS)
        return Request(newurl,
                       headers=newheaders,
                       origin_req_host=req.origin_req_host,
                       unverifiable=True)

    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if "location" in headers:
            newurl = headers["location"]
        elif "uri" in headers:
            newurl = headers["uri"]
        else:
            return

        # fix a possible malformed URL
        urlparts = urlparse(newurl)

        # For security reasons we don't allow redirection to anything other
        # than http, https or ftp.

        if urlparts.scheme not in ('http', 'https', 'ftp'):
            raise HTTPError(
                newurl, code,
                "%s - Redirection to url '%s' is not allowed" % (msg, newurl),
                headers, fp)

        if not urlparts.path:
            urlparts = list(urlparts)
            urlparts[2] = "/"
            newurl = urlunparse(urlparts)

        newurl = urljoin(req.full_url, newurl)

        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return

        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.full_url, code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1

        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()

        return self.parent.open(new, timeout=req.timeout)

    http_error_301 = http_error_303 = http_error_307 = http_error_302

    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"


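# Illustrative sketch (not part of the module): redirect_request() above is the
# documented hook for customizing redirect policy.  A subclass that refuses to
# follow any redirect could simply re-raise:
class _NoRedirectHandler(HTTPRedirectHandler):
    def redirect_request(self, req, fp, code, msg, headers, newurl):
        # Raising here stops the chain; http_error_30x will not retry.
        raise HTTPError(req.full_url, code, msg, headers, fp)

# An opener built with build_opener(_NoRedirectHandler) then surfaces 3xx
# responses as HTTPError instead of silently following them.

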
def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')

    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # authority
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.
        # and 3.3.), path is empty or starts with '/'
        end = r_scheme.find("/", 2)
        if end == -1:
            end = None
        authority = r_scheme[2:end]
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport

class ProxyHandler(BaseHandler):
    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None):
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        self.proxies = proxies
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                    meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        orig_type = req.type
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            proxy_type = orig_type

        if req.host and proxy_bypass(req.host):
            return None

        if user and password:
            user_pass = '%s:%s' % (unquote(user),
                                   unquote(password))
            creds = base64.b64encode(user_pass.encode()).decode("ascii")
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req, timeout=req.timeout)

class HTTPPasswordMgr:

    def __init__(self):
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, str):
            uri = [uri]
        if realm not in self.passwd:
            self.passwd[realm] = {}
        for default_port in True, False:
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.items():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False


class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):

    def find_user_password(self, realm, authuri):
        user, password = HTTPPasswordMgr.find_user_password(self, realm,
                                                            authuri)
        if user is not None:
            return user, password
        return HTTPPasswordMgr.find_user_password(self, None, authuri)


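# Illustrative sketch (not part of the module): how the password managers above
# are typically fed.  The realm, URI and credentials are assumptions; with the
# default-realm variant, a None realm acts as a fallback for any realm.
def _example_password_mgr():
    mgr = HTTPPasswordMgrWithDefaultRealm()
    mgr.add_password(None, "http://www.example.com/private/",
                     "klem", "geheim$parole")
    # Lookup reduces both stored and queried URIs to (authority, path) and
    # matches by prefix, so a deeper URL under the same tree still matches.
    return mgr.find_user_password("Some Realm",
                                  "http://www.example.com/private/report")

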
class AbstractBasicAuthHandler:

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # allow for double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild)
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\'])(.*?)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password
        self.retried = 0

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)

        if self.retried > 5:
            # retry sending the username:password 5 times before failing.
            raise HTTPError(req.get_full_url(), 401, "basic auth failed",
                            headers, None)
        else:
            self.retried += 1

        if authreq:
            mo = AbstractBasicAuthHandler.rx.search(authreq)
            if mo:
                scheme, quote, realm = mo.groups()
                if scheme.lower() == 'basic':
                    response = self.retry_http_basic_auth(host, req, realm)
                    if response and response.code != 401:
                        self.retried = 0
                    return response

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii")
            if req.headers.get(self.auth_header, None) == auth:
                return None
            req.add_unredirected_header(self.auth_header, auth)
            return self.parent.open(req, timeout=req.timeout)
        else:
            return None


class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        url = req.full_url
        response = self.http_error_auth_reqed('www-authenticate',
                                              url, req, headers)
        self.reset_retry_count()
        return response


class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component in
        # authority.  Assume there isn't one, since urllib.request does not (and
        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
        # userinfo.
        authority = req.host
        response = self.http_error_auth_reqed('proxy-authenticate',
                                              authority, req, headers)
        self.reset_retry_count()
        return response


def randombytes(n):
    """Return n random bytes."""
    return os.urandom(n)

class AbstractDigestAuthHandler:
    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX qop="auth-int" support is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.full_url, 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)

    def retry_http_digest_auth(self, req, auth):
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(filter(None, parse_http_list(challenge)))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req, timeout=req.timeout)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime())
        b = s.encode("ascii") + randombytes(8)
        dig = hashlib.sha1(b).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None

        user, pw = self.passwd.find_user_password(realm, req.full_url)
        if user is None:
            return None

        # XXX not implemented yet
        if req.data is not None:
            entdig = self.get_entity_digest(req.data, chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.selector)
        if qop == 'auth':
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)

        # XXX should the partial digests be encoded too?

        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.selector,
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest()
        # XXX MD5-sess
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None


class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        host = urlparse(req.full_url)[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry


class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        host = req.host
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry

class AbstractHTTPHandler(BaseHandler):

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        self._debuglevel = level

    def do_request_(self, request):
        host = request.host
        if not host:
            raise URLError('no host given')

        if request.data is not None:  # POST
            data = request.data
            if isinstance(data, str):
                raise TypeError("POST data should be bytes"
                                " or an iterable of bytes. It cannot be str.")
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                try:
                    mv = memoryview(data)
                except TypeError:
                    if isinstance(data, collections.Iterable):
                        raise ValueError("Content-Length should be specified "
                                "for iterable data of type %r %r" % (type(data),
                                data))
                else:
                    request.add_unredirected_header(
                            'Content-length', '%d' % (len(mv) * mv.itemsize))

        sel_host = host
        if request.has_proxy():
            scheme, sel = splittype(request.selector)
            sel_host, sel_path = splithost(sel)
        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req, **http_conn_args):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        # will parse host:port
        h = http_class(host, timeout=req.timeout, **http_conn_args)

        headers = dict(req.unredirected_hdrs)
        headers.update(dict((k, v) for k, v in req.headers.items()
                            if k not in headers))

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict((name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h.set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            h.request(req.get_method(), req.selector, req.data, headers)
        except socket.error as err:  # timeout error
            h.close()
            raise URLError(err)
        else:
            r = h.getresponse()

        r.url = req.get_full_url()
        # This line replaces the .msg attribute of the HTTPResponse
        # with .headers, because urllib clients expect the response to
        # have the reason in .msg.  It would be good to mark this
        # attribute as deprecated and get clients to use info() or
        # .headers instead.
        r.msg = r.reason
        return r


class HTTPHandler(AbstractHTTPHandler):

    def http_open(self, req):
        return self.do_open(http.client.HTTPConnection, req)

    http_request = AbstractHTTPHandler.do_request_

if hasattr(http.client, 'HTTPSConnection'):
    import ssl

    class HTTPSHandler(AbstractHTTPHandler):

        def __init__(self, debuglevel=0, context=None, check_hostname=None):
            AbstractHTTPHandler.__init__(self, debuglevel)
            self._context = context
            self._check_hostname = check_hostname

        def https_open(self, req):
            return self.do_open(http.client.HTTPSConnection, req,
                context=self._context, check_hostname=self._check_hostname)

        https_request = AbstractHTTPHandler.do_request_

class HTTPCookieProcessor(BaseHandler):
    def __init__(self, cookiejar=None):
        import http.cookiejar
        if cookiejar is None:
            cookiejar = http.cookiejar.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response

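# Illustrative sketch (not part of the module): HTTPCookieProcessor is both a
# request pre-processor and a response post-processor, so an opener built with
# it replays any cookies the server set on earlier responses.  The URL is an
# assumption for the example.
def _example_cookie_opener():
    import http.cookiejar
    jar = http.cookiejar.CookieJar()
    opener = build_opener(HTTPCookieProcessor(jar))
    # Cookies received on this response are stored in `jar` and sent back on
    # subsequent requests made through the same opener.
    return opener.open("http://www.example.com/")
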
class UnknownHandler(BaseHandler):
    def unknown_open(self, req):
        type = req.type
        raise URLError('unknown url type: %s' % type)

def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated."""
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        if v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed

def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.
    Only double-quotes count, not single-quotes.
    """
    res = []
    part = ''

    escape = quote = False
    for cur in s:
        if escape:
            part += cur
            escape = False
            continue
        if quote:
            if cur == '\\':
                escape = True
                continue
            elif cur == '"':
                quote = False
            part += cur
            continue

        if cur == ',':
            res.append(part)
            part = ''
            continue

        if cur == '"':
            quote = True

        part += cur

    # append last part
    if part:
        res.append(part)

    return [part.strip() for part in res]

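# Illustrative sketch (not part of the module): the two parsers above are used
# together when a Digest challenge is split into its key=value parameters.
def _example_parse_challenge():
    items = parse_http_list('realm="example", qop="auth, auth-int", nc=1')
    # Quoted commas are preserved; surrounding whitespace is stripped.
    assert items == ['realm="example"', 'qop="auth, auth-int"', 'nc=1']
    # parse_keqv_list then strips the surrounding quotes from each value.
    assert parse_keqv_list(items) == {'realm': 'example',
                                      'qop': 'auth, auth-int',
                                      'nc': '1'}
    return parse_keqv_list(items)
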
class FileHandler(BaseHandler):
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.selector
        if url[:2] == '//' and url[2:3] != '/' and (req.host and
                req.host != 'localhost'):
            if req.host not in self.get_names():
                raise URLError("file:// scheme is supported only on localhost")
        else:
            return self.open_local_file(req)

    # names for the localhost
    names = None
    def get_names(self):
        if FileHandler.names is None:
            try:
                FileHandler.names = tuple(
                    socket.gethostbyname_ex('localhost')[2] +
                    socket.gethostbyname_ex(socket.gethostname())[2])
            except socket.gaierror:
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names

    # not entirely sure what the rules are here
    def open_local_file(self, req):
        import email.utils
        import mimetypes
        host = req.host
        filename = req.selector
        localfile = url2pathname(filename)
        try:
            stats = os.stat(localfile)
            size = stats.st_size
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            mtype = mimetypes.guess_type(filename)[0]
            headers = email.message_from_string(
                'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
                (mtype or 'text/plain', size, modified))
            if host:
                host, port = splitport(host)
            if not host or \
                (not port and _safe_gethostbyname(host) in self.get_names()):
                if host:
                    origurl = 'file://' + host + filename
                else:
                    origurl = 'file://' + filename
                return addinfourl(open(localfile, 'rb'), headers, origurl)
        except OSError as msg:
            # users shouldn't expect OSErrors coming from urlopen()
            raise URLError(msg)
        raise URLError('file not on local host')

def _safe_gethostbyname(host):
    try:
        return socket.gethostbyname(host)
    except socket.gaierror:
        return None

1309class FTPHandler(BaseHandler):
1310 def ftp_open(self, req):
1311 import ftplib
1312 import mimetypes
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001313 host = req.host
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001314 if not host:
Georg Brandl13e89462008-07-01 19:56:00 +00001315 raise URLError('ftp error: no host given')
1316 host, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001317 if port is None:
1318 port = ftplib.FTP_PORT
1319 else:
1320 port = int(port)
1321
1322 # username/password handling
Georg Brandl13e89462008-07-01 19:56:00 +00001323 user, host = splituser(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001324 if user:
Georg Brandl13e89462008-07-01 19:56:00 +00001325 user, passwd = splitpasswd(user)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001326 else:
1327 passwd = None
Georg Brandl13e89462008-07-01 19:56:00 +00001328 host = unquote(host)
Senthil Kumarandaa29d02010-11-18 15:36:41 +00001329 user = user or ''
1330 passwd = passwd or ''
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001331
1332 try:
1333 host = socket.gethostbyname(host)
1334 except socket.error as msg:
Georg Brandl13e89462008-07-01 19:56:00 +00001335 raise URLError(msg)
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001336 path, attrs = splitattr(req.selector)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001337 dirs = path.split('/')
Georg Brandl13e89462008-07-01 19:56:00 +00001338 dirs = list(map(unquote, dirs))
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001339 dirs, file = dirs[:-1], dirs[-1]
1340 if dirs and not dirs[0]:
1341 dirs = dirs[1:]
1342 try:
1343 fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
1344 type = file and 'I' or 'D'
1345 for attr in attrs:
Georg Brandl13e89462008-07-01 19:56:00 +00001346 attr, value = splitvalue(attr)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001347 if attr.lower() == 'type' and \
1348 value in ('a', 'A', 'i', 'I', 'd', 'D'):
1349 type = value.upper()
1350 fp, retrlen = fw.retrfile(file, type)
1351 headers = ""
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001352 mtype = mimetypes.guess_type(req.full_url)[0]
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001353 if mtype:
1354 headers += "Content-type: %s\n" % mtype
1355 if retrlen is not None and retrlen >= 0:
1356 headers += "Content-length: %d\n" % retrlen
1357 headers = email.message_from_string(headers)
Jeremy Hylton6c5e28c2009-03-31 14:35:53 +00001358 return addinfourl(fp, headers, req.full_url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001359 except ftplib.all_errors as msg:
Georg Brandl13e89462008-07-01 19:56:00 +00001360 exc = URLError('ftp error: %s' % msg)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001361 raise exc.with_traceback(sys.exc_info()[2])
1362
1363 def connect_ftp(self, user, passwd, host, port, dirs, timeout):
Nadeem Vawda08f5f7a2011-07-23 14:03:00 +02001364 return ftpwrapper(user, passwd, host, port, dirs, timeout,
1365 persistent=False)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001366
1367class CacheFTPHandler(FTPHandler):
1368 # XXX would be nice to have pluggable cache strategies
1369 # XXX this stuff is definitely not thread safe
1370 def __init__(self):
1371 self.cache = {}
1372 self.timeout = {}
1373 self.soonest = 0
1374 self.delay = 60
1375 self.max_conns = 16
1376
1377 def setTimeout(self, t):
1378 self.delay = t
1379
1380 def setMaxConns(self, m):
1381 self.max_conns = m
1382
1383 def connect_ftp(self, user, passwd, host, port, dirs, timeout):
1384 key = user, host, port, '/'.join(dirs), timeout
1385 if key in self.cache:
1386 self.timeout[key] = time.time() + self.delay
1387 else:
1388 self.cache[key] = ftpwrapper(user, passwd, host, port,
1389 dirs, timeout)
1390 self.timeout[key] = time.time() + self.delay
1391 self.check_cache()
1392 return self.cache[key]
1393
1394 def check_cache(self):
1395 # first check for old ones
1396 t = time.time()
1397 if self.soonest <= t:
1398 for k, v in list(self.timeout.items()):
1399 if v < t:
1400 self.cache[k].close()
1401 del self.cache[k]
1402 del self.timeout[k]
1403 self.soonest = min(list(self.timeout.values()))
1404
1405 # then check the size
1406 if len(self.cache) == self.max_conns:
1407 for k, v in list(self.timeout.items()):
1408 if v == self.soonest:
1409 del self.cache[k]
1410 del self.timeout[k]
1411 break
1412 self.soonest = min(list(self.timeout.values()))
1413
Nadeem Vawda08f5f7a2011-07-23 14:03:00 +02001414 def clear_cache(self):
1415 for conn in self.cache.values():
1416 conn.close()
1417 self.cache.clear()
1418 self.timeout.clear()
1419
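# A minimal usage sketch of CacheFTPHandler: pass a configured instance to
# build_opener() so repeated requests to the same FTP server reuse one cached
# connection.  The host and path below are placeholders.
#
#   import urllib.request
#   handler = urllib.request.CacheFTPHandler()
#   handler.setTimeout(30)        # expire cached connections 30 seconds after last use
#   handler.setMaxConns(4)        # keep at most four cached connections
#   opener = urllib.request.build_opener(handler)
#   f = opener.open('ftp://ftp.example.com/pub/README')
#   try:
#       data = f.read()
#   finally:
#       f.close()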
1420
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001421# Code moved from the old urllib module
1422
1423MAXFTPCACHE = 10 # Trim the ftp cache beyond this size
1424
1425# Helper for non-unix systems
Ronald Oussoren94f25282010-05-05 19:11:21 +00001426if os.name == 'nt':
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001427 from nturl2path import url2pathname, pathname2url
1428else:
1429 def url2pathname(pathname):
1430 """OS-specific conversion from a relative URL of the 'file' scheme
1431 to a file system path; not recommended for general use."""
Georg Brandl13e89462008-07-01 19:56:00 +00001432 return unquote(pathname)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001433
1434 def pathname2url(pathname):
1435 """OS-specific conversion from a file system path to a relative URL
1436 of the 'file' scheme; not recommended for general use."""
Georg Brandl13e89462008-07-01 19:56:00 +00001437 return quote(pathname)
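
# Illustration of the non-nt definitions above, which simply percent-decode
# and percent-encode the path (placeholder paths):
#
#   url2pathname('/tmp/hello%20world.txt')   ->  '/tmp/hello world.txt'
#   pathname2url('/tmp/hello world.txt')     ->  '/tmp/hello%20world.txt'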
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001438
1439# This really consists of two pieces:
1440# (1) a class which handles opening of all sorts of URLs
1441# (plus assorted utilities etc.)
1442# (2) a set of functions for parsing URLs
1443# XXX Should these be separated out into different modules?
1444
1445
1446ftpcache = {}
1447class URLopener:
1448 """Class to open URLs.
1449 This is a class rather than just a subroutine because we may need
1450 more than one set of global protocol-specific options.
1451 Note -- this is a base class for those who don't want the
1452 automatic handling of error types 302 (relocated) and 401
1453 (authorization needed)."""
1454
1455 __tempfiles = None
1456
1457 version = "Python-urllib/%s" % __version__
1458
1459 # Constructor
1460 def __init__(self, proxies=None, **x509):
1461 if proxies is None:
1462 proxies = getproxies()
1463 assert hasattr(proxies, 'keys'), "proxies must be a mapping"
1464 self.proxies = proxies
1465 self.key_file = x509.get('key_file')
1466 self.cert_file = x509.get('cert_file')
1467 self.addheaders = [('User-Agent', self.version)]
1468 self.__tempfiles = []
1469 self.__unlink = os.unlink # See cleanup()
1470 self.tempcache = None
1471 # Undocumented feature: if you assign {} to tempcache,
1472 # it is used to cache files retrieved with
1473 # self.retrieve(). This is not enabled by default
1474 # since it does not work for changing documents (and I
1475 # haven't got the logic to check expiration headers
1476 # yet).
1477 self.ftpcache = ftpcache
1478 # Undocumented feature: you can use a different
1479 # ftp cache by assigning to the .ftpcache member;
1480 # in case you want logically independent URL openers
1481 # XXX This is not threadsafe. Bah.
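
# A minimal usage sketch for URLopener; the proxy URL and target URL are
# placeholders.
#
#   opener = URLopener(proxies={'http': 'http://proxy.example.com:3128'})
#   opener.addheader('Accept', 'text/html')
#   f = opener.open('http://www.example.com/')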
1482
1483 def __del__(self):
1484 self.close()
1485
1486 def close(self):
1487 self.cleanup()
1488
1489 def cleanup(self):
1490 # This code sometimes runs when the rest of this module
1491 # has already been deleted, so it can't use any globals
1492 # or import anything.
1493 if self.__tempfiles:
1494 for file in self.__tempfiles:
1495 try:
1496 self.__unlink(file)
1497 except OSError:
1498 pass
1499 del self.__tempfiles[:]
1500 if self.tempcache:
1501 self.tempcache.clear()
1502
1503 def addheader(self, *args):
1504 """Add a header to be used by the HTTP interface only
1505 e.g. u.addheader('Accept', 'sound/basic')"""
1506 self.addheaders.append(args)
1507
1508 # External interface
1509 def open(self, fullurl, data=None):
1510 """Use URLopener().open(file) instead of open(file, 'r')."""
Georg Brandl13e89462008-07-01 19:56:00 +00001511 fullurl = unwrap(to_bytes(fullurl))
Senthil Kumaran734f0592010-02-20 22:19:04 +00001512 fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001513 if self.tempcache and fullurl in self.tempcache:
1514 filename, headers = self.tempcache[fullurl]
1515 fp = open(filename, 'rb')
Georg Brandl13e89462008-07-01 19:56:00 +00001516 return addinfourl(fp, headers, fullurl)
1517 urltype, url = splittype(fullurl)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001518 if not urltype:
1519 urltype = 'file'
1520 if urltype in self.proxies:
1521 proxy = self.proxies[urltype]
Georg Brandl13e89462008-07-01 19:56:00 +00001522 urltype, proxyhost = splittype(proxy)
1523 host, selector = splithost(proxyhost)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001524 url = (host, fullurl) # Signal special case to open_*()
1525 else:
1526 proxy = None
1527 name = 'open_' + urltype
1528 self.type = urltype
1529 name = name.replace('-', '_')
1530 if not hasattr(self, name):
1531 if proxy:
1532 return self.open_unknown_proxy(proxy, fullurl, data)
1533 else:
1534 return self.open_unknown(fullurl, data)
1535 try:
1536 if data is None:
1537 return getattr(self, name)(url)
1538 else:
1539 return getattr(self, name)(url, data)
1540 except socket.error as msg:
1541 raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])
1542
1543 def open_unknown(self, fullurl, data=None):
1544 """Overridable interface to open unknown URL type."""
Georg Brandl13e89462008-07-01 19:56:00 +00001545 type, url = splittype(fullurl)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001546 raise IOError('url error', 'unknown url type', type)
1547
1548 def open_unknown_proxy(self, proxy, fullurl, data=None):
1549 """Overridable interface to open unknown URL type."""
Georg Brandl13e89462008-07-01 19:56:00 +00001550 type, url = splittype(fullurl)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001551 raise IOError('url error', 'invalid proxy for %s' % type, proxy)
1552
1553 # External interface
1554 def retrieve(self, url, filename=None, reporthook=None, data=None):
1555 """retrieve(url) returns (filename, headers) for a local object
1556 or (tempfilename, headers) for a remote object."""
Georg Brandl13e89462008-07-01 19:56:00 +00001557 url = unwrap(to_bytes(url))
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001558 if self.tempcache and url in self.tempcache:
1559 return self.tempcache[url]
Georg Brandl13e89462008-07-01 19:56:00 +00001560 type, url1 = splittype(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001561 if filename is None and (not type or type == 'file'):
1562 try:
1563 fp = self.open_local_file(url1)
1564 hdrs = fp.info()
Philip Jenveycb134d72009-12-03 02:45:01 +00001565 fp.close()
Georg Brandl13e89462008-07-01 19:56:00 +00001566 return url2pathname(splithost(url1)[1]), hdrs
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001567 except IOError as msg:
1568 pass
1569 fp = self.open(url, data)
Benjamin Peterson5f28b7b2009-03-26 21:49:58 +00001570 try:
1571 headers = fp.info()
1572 if filename:
1573 tfp = open(filename, 'wb')
1574 else:
1575 import tempfile
1576 garbage, path = splittype(url)
1577 garbage, path = splithost(path or "")
1578 path, garbage = splitquery(path or "")
1579 path, garbage = splitattr(path or "")
1580 suffix = os.path.splitext(path)[1]
1581 (fd, filename) = tempfile.mkstemp(suffix)
1582 self.__tempfiles.append(filename)
1583 tfp = os.fdopen(fd, 'wb')
1584 try:
1585 result = filename, headers
1586 if self.tempcache is not None:
1587 self.tempcache[url] = result
1588 bs = 1024*8
1589 size = -1
1590 read = 0
1591 blocknum = 0
Senthil Kumarance260142011-11-01 01:35:17 +08001592 if "content-length" in headers:
1593 size = int(headers["Content-Length"])
Benjamin Peterson5f28b7b2009-03-26 21:49:58 +00001594 if reporthook:
Benjamin Peterson5f28b7b2009-03-26 21:49:58 +00001595 reporthook(blocknum, bs, size)
1596 while 1:
1597 block = fp.read(bs)
1598 if not block:
1599 break
1600 read += len(block)
1601 tfp.write(block)
1602 blocknum += 1
1603 if reporthook:
1604 reporthook(blocknum, bs, size)
1605 finally:
1606 tfp.close()
1607 finally:
1608 fp.close()
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001609
1610 # raise exception if actual size does not match content-length header
1611 if size >= 0 and read < size:
Georg Brandl13e89462008-07-01 19:56:00 +00001612 raise ContentTooShortError(
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001613 "retrieval incomplete: got only %i out of %i bytes"
1614 % (read, size), result)
1615
1616 return result
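
# A minimal sketch of retrieve() with a progress callback; the URL and file
# name are placeholders.
#
#   def report(blocknum, blocksize, totalsize):
#       print("block", blocknum, "of", blocksize, "bytes; total", totalsize)
#
#   filename, headers = URLopener().retrieve('http://www.example.com/',
#                                            'example.html', reporthook=report)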
1617
1618 # Each method named open_<type> knows how to open that type of URL
1619
1620 def _open_generic_http(self, connection_factory, url, data):
1621 """Make an HTTP connection using connection_factory.
1622
1623 This is an internal method that should be called from
1624 open_http() or open_https().
1625
1626 Arguments:
1627 - connection_factory should take a host name and return an
1628 HTTPConnection instance.
1629 - url is the url to retrieval or a host, relative-path pair.
1630 - data is payload for a POST request or None.
1631 """
1632
1633 user_passwd = None
1634 proxy_passwd = None
1635 if isinstance(url, str):
Georg Brandl13e89462008-07-01 19:56:00 +00001636 host, selector = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001637 if host:
Georg Brandl13e89462008-07-01 19:56:00 +00001638 user_passwd, host = splituser(host)
1639 host = unquote(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001640 realhost = host
1641 else:
1642 host, selector = url
1643 # check whether the proxy contains authorization information
Georg Brandl13e89462008-07-01 19:56:00 +00001644 proxy_passwd, host = splituser(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001645 # now we proceed with the url we want to obtain
Georg Brandl13e89462008-07-01 19:56:00 +00001646 urltype, rest = splittype(selector)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001647 url = rest
1648 user_passwd = None
1649 if urltype.lower() != 'http':
1650 realhost = None
1651 else:
Georg Brandl13e89462008-07-01 19:56:00 +00001652 realhost, rest = splithost(rest)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001653 if realhost:
Georg Brandl13e89462008-07-01 19:56:00 +00001654 user_passwd, realhost = splituser(realhost)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001655 if user_passwd:
1656 selector = "%s://%s%s" % (urltype, realhost, rest)
1657 if proxy_bypass(realhost):
1658 host = realhost
1659
1660 #print "proxy via http:", host, selector
1661 if not host: raise IOError('http error', 'no host given')
1662
1663 if proxy_passwd:
1664 import base64
Senthil Kumaran5626eec2010-08-04 17:46:23 +00001665 proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001666 else:
1667 proxy_auth = None
1668
1669 if user_passwd:
1670 import base64
Senthil Kumaran5626eec2010-08-04 17:46:23 +00001671 auth = base64.b64encode(user_passwd.encode()).decode('ascii')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001672 else:
1673 auth = None
1674 http_conn = connection_factory(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001675 headers = {}
1676 if proxy_auth:
1677 headers["Proxy-Authorization"] = "Basic %s" % proxy_auth
1678 if auth:
1679 headers["Authorization"] = "Basic %s" % auth
1680 if realhost:
1681 headers["Host"] = realhost
Senthil Kumarand91ffca2011-03-19 17:25:27 +08001682
1683 # Add Connection:close as we don't support persistent connections yet.
1684 # This helps in closing the socket and avoiding ResourceWarning
1685
1686 headers["Connection"] = "close"
1687
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001688 for header, value in self.addheaders:
1689 headers[header] = value
1690
1691 if data is not None:
1692 headers["Content-Type"] = "application/x-www-form-urlencoded"
1693 http_conn.request("POST", selector, data, headers)
1694 else:
1695 http_conn.request("GET", selector, headers=headers)
1696
1697 try:
1698 response = http_conn.getresponse()
1699 except http.client.BadStatusLine:
1700 # something went wrong with the HTTP status line
Georg Brandl13e89462008-07-01 19:56:00 +00001701 raise URLError("http protocol error: bad status line")
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001702
1703 # According to RFC 2616, "2xx" code indicates that the client's
1704 # request was successfully received, understood, and accepted.
1705 if 200 <= response.status < 300:
Antoine Pitroub353c122009-02-11 00:39:14 +00001706 return addinfourl(response, response.msg, "http:" + url,
Georg Brandl13e89462008-07-01 19:56:00 +00001707 response.status)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001708 else:
1709 return self.http_error(
1710 url, response.fp,
1711 response.status, response.reason, response.msg, data)
1712
1713 def open_http(self, url, data=None):
1714 """Use HTTP protocol."""
1715 return self._open_generic_http(http.client.HTTPConnection, url, data)
1716
1717 def http_error(self, url, fp, errcode, errmsg, headers, data=None):
1718 """Handle http errors.
1719
1720 Derived class can override this, or provide specific handlers
1721 named http_error_DDD where DDD is the 3-digit error code."""
1722 # First check if there's a specific handler for this error
1723 name = 'http_error_%d' % errcode
1724 if hasattr(self, name):
1725 method = getattr(self, name)
1726 if data is None:
1727 result = method(url, fp, errcode, errmsg, headers)
1728 else:
1729 result = method(url, fp, errcode, errmsg, headers, data)
1730 if result: return result
1731 return self.http_error_default(url, fp, errcode, errmsg, headers)
1732
1733 def http_error_default(self, url, fp, errcode, errmsg, headers):
1734 """Default error handler: close the connection and raise IOError."""
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001735 fp.close()
Georg Brandl13e89462008-07-01 19:56:00 +00001736 raise HTTPError(url, errcode, errmsg, headers, None)
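
# A minimal sketch of the http_error_<code> hook described in http_error()
# above: a subclass that returns the server's error page for 404 responses
# instead of raising.
#
#   class LenientOpener(URLopener):
#       def http_error_404(self, url, fp, errcode, errmsg, headers):
#           return addinfourl(fp, headers, "http:" + url, errcode)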
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001737
1738 if _have_ssl:
1739 def _https_connection(self, host):
1740 return http.client.HTTPSConnection(host,
1741 key_file=self.key_file,
1742 cert_file=self.cert_file)
1743
1744 def open_https(self, url, data=None):
1745 """Use HTTPS protocol."""
1746 return self._open_generic_http(self._https_connection, url, data)
1747
1748 def open_file(self, url):
1749 """Use local file or FTP depending on form of URL."""
1750 if not isinstance(url, str):
1751 raise URLError('file error', 'proxy support for file protocol currently not implemented')
1752 if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
Senthil Kumaran383c32d2010-10-14 11:57:35 +00001753 raise ValueError("file:// scheme is supported only on localhost")
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001754 else:
1755 return self.open_local_file(url)
1756
1757 def open_local_file(self, url):
1758 """Use local file."""
1759 import mimetypes, email.utils
1760 from io import StringIO
Georg Brandl13e89462008-07-01 19:56:00 +00001761 host, file = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001762 localname = url2pathname(file)
1763 try:
1764 stats = os.stat(localname)
1765 except OSError as e:
1766 raise URLError(e.strerror, e.filename)
1767 size = stats.st_size
1768 modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
1769 mtype = mimetypes.guess_type(url)[0]
1770 headers = email.message_from_string(
1771 'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
1772 (mtype or 'text/plain', size, modified))
1773 if not host:
1774 urlfile = file
1775 if file[:1] == '/':
1776 urlfile = 'file://' + file
Georg Brandl13e89462008-07-01 19:56:00 +00001777 return addinfourl(open(localname, 'rb'), headers, urlfile)
1778 host, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001779 if (not port
Senthil Kumaran99b2c8f2009-12-27 10:13:39 +00001780 and socket.gethostbyname(host) in (localhost() + thishost())):
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001781 urlfile = file
1782 if file[:1] == '/':
1783 urlfile = 'file://' + file
Georg Brandl13e89462008-07-01 19:56:00 +00001784 return addinfourl(open(localname, 'rb'), headers, urlfile)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001785 raise URLError('local file error', 'not on local host')
1786
1787 def open_ftp(self, url):
1788 """Use FTP protocol."""
1789 if not isinstance(url, str):
1790 raise URLError('ftp error', 'proxy support for ftp protocol currently not implemented')
1791 import mimetypes
1792 from io import StringIO
Georg Brandl13e89462008-07-01 19:56:00 +00001793 host, path = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001794 if not host: raise URLError('ftp error', 'no host given')
Georg Brandl13e89462008-07-01 19:56:00 +00001795 host, port = splitport(host)
1796 user, host = splituser(host)
1797 if user: user, passwd = splitpasswd(user)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001798 else: passwd = None
Georg Brandl13e89462008-07-01 19:56:00 +00001799 host = unquote(host)
1800 user = unquote(user or '')
1801 passwd = unquote(passwd or '')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001802 host = socket.gethostbyname(host)
1803 if not port:
1804 import ftplib
1805 port = ftplib.FTP_PORT
1806 else:
1807 port = int(port)
Georg Brandl13e89462008-07-01 19:56:00 +00001808 path, attrs = splitattr(path)
1809 path = unquote(path)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001810 dirs = path.split('/')
1811 dirs, file = dirs[:-1], dirs[-1]
1812 if dirs and not dirs[0]: dirs = dirs[1:]
1813 if dirs and not dirs[0]: dirs[0] = '/'
1814 key = user, host, port, '/'.join(dirs)
1815 # XXX thread unsafe!
1816 if len(self.ftpcache) > MAXFTPCACHE:
1817 # Prune the cache, rather arbitrarily
1818 for k in self.ftpcache.keys():
1819 if k != key:
1820 v = self.ftpcache[k]
1821 del self.ftpcache[k]
1822 v.close()
1823 try:
Senthil Kumaran34d38dc2011-10-20 02:48:01 +08001824 if key not in self.ftpcache:
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001825 self.ftpcache[key] = \
1826 ftpwrapper(user, passwd, host, port, dirs)
1827 if not file: type = 'D'
1828 else: type = 'I'
1829 for attr in attrs:
Georg Brandl13e89462008-07-01 19:56:00 +00001830 attr, value = splitvalue(attr)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001831 if attr.lower() == 'type' and \
1832 value in ('a', 'A', 'i', 'I', 'd', 'D'):
1833 type = value.upper()
1834 (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
1835 mtype = mimetypes.guess_type("ftp:" + url)[0]
1836 headers = ""
1837 if mtype:
1838 headers += "Content-Type: %s\n" % mtype
1839 if retrlen is not None and retrlen >= 0:
1840 headers += "Content-Length: %d\n" % retrlen
1841 headers = email.message_from_string(headers)
Georg Brandl13e89462008-07-01 19:56:00 +00001842 return addinfourl(fp, headers, "ftp:" + url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001843 except ftperrors() as msg:
1844 raise URLError('ftp error', msg).with_traceback(sys.exc_info()[2])
1845
1846 def open_data(self, url, data=None):
1847 """Use "data" URL."""
1848 if not isinstance(url, str):
1849 raise URLError('data error', 'proxy support for data protocol currently not implemented')
1850 # ignore POSTed data
1851 #
1852 # syntax of data URLs:
1853 # dataurl := "data:" [ mediatype ] [ ";base64" ] "," data
1854 # mediatype := [ type "/" subtype ] *( ";" parameter )
1855 # data := *urlchar
1856 # parameter := attribute "=" value
1857 try:
1858 [type, data] = url.split(',', 1)
1859 except ValueError:
1860 raise IOError('data error', 'bad data URL')
1861 if not type:
1862 type = 'text/plain;charset=US-ASCII'
1863 semi = type.rfind(';')
1864 if semi >= 0 and '=' not in type[semi:]:
1865 encoding = type[semi+1:]
1866 type = type[:semi]
1867 else:
1868 encoding = ''
1869 msg = []
Senthil Kumaranf6c456d2010-05-01 08:29:18 +00001870 msg.append('Date: %s'%time.strftime('%a, %d %b %Y %H:%M:%S GMT',
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001871 time.gmtime(time.time())))
1872 msg.append('Content-type: %s' % type)
1873 if encoding == 'base64':
1874 import base64
Georg Brandl706824f2009-06-04 09:42:55 +00001875 # XXX is this encoding/decoding ok?
1876 data = base64.decodebytes(data.encode('ascii')).decode('latin1')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001877 else:
Georg Brandl13e89462008-07-01 19:56:00 +00001878 data = unquote(data)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001879 msg.append('Content-Length: %d' % len(data))
1880 msg.append('')
1881 msg.append(data)
1882 msg = '\n'.join(msg)
Georg Brandl13e89462008-07-01 19:56:00 +00001883 headers = email.message_from_string(msg)
1884 f = io.StringIO(msg)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001885 #f.fileno = None # needed for addinfourl
Georg Brandl13e89462008-07-01 19:56:00 +00001886 return addinfourl(f, headers, url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001887
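# A minimal sketch of a data: URL that open_data() above accepts; the base64
# payload decodes to the text "Hello, world!" and the reported type is
# text/plain.
#
#   f = URLopener().open('data:text/plain;base64,SGVsbG8sIHdvcmxkIQ==')
#   f.info()['Content-type']      # -> 'text/plain'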
1888
1889class FancyURLopener(URLopener):
1890 """Derived class with handlers for errors we can handle (perhaps)."""
1891
1892 def __init__(self, *args, **kwargs):
1893 URLopener.__init__(self, *args, **kwargs)
1894 self.auth_cache = {}
1895 self.tries = 0
1896 self.maxtries = 10
1897
1898 def http_error_default(self, url, fp, errcode, errmsg, headers):
1899 """Default error handling -- don't raise an exception."""
Georg Brandl13e89462008-07-01 19:56:00 +00001900 return addinfourl(fp, headers, "http:" + url, errcode)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001901
1902 def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
1903 """Error 302 -- relocated (temporarily)."""
1904 self.tries += 1
1905 if self.maxtries and self.tries >= self.maxtries:
1906 if hasattr(self, "http_error_500"):
1907 meth = self.http_error_500
1908 else:
1909 meth = self.http_error_default
1910 self.tries = 0
1911 return meth(url, fp, 500,
1912 "Internal Server Error: Redirect Recursion", headers)
1913 result = self.redirect_internal(url, fp, errcode, errmsg, headers,
1914 data)
1915 self.tries = 0
1916 return result
1917
1918 def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
1919 if 'location' in headers:
1920 newurl = headers['location']
1921 elif 'uri' in headers:
1922 newurl = headers['uri']
1923 else:
1924 return
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001925 fp.close()
guido@google.coma119df92011-03-29 11:41:02 -07001926
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001927 # In case the server sent a relative URL, join with original:
Georg Brandl13e89462008-07-01 19:56:00 +00001928 newurl = urljoin(self.type + ":" + url, newurl)
guido@google.coma119df92011-03-29 11:41:02 -07001929
1930 urlparts = urlparse(newurl)
1931
1932 # For security reasons, we don't allow redirection to anything other
1933 # than http, https and ftp.
1934
1935 # We are using the newer HTTPError with the older redirect_internal
1936 # method; this older method will be deprecated in 3.3.
1937
Senthil Kumaran34d38dc2011-10-20 02:48:01 +08001938 if urlparts.scheme not in ('http', 'https', 'ftp'):
guido@google.coma119df92011-03-29 11:41:02 -07001939 raise HTTPError(newurl, errcode,
1940 errmsg +
1941 " Redirection to url '%s' is not allowed." % newurl,
1942 headers, fp)
1943
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001944 return self.open(newurl)
1945
1946 def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
1947 """Error 301 -- also relocated (permanently)."""
1948 return self.http_error_302(url, fp, errcode, errmsg, headers, data)
1949
1950 def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
1951 """Error 303 -- also relocated (essentially identical to 302)."""
1952 return self.http_error_302(url, fp, errcode, errmsg, headers, data)
1953
1954 def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
1955 """Error 307 -- relocated, but turn POST into error."""
1956 if data is None:
1957 return self.http_error_302(url, fp, errcode, errmsg, headers, data)
1958 else:
1959 return self.http_error_default(url, fp, errcode, errmsg, headers)
1960
Senthil Kumaran80f1b052010-06-18 15:08:18 +00001961 def http_error_401(self, url, fp, errcode, errmsg, headers, data=None,
1962 retry=False):
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001963 """Error 401 -- authentication required.
1964 This function supports Basic authentication only."""
Senthil Kumaran34d38dc2011-10-20 02:48:01 +08001965 if 'www-authenticate' not in headers:
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001966 URLopener.http_error_default(self, url, fp,
1967 errcode, errmsg, headers)
1968 stuff = headers['www-authenticate']
1969 import re
1970 match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
1971 if not match:
1972 URLopener.http_error_default(self, url, fp,
1973 errcode, errmsg, headers)
1974 scheme, realm = match.groups()
1975 if scheme.lower() != 'basic':
1976 URLopener.http_error_default(self, url, fp,
1977 errcode, errmsg, headers)
Senthil Kumaran80f1b052010-06-18 15:08:18 +00001978 if not retry:
1979 URLopener.http_error_default(self, url, fp, errcode, errmsg,
1980 headers)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001981 name = 'retry_' + self.type + '_basic_auth'
1982 if data is None:
1983 return getattr(self,name)(url, realm)
1984 else:
1985 return getattr(self,name)(url, realm, data)
1986
Senthil Kumaran80f1b052010-06-18 15:08:18 +00001987 def http_error_407(self, url, fp, errcode, errmsg, headers, data=None,
1988 retry=False):
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001989 """Error 407 -- proxy authentication required.
1990 This function supports Basic authentication only."""
Senthil Kumaran34d38dc2011-10-20 02:48:01 +08001991 if 'proxy-authenticate' not in headers:
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001992 URLopener.http_error_default(self, url, fp,
1993 errcode, errmsg, headers)
1994 stuff = headers['proxy-authenticate']
1995 import re
1996 match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
1997 if not match:
1998 URLopener.http_error_default(self, url, fp,
1999 errcode, errmsg, headers)
2000 scheme, realm = match.groups()
2001 if scheme.lower() != 'basic':
2002 URLopener.http_error_default(self, url, fp,
2003 errcode, errmsg, headers)
Senthil Kumaran80f1b052010-06-18 15:08:18 +00002004 if not retry:
2005 URLopener.http_error_default(self, url, fp, errcode, errmsg,
2006 headers)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002007 name = 'retry_proxy_' + self.type + '_basic_auth'
2008 if data is None:
2009 return getattr(self,name)(url, realm)
2010 else:
2011 return getattr(self,name)(url, realm, data)
2012
2013 def retry_proxy_http_basic_auth(self, url, realm, data=None):
Georg Brandl13e89462008-07-01 19:56:00 +00002014 host, selector = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002015 newurl = 'http://' + host + selector
2016 proxy = self.proxies['http']
Georg Brandl13e89462008-07-01 19:56:00 +00002017 urltype, proxyhost = splittype(proxy)
2018 proxyhost, proxyselector = splithost(proxyhost)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002019 i = proxyhost.find('@') + 1
2020 proxyhost = proxyhost[i:]
2021 user, passwd = self.get_user_passwd(proxyhost, realm, i)
2022 if not (user or passwd): return None
Georg Brandl13e89462008-07-01 19:56:00 +00002023 proxyhost = "%s:%s@%s" % (quote(user, safe=''),
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002024 quote(passwd, safe=''), proxyhost)
2025 self.proxies['http'] = 'http://' + proxyhost + proxyselector
2026 if data is None:
2027 return self.open(newurl)
2028 else:
2029 return self.open(newurl, data)
2030
2031 def retry_proxy_https_basic_auth(self, url, realm, data=None):
Georg Brandl13e89462008-07-01 19:56:00 +00002032 host, selector = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002033 newurl = 'https://' + host + selector
2034 proxy = self.proxies['https']
Georg Brandl13e89462008-07-01 19:56:00 +00002035 urltype, proxyhost = splittype(proxy)
2036 proxyhost, proxyselector = splithost(proxyhost)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002037 i = proxyhost.find('@') + 1
2038 proxyhost = proxyhost[i:]
2039 user, passwd = self.get_user_passwd(proxyhost, realm, i)
2040 if not (user or passwd): return None
Georg Brandl13e89462008-07-01 19:56:00 +00002041 proxyhost = "%s:%s@%s" % (quote(user, safe=''),
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002042 quote(passwd, safe=''), proxyhost)
2043 self.proxies['https'] = 'https://' + proxyhost + proxyselector
2044 if data is None:
2045 return self.open(newurl)
2046 else:
2047 return self.open(newurl, data)
2048
2049 def retry_http_basic_auth(self, url, realm, data=None):
Georg Brandl13e89462008-07-01 19:56:00 +00002050 host, selector = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002051 i = host.find('@') + 1
2052 host = host[i:]
2053 user, passwd = self.get_user_passwd(host, realm, i)
2054 if not (user or passwd): return None
Georg Brandl13e89462008-07-01 19:56:00 +00002055 host = "%s:%s@%s" % (quote(user, safe=''),
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002056 quote(passwd, safe=''), host)
2057 newurl = 'http://' + host + selector
2058 if data is None:
2059 return self.open(newurl)
2060 else:
2061 return self.open(newurl, data)
2062
2063 def retry_https_basic_auth(self, url, realm, data=None):
Georg Brandl13e89462008-07-01 19:56:00 +00002064 host, selector = splithost(url)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002065 i = host.find('@') + 1
2066 host = host[i:]
2067 user, passwd = self.get_user_passwd(host, realm, i)
2068 if not (user or passwd): return None
Georg Brandl13e89462008-07-01 19:56:00 +00002069 host = "%s:%s@%s" % (quote(user, safe=''),
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002070 quote(passwd, safe=''), host)
2071 newurl = 'https://' + host + selector
2072 if data is None:
2073 return self.open(newurl)
2074 else:
2075 return self.open(newurl, data)
2076
Florent Xicluna757445b2010-05-17 17:24:07 +00002077 def get_user_passwd(self, host, realm, clear_cache=0):
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002078 key = realm + '@' + host.lower()
2079 if key in self.auth_cache:
2080 if clear_cache:
2081 del self.auth_cache[key]
2082 else:
2083 return self.auth_cache[key]
2084 user, passwd = self.prompt_user_passwd(host, realm)
2085 if user or passwd: self.auth_cache[key] = (user, passwd)
2086 return user, passwd
2087
2088 def prompt_user_passwd(self, host, realm):
2089 """Override this in a GUI environment!"""
2090 import getpass
2091 try:
2092 user = input("Enter username for %s at %s: " % (realm, host))
2093 passwd = getpass.getpass("Enter password for %s in %s at %s: " %
2094 (user, realm, host))
2095 return user, passwd
2096 except KeyboardInterrupt:
2097 print()
2098 return None, None
2099
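# A minimal sketch: a non-interactive program would typically subclass
# FancyURLopener and override prompt_user_passwd() to supply stored
# credentials (the values below are placeholders).
#
#   class StoredAuthOpener(FancyURLopener):
#       def prompt_user_passwd(self, host, realm):
#           return 'someuser', 'somepassword'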
2100
2101# Utility functions
2102
2103_localhost = None
2104def localhost():
2105 """Return the IP address of the magic hostname 'localhost'."""
2106 global _localhost
2107 if _localhost is None:
2108 _localhost = socket.gethostbyname('localhost')
2109 return _localhost
2110
2111_thishost = None
2112def thishost():
Senthil Kumaran99b2c8f2009-12-27 10:13:39 +00002113 """Return the IP addresses of the current host."""
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002114 global _thishost
2115 if _thishost is None:
Senthil Kumaran1b7da512011-10-06 00:32:02 +08002116 _thishost = tuple(socket.gethostbyname_ex(socket.gethostname())[2])
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002117 return _thishost
2118
2119_ftperrors = None
2120def ftperrors():
2121 """Return the set of errors raised by the FTP class."""
2122 global _ftperrors
2123 if _ftperrors is None:
2124 import ftplib
2125 _ftperrors = ftplib.all_errors
2126 return _ftperrors
2127
2128_noheaders = None
2129def noheaders():
Georg Brandl13e89462008-07-01 19:56:00 +00002130 """Return an empty email Message object."""
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002131 global _noheaders
2132 if _noheaders is None:
Georg Brandl13e89462008-07-01 19:56:00 +00002133 _noheaders = email.message_from_string("")
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002134 return _noheaders
2135
2136
2137# Utility classes
2138
2139class ftpwrapper:
2140 """Class used by open_ftp() for cache of open FTP connections."""
2141
Nadeem Vawda08f5f7a2011-07-23 14:03:00 +02002142 def __init__(self, user, passwd, host, port, dirs, timeout=None,
2143 persistent=True):
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002144 self.user = user
2145 self.passwd = passwd
2146 self.host = host
2147 self.port = port
2148 self.dirs = dirs
2149 self.timeout = timeout
Nadeem Vawda08f5f7a2011-07-23 14:03:00 +02002150 self.refcount = 0
2151 self.keepalive = persistent
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002152 self.init()
2153
2154 def init(self):
2155 import ftplib
2156 self.busy = 0
2157 self.ftp = ftplib.FTP()
2158 self.ftp.connect(self.host, self.port, self.timeout)
2159 self.ftp.login(self.user, self.passwd)
2160 for dir in self.dirs:
2161 self.ftp.cwd(dir)
2162
2163 def retrfile(self, file, type):
2164 import ftplib
2165 self.endtransfer()
2166 if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
2167 else: cmd = 'TYPE ' + type; isdir = 0
2168 try:
2169 self.ftp.voidcmd(cmd)
2170 except ftplib.all_errors:
2171 self.init()
2172 self.ftp.voidcmd(cmd)
2173 conn = None
2174 if file and not isdir:
2175 # Try to retrieve as a file
2176 try:
2177 cmd = 'RETR ' + file
Senthil Kumaran2024acd2011-03-24 11:46:19 +08002178 conn, retrlen = self.ftp.ntransfercmd(cmd)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002179 except ftplib.error_perm as reason:
2180 if str(reason)[:3] != '550':
Georg Brandl13e89462008-07-01 19:56:00 +00002181 raise URLError('ftp error', reason).with_traceback(
2182 sys.exc_info()[2])
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002183 if not conn:
2184 # Set transfer mode to ASCII!
2185 self.ftp.voidcmd('TYPE A')
2186 # Try a directory listing. Verify that directory exists.
2187 if file:
2188 pwd = self.ftp.pwd()
2189 try:
2190 try:
2191 self.ftp.cwd(file)
2192 except ftplib.error_perm as reason:
Georg Brandl13e89462008-07-01 19:56:00 +00002193 raise URLError('ftp error', reason) from reason
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002194 finally:
2195 self.ftp.cwd(pwd)
2196 cmd = 'LIST ' + file
2197 else:
2198 cmd = 'LIST'
Senthil Kumaran2024acd2011-03-24 11:46:19 +08002199 conn, retrlen = self.ftp.ntransfercmd(cmd)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002200 self.busy = 1
Senthil Kumaran2024acd2011-03-24 11:46:19 +08002201
Nadeem Vawda08f5f7a2011-07-23 14:03:00 +02002202 ftpobj = addclosehook(conn.makefile('rb'), self.file_close)
2203 self.refcount += 1
Senthil Kumaran2024acd2011-03-24 11:46:19 +08002204 conn.close()
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002205 # Pass back both a suitably decorated object and a retrieval length
Senthil Kumaran2024acd2011-03-24 11:46:19 +08002206 return (ftpobj, retrlen)
2207
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002208 def endtransfer(self):
2209 if not self.busy:
2210 return
2211 self.busy = 0
2212 try:
2213 self.ftp.voidresp()
2214 except ftperrors():
2215 pass
2216
2217 def close(self):
Nadeem Vawda08f5f7a2011-07-23 14:03:00 +02002218 self.keepalive = False
2219 if self.refcount <= 0:
2220 self.real_close()
2221
2222 def file_close(self):
2223 self.endtransfer()
2224 self.refcount -= 1
2225 if self.refcount <= 0 and not self.keepalive:
2226 self.real_close()
2227
2228 def real_close(self):
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002229 self.endtransfer()
2230 try:
2231 self.ftp.close()
2232 except ftperrors():
2233 pass
2234
2235# Proxy handling
2236def getproxies_environment():
2237 """Return a dictionary of scheme -> proxy server URL mappings.
2238
2239 Scan the environment for variables named <scheme>_proxy;
2240 this seems to be the standard convention. If you need a
2241 different way, you can pass a proxies dictionary to the
2242 [Fancy]URLopener constructor.
2243
2244 """
2245 proxies = {}
2246 for name, value in os.environ.items():
2247 name = name.lower()
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002248 if value and name[-6:] == '_proxy':
2249 proxies[name[:-6]] = value
2250 return proxies
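
# Illustration: with environment variables such as
#
#   http_proxy="http://proxy.example.com:3128"
#   ftp_proxy="http://proxy.example.com:3128"
#
# getproxies_environment() returns
# {'http': 'http://proxy.example.com:3128',
#  'ftp': 'http://proxy.example.com:3128'}.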
2251
2252def proxy_bypass_environment(host):
2253 """Test if proxies should not be used for a particular host.
2254
2255 Checks the environment for a variable named no_proxy, which should
2256 be a list of DNS suffixes separated by commas, or '*' for all hosts.
2257 """
2258 no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
2259 # '*' is special case for always bypass
2260 if no_proxy == '*':
2261 return 1
2262 # strip port off host
Georg Brandl13e89462008-07-01 19:56:00 +00002263 hostonly, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002264 # check if the host ends with any of the DNS suffixes
Senthil Kumaran89976f12011-08-06 12:27:40 +08002265 no_proxy_list = [proxy.strip() for proxy in no_proxy.split(',')]
2266 for name in no_proxy_list:
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002267 if name and (hostonly.endswith(name) or host.endswith(name)):
2268 return 1
2269 # otherwise, don't bypass
2270 return 0
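
# Illustration: with no_proxy="localhost,127.0.0.1,.example.com" in the
# environment, proxy_bypass_environment('www.example.com') and
# proxy_bypass_environment('localhost:8080') both return 1, while
# proxy_bypass_environment('www.python.org') returns 0.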
2271
2272
Ronald Oussorene72e1612011-03-14 18:15:25 -04002273# This code tests an OS X-specific data structure but is testable on all
2274# platforms.
2275def _proxy_bypass_macosx_sysconf(host, proxy_settings):
2276 """
2277 Return True iff this host shouldn't be accessed using a proxy
2278
2279 This function uses the MacOSX framework SystemConfiguration
2280 to fetch the proxy information.
2281
2282 proxy_settings come from _scproxy._get_proxy_settings or are mocked, e.g.:
2283 { 'exclude_simple': bool,
2284 'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16']
2285 }
2286 """
2287 import re
2288 import socket
2289 from fnmatch import fnmatch
2290
2291 hostonly, port = splitport(host)
2292
2293 def ip2num(ipAddr):
2294 parts = ipAddr.split('.')
2295 parts = list(map(int, parts))
2296 if len(parts) != 4:
2297 parts = (parts + [0, 0, 0, 0])[:4]
2298 return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]
2299
2300 # Check for simple host names:
2301 if '.' not in host:
2302 if proxy_settings['exclude_simple']:
2303 return True
2304
2305 hostIP = None
2306
2307 for value in proxy_settings.get('exceptions', ()):
2308 # Items in the list are strings like these: *.local, 169.254/16
2309 if not value: continue
2310
2311 m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
2312 if m is not None:
2313 if hostIP is None:
2314 try:
2315 hostIP = socket.gethostbyname(hostonly)
2316 hostIP = ip2num(hostIP)
2317 except socket.error:
2318 continue
2319
2320 base = ip2num(m.group(1))
2321 mask = m.group(2)
2322 if mask is None:
2323 mask = 8 * (m.group(1).count('.') + 1)
2324 else:
2325 mask = int(mask[1:])
2326 mask = 32 - mask
2327
2328 if (hostIP >> mask) == (base >> mask):
2329 return True
2330
2331 elif fnmatch(host, value):
2332 return True
2333
2334 return False
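
# Illustration: with settings shaped like the docstring above, e.g.
#
#   settings = {'exclude_simple': True,
#               'exceptions': ['*.local', '169.254/16']}
#
# _proxy_bypass_macosx_sysconf('printer.local', settings) matches the
# '*.local' glob and returns True, while
# _proxy_bypass_macosx_sysconf('www.python.org', settings) returns False.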
2335
2336
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002337if sys.platform == 'darwin':
Ronald Oussoren84151202010-04-18 20:46:11 +00002338 from _scproxy import _get_proxy_settings, _get_proxies
2339
2340 def proxy_bypass_macosx_sysconf(host):
Ronald Oussoren84151202010-04-18 20:46:11 +00002341 proxy_settings = _get_proxy_settings()
Ronald Oussorene72e1612011-03-14 18:15:25 -04002342 return _proxy_bypass_macosx_sysconf(host, proxy_settings)
Ronald Oussoren84151202010-04-18 20:46:11 +00002343
2344 def getproxies_macosx_sysconf():
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002345 """Return a dictionary of scheme -> proxy server URL mappings.
2346
Ronald Oussoren84151202010-04-18 20:46:11 +00002347 This function uses the MacOSX framework SystemConfiguration
2348 to fetch the proxy information.
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002349 """
Ronald Oussoren84151202010-04-18 20:46:11 +00002350 return _get_proxies()
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002351
Ronald Oussoren84151202010-04-18 20:46:11 +00002352
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002353
2354 def proxy_bypass(host):
2355 if getproxies_environment():
2356 return proxy_bypass_environment(host)
2357 else:
Ronald Oussoren84151202010-04-18 20:46:11 +00002358 return proxy_bypass_macosx_sysconf(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002359
2360 def getproxies():
Ronald Oussoren84151202010-04-18 20:46:11 +00002361 return getproxies_environment() or getproxies_macosx_sysconf()
2362
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002363
2364elif os.name == 'nt':
2365 def getproxies_registry():
2366 """Return a dictionary of scheme -> proxy server URL mappings.
2367
2368 Win32 uses the registry to store proxies.
2369
2370 """
2371 proxies = {}
2372 try:
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002373 import winreg
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002374 except ImportError:
2375 # Std module, so should be around - but you never know!
2376 return proxies
2377 try:
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002378 internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002379 r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002380 proxyEnable = winreg.QueryValueEx(internetSettings,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002381 'ProxyEnable')[0]
2382 if proxyEnable:
2383 # Returned as Unicode, but causes problems if not converted to ASCII
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002384 proxyServer = str(winreg.QueryValueEx(internetSettings,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002385 'ProxyServer')[0])
2386 if '=' in proxyServer:
2387 # Per-protocol settings
2388 for p in proxyServer.split(';'):
2389 protocol, address = p.split('=', 1)
2390 # See if address has a type:// prefix
2391 import re
2392 if not re.match('^([^/:]+)://', address):
2393 address = '%s://%s' % (protocol, address)
2394 proxies[protocol] = address
2395 else:
2396 # Use one setting for all protocols
2397 if proxyServer[:5] == 'http:':
2398 proxies['http'] = proxyServer
2399 else:
2400 proxies['http'] = 'http://%s' % proxyServer
Senthil Kumaran04f31b82010-07-14 20:10:52 +00002401 proxies['https'] = 'https://%s' % proxyServer
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002402 proxies['ftp'] = 'ftp://%s' % proxyServer
2403 internetSettings.Close()
2404 except (WindowsError, ValueError, TypeError):
2405 # Either registry key not found etc, or the value in an
2406 # unexpected format.
2407 # proxies already set up to be empty so nothing to do
2408 pass
2409 return proxies
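
# Illustration: typical ProxyServer registry values and the mappings they
# produce (addresses are placeholders):
#
#   "http=127.0.0.1:8080;https=127.0.0.1:8081"
#       -> {'http': 'http://127.0.0.1:8080', 'https': 'https://127.0.0.1:8081'}
#   "127.0.0.1:8080"
#       -> {'http': 'http://127.0.0.1:8080',
#           'https': 'https://127.0.0.1:8080', 'ftp': 'ftp://127.0.0.1:8080'}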
2410
2411 def getproxies():
2412 """Return a dictionary of scheme -> proxy server URL mappings.
2413
2414 Returns settings gathered from the environment, if specified,
2415 or the registry.
2416
2417 """
2418 return getproxies_environment() or getproxies_registry()
2419
2420 def proxy_bypass_registry(host):
2421 try:
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002422 import winreg
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002423 import re
2424 except ImportError:
2425 # Std modules, so should be around - but you never know!
2426 return 0
2427 try:
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002428 internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002429 r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002430 proxyEnable = winreg.QueryValueEx(internetSettings,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002431 'ProxyEnable')[0]
Georg Brandl4ed72ac2009-04-01 04:28:33 +00002432 proxyOverride = str(winreg.QueryValueEx(internetSettings,
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002433 'ProxyOverride')[0])
2434 # ^^^^ Returned as Unicode, but causes problems if not converted to ASCII
2435 except WindowsError:
2436 return 0
2437 if not proxyEnable or not proxyOverride:
2438 return 0
2439 # try to make a host list from name and IP address.
Georg Brandl13e89462008-07-01 19:56:00 +00002440 rawHost, port = splitport(host)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002441 host = [rawHost]
2442 try:
2443 addr = socket.gethostbyname(rawHost)
2444 if addr != rawHost:
2445 host.append(addr)
2446 except socket.error:
2447 pass
2448 try:
2449 fqdn = socket.getfqdn(rawHost)
2450 if fqdn != rawHost:
2451 host.append(fqdn)
2452 except socket.error:
2453 pass
2454 # make a check value list from the registry entry: replace the
2455 # '<local>' string by the localhost entry and the corresponding
2456 # canonical entry.
2457 proxyOverride = proxyOverride.split(';')
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002458 # now check if we match one of the registry values.
2459 for test in proxyOverride:
Senthil Kumaran49476062009-05-01 06:00:23 +00002460 if test == '<local>':
2461 if '.' not in rawHost:
2462 return 1
Jeremy Hylton1afc1692008-06-18 20:49:58 +00002463 test = test.replace(".", r"\.") # mask dots
2464 test = test.replace("*", r".*") # change glob sequence
2465 test = test.replace("?", r".") # change glob char
2466 for val in host:
2467 # print "%s <--> %s" %( test, val )
2468 if re.match(test, val, re.I):
2469 return 1
2470 return 0
2471
2472 def proxy_bypass(host):
2473 """Return 1 if the host should bypass the proxy, 0 otherwise.
2474
2475 Checks the no_proxy environment variable when proxy environment
2476 variables are set, otherwise the Windows registry ProxyOverride value.
2477
2478 """
2479 if getproxies_environment():
2480 return proxy_bypass_environment(host)
2481 else:
2482 return proxy_bypass_registry(host)
2483
2484else:
2485 # By default use environment variables
2486 getproxies = getproxies_environment
2487 proxy_bypass = proxy_bypass_environment