"""An extensible library for opening URLs using a variety of protocols

The simplest way to use this module is to call the urlopen function,
which accepts a string containing a URL or a Request object (described
below).  It opens the URL and returns the results as a file-like
object; the returned object has some extra methods described below.

The OpenerDirector manages a collection of Handler objects that do
all the actual work.  Each Handler implements a particular protocol or
option.  The OpenerDirector is a composite object that invokes the
Handlers needed to open the requested URL.  For example, the
HTTPHandler performs HTTP GET and POST requests and deals with
non-error returns.  The HTTPRedirectHandler automatically deals with
HTTP 301, 302, 303 and 307 redirect errors, and the
HTTPDigestAuthHandler deals with digest authentication.

urlopen(url, data=None) -- Basic usage is the same as the original
urllib.  Pass the url and optionally data to post to an HTTP URL, and
get a file-like object back.  One difference is that you can also pass
a Request instance instead of a URL.  Raises a URLError (subclass of
IOError); for HTTP errors, raises an HTTPError, which can also be
treated as a valid response.

build_opener -- Function that creates a new OpenerDirector instance.
Will install the default handlers.  Accepts one or more Handlers as
arguments, either instances or Handler classes that it will
instantiate.  If one of the arguments is a subclass of the default
handler, the argument will be installed instead of the default.

install_opener -- Installs a new opener as the default opener.

objects of interest:

OpenerDirector -- Sets up the User Agent as the Python-urllib client and
manages the Handler classes, while dealing with requests and responses.

Request -- An object that encapsulates the state of a request.  The
state can be as simple as the URL.  It can also include extra HTTP
headers, e.g. a User-Agent.

BaseHandler -- Base class for the protocol and error handlers that an
OpenerDirector chains together.

internals:
BaseHandler and parent
_call_chain conventions

Example usage:

import urllib.request

# set up authentication info
authinfo = urllib.request.HTTPBasicAuthHandler()
authinfo.add_password(realm='PDQ Application',
                      uri='https://mahler:8092/site-updates.py',
                      user='klem',
                      passwd='geheim$parole')

proxy_support = urllib.request.ProxyHandler({"http" : "http://ahad-haam:3128"})

# build a new opener that adds authentication and caching FTP handlers
opener = urllib.request.build_opener(proxy_support, authinfo,
                                     urllib.request.CacheFTPHandler)

# install it
urllib.request.install_opener(opener)

f = urllib.request.urlopen('http://www.python.org/')
"""
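
# The docstring above says urlopen() also accepts a Request object and that
# passing data performs a POST.  A minimal sketch (illustrative only; the URL,
# form fields and header value are placeholders, and nothing here runs at
# import time):
#
#   import urllib.request
#   from urllib.parse import urlencode
#
#   data = urlencode({'q': 'python'}).encode('ascii')
#   req = urllib.request.Request('http://www.example.com/search', data=data,
#                                headers={'User-agent': 'ExampleClient/1.0'})
#   f = urllib.request.urlopen(req)   # POST, because data is not None
#   print(f.read())
#   f.close()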

# XXX issues:
# If an authentication error handler that tries to perform
# authentication for some reason but fails, how should the error be
# signalled?  The client needs to know the HTTP error code.  But if
# the handler knows that the problem was, e.g., that it didn't know
# the hash algorithm requested in the challenge, it would be good to
# pass that information along to the client, too.
# ftp errors aren't handled cleanly
# check digest against correct (i.e. non-apache) implementation

# Possible extensions:
# complex proxies  XXX not sure what exactly was meant by this
# abstract factory for opener

import base64
import bisect
import email
import hashlib
import http.client
import io
import os
import posixpath
import random
import re
import socket
import sys
import time

from urllib.error import URLError, HTTPError, ContentTooShortError
from urllib.parse import (
    urlparse, urlsplit, urljoin, unwrap, quote, unquote,
    splittype, splithost, splitport, splituser, splitpasswd,
    splitattr, splitquery, splitvalue, splittag, to_bytes, urlunparse)
from urllib.response import addinfourl, addclosehook

# check for SSL
try:
    import ssl
except ImportError:
    _have_ssl = False
else:
    _have_ssl = True

# used in User-Agent header sent
__version__ = sys.version[:3]

_opener = None
def urlopen(url, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
    global _opener
    if _opener is None:
        _opener = build_opener()
    return _opener.open(url, data, timeout)

def install_opener(opener):
    global _opener
    _opener = opener

# TODO(jhylton): Make this work with the same global opener.
_urlopener = None
def urlretrieve(url, filename=None, reporthook=None, data=None):
    global _urlopener
    if not _urlopener:
        _urlopener = FancyURLopener()
    return _urlopener.retrieve(url, filename, reporthook, data)

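
# A quick sketch of urlretrieve() (illustrative; the URL and local filename
# are placeholders):
#
#   filename, headers = urlretrieve('http://www.example.com/', 'page.html')
#   print(headers['Content-Type'])    # headers is an email.message.Message
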
def urlcleanup():
    if _urlopener:
        _urlopener.cleanup()
    global _opener
    if _opener:
        _opener = None

# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$", re.ASCII)
def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.

    """
    url = request.full_url
    host = urlparse(url)[1]
    if host == "":
        host = request.get_header("Host", "")

    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()

class Request:

    def __init__(self, url, data=None, headers={},
                 origin_req_host=None, unverifiable=False):
        # unwrap('<URL:type://host/path>') --> 'type://host/path'
        self.full_url = unwrap(url)
        self.full_url, fragment = splittag(self.full_url)
        self.data = data
        self.headers = {}
        self._tunnel_host = None
        for key, value in headers.items():
            self.add_header(key, value)
        self.unredirected_hdrs = {}
        if origin_req_host is None:
            origin_req_host = request_host(self)
        self.origin_req_host = origin_req_host
        self.unverifiable = unverifiable
        self._parse()

    def _parse(self):
        self.type, rest = splittype(self.full_url)
        if self.type is None:
            raise ValueError("unknown url type: %s" % self.full_url)
        self.host, self.selector = splithost(rest)
        if self.host:
            self.host = unquote(self.host)

    def get_method(self):
        if self.data is not None:
            return "POST"
        else:
            return "GET"

    # Begin deprecated methods

    def add_data(self, data):
        self.data = data

    def has_data(self):
        return self.data is not None

    def get_data(self):
        return self.data

    def get_full_url(self):
        return self.full_url

    def get_type(self):
        return self.type

    def get_host(self):
        return self.host

    def get_selector(self):
        return self.selector

    def is_unverifiable(self):
        return self.unverifiable

    def get_origin_req_host(self):
        return self.origin_req_host

    # End deprecated methods

    def set_proxy(self, host, type):
        if self.type == 'https' and not self._tunnel_host:
            self._tunnel_host = self.host
        else:
            self.type = type
            self.selector = self.full_url
        self.host = host

    def has_proxy(self):
        return self.selector == self.full_url

    def add_header(self, key, val):
        # useful for something like authentication
        self.headers[key.capitalize()] = val

    def add_unredirected_header(self, key, val):
        # will not be added to a redirected request
        self.unredirected_hdrs[key.capitalize()] = val

    def has_header(self, header_name):
        return (header_name in self.headers or
                header_name in self.unredirected_hdrs)

    def get_header(self, header_name, default=None):
        return self.headers.get(
            header_name,
            self.unredirected_hdrs.get(header_name, default))

    def header_items(self):
        hdrs = self.unredirected_hdrs.copy()
        hdrs.update(self.headers)
        return list(hdrs.items())

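
# Header handling in Request, sketched (illustrative values): keys are
# normalized with str.capitalize(), and headers added through
# add_unredirected_header() are kept separately so redirect_request() below
# does not copy them onto the follow-up request.
#
#   req = Request('http://www.example.com/')
#   req.add_header('user-agent', 'ExampleClient/1.0')
#   req.has_header('User-agent')      # True -- note the capitalization
#   req.add_unredirected_header('authorization', 'Basic placeholder')
#   req.header_items()                # merged view of both dictionaries
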
class OpenerDirector:
    def __init__(self):
        client_version = "Python-urllib/%s" % __version__
        self.addheaders = [('User-agent', client_version)]
        # self.handlers is retained only for backward compatibility
        self.handlers = []
        # manage the individual handlers
        self.handle_open = {}
        self.handle_error = {}
        self.process_response = {}
        self.process_request = {}

    def add_handler(self, handler):
        if not hasattr(handler, "add_parent"):
            raise TypeError("expected BaseHandler instance, got %r" %
                            type(handler))

        added = False
        for meth in dir(handler):
            if meth in ["redirect_request", "do_open", "proxy_open"]:
                # oops, coincidental match
                continue

            i = meth.find("_")
            protocol = meth[:i]
            condition = meth[i+1:]

            if condition.startswith("error"):
                j = condition.find("_") + i + 1
                kind = meth[j+1:]
                try:
                    kind = int(kind)
                except ValueError:
                    pass
                lookup = self.handle_error.get(protocol, {})
                self.handle_error[protocol] = lookup
            elif condition == "open":
                kind = protocol
                lookup = self.handle_open
            elif condition == "response":
                kind = protocol
                lookup = self.process_response
            elif condition == "request":
                kind = protocol
                lookup = self.process_request
            else:
                continue

            handlers = lookup.setdefault(kind, [])
            if handlers:
                bisect.insort(handlers, handler)
            else:
                handlers.append(handler)
            added = True

        if added:
            bisect.insort(self.handlers, handler)
            handler.add_parent(self)

    def close(self):
        # Only exists for backwards compatibility.
        pass

    def _call_chain(self, chain, kind, meth_name, *args):
        # Handlers raise an exception if no one else should try to handle
        # the request, or return None if they can't but another handler
        # could.  Otherwise, they return the response.
        handlers = chain.get(kind, ())
        for handler in handlers:
            func = getattr(handler, meth_name)
            result = func(*args)
            if result is not None:
                return result

    def open(self, fullurl, data=None, timeout=socket._GLOBAL_DEFAULT_TIMEOUT):
        # accept a URL or a Request object
        if isinstance(fullurl, str):
            req = Request(fullurl, data)
        else:
            req = fullurl
            if data is not None:
                req.data = data

        req.timeout = timeout
        protocol = req.type

        # pre-process request
        meth_name = protocol+"_request"
        for processor in self.process_request.get(protocol, []):
            meth = getattr(processor, meth_name)
            req = meth(req)

        response = self._open(req, data)

        # post-process response
        meth_name = protocol+"_response"
        for processor in self.process_response.get(protocol, []):
            meth = getattr(processor, meth_name)
            response = meth(req, response)

        return response

    def _open(self, req, data=None):
        result = self._call_chain(self.handle_open, 'default',
                                  'default_open', req)
        if result:
            return result

        protocol = req.type
        result = self._call_chain(self.handle_open, protocol, protocol +
                                  '_open', req)
        if result:
            return result

        return self._call_chain(self.handle_open, 'unknown',
                                'unknown_open', req)

    def error(self, proto, *args):
        if proto in ('http', 'https'):
            # XXX http[s] protocols are special-cased
            dict = self.handle_error['http']  # https is not different than http
            proto = args[2]  # YUCK!
            meth_name = 'http_error_%s' % proto
            http_err = 1
            orig_args = args
        else:
            dict = self.handle_error
            meth_name = proto + '_error'
            http_err = 0
        args = (dict, proto, meth_name) + args
        result = self._call_chain(*args)
        if result:
            return result

        if http_err:
            args = (dict, 'default', 'http_error_default') + orig_args
            return self._call_chain(*args)

# XXX probably also want an abstract factory that knows when it makes
# sense to skip a superclass in favor of a subclass and when it might
# make sense to include both

def build_opener(*handlers):
    """Create an opener object from a list of handlers.

    The opener will use several default handlers, including support
    for HTTP, FTP and, when applicable, HTTPS.

    If any of the handlers passed as arguments are subclasses of the
    default handlers, the default handlers will not be used.
    """
    def isclass(obj):
        return isinstance(obj, type) or hasattr(obj, "__bases__")

    opener = OpenerDirector()
    default_classes = [ProxyHandler, UnknownHandler, HTTPHandler,
                       HTTPDefaultErrorHandler, HTTPRedirectHandler,
                       FTPHandler, FileHandler, HTTPErrorProcessor]
    if hasattr(http.client, "HTTPSConnection"):
        default_classes.append(HTTPSHandler)
    skip = set()
    for klass in default_classes:
        for check in handlers:
            if isclass(check):
                if issubclass(check, klass):
                    skip.add(klass)
            elif isinstance(check, klass):
                skip.add(klass)
    for klass in skip:
        default_classes.remove(klass)

    for klass in default_classes:
        opener.add_handler(klass())

    for h in handlers:
        if isclass(h):
            h = h()
        opener.add_handler(h)
    return opener

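
# Sketch of build_opener() replacing a default handler (illustrative; the
# proxy host is a placeholder).  Because ProxyHandler is one of the default
# classes, passing a configured instance substitutes for the default:
#
#   opener = build_opener(ProxyHandler({'http': 'http://proxy.example.com:3128'}),
#                         HTTPCookieProcessor())
#   install_opener(opener)            # make it the opener used by urlopen()
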
class BaseHandler:
    handler_order = 500

    def add_parent(self, parent):
        self.parent = parent

    def close(self):
        # Only exists for backwards compatibility
        pass

    def __lt__(self, other):
        if not hasattr(other, "handler_order"):
            # Try to preserve the old behavior of having custom classes
            # inserted after default ones (works only for custom user
            # classes which are not aware of handler_order).
            return True
        return self.handler_order < other.handler_order

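
# The method-name conventions that OpenerDirector.add_handler() inspects,
# sketched with a hypothetical handler (nothing below is part of this module):
# <protocol>_open() handles requests, <protocol>_request()/<protocol>_response()
# pre-/post-process them, and http_error_<code>() handles specific HTTP errors.
#
#   class ExampleHandler(BaseHandler):
#       def http_error_404(self, req, fp, code, msg, hdrs):
#           return None               # decline; let other handlers try
#
#       def http_request(self, req):
#           req.add_header('Accept-encoding', 'identity')
#           return req                # request pre-processors must return req
#
#   opener = build_opener(ExampleHandler)
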
class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses."""
    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        code, msg, hdrs = response.code, response.msg, response.info()

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if not (200 <= code < 300):
            response = self.parent.error(
                'http', request, response, code, msg, hdrs)

        return response

    https_response = http_response

class HTTPDefaultErrorHandler(BaseHandler):
    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.full_url, code, msg, hdrs, fp)

class HTTPRedirectHandler(BaseHandler):
    # maximum number of redirections to any single URL
    # this is needed because of the state that cookies introduce
    max_repeats = 4
    # maximum total number of redirections (regardless of URL) before
    # assuming we're in a loop
    max_redirections = 10

    def redirect_request(self, req, fp, code, msg, headers, newurl):
        """Return a Request or None in response to a redirect.

        This is called by the http_error_30x methods when a
        redirection response is received.  If a redirection should
        take place, return a new Request to allow http_error_30x to
        perform the redirect.  Otherwise, raise HTTPError if no-one
        else should try to handle this url.  Return None if you can't
        but another Handler might.
        """
        m = req.get_method()
        if (not (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
                 or code in (301, 302, 303) and m == "POST")):
            raise HTTPError(req.full_url, code, msg, headers, fp)

        # Strictly (according to RFC 2616), 301 or 302 in response to
        # a POST MUST NOT cause a redirection without confirmation
        # from the user (of urllib.request, in this case).  In practice,
        # essentially all clients do redirect in this case, so we do
        # the same.
        # be lenient with URIs containing a space
        newurl = newurl.replace(' ', '%20')
        CONTENT_HEADERS = ("content-length", "content-type")
        newheaders = dict((k, v) for k, v in req.headers.items()
                          if k.lower() not in CONTENT_HEADERS)
        return Request(newurl,
                       headers=newheaders,
                       origin_req_host=req.origin_req_host,
                       unverifiable=True)

    # Implementation note: To avoid the server sending us into an
    # infinite loop, the request object needs to track what URLs we
    # have already seen.  Do this by adding a handler-specific
    # attribute to the Request object.
    def http_error_302(self, req, fp, code, msg, headers):
        # Some servers (incorrectly) return multiple Location headers
        # (so probably same goes for URI).  Use first header.
        if "location" in headers:
            newurl = headers["location"]
        elif "uri" in headers:
            newurl = headers["uri"]
        else:
            return

        # fix a possible malformed URL
        urlparts = urlparse(newurl)

        # For security reasons we don't allow redirection to anything other
        # than http, https or ftp.

        if urlparts.scheme not in ('http', 'https', 'ftp'):
            raise HTTPError(newurl, code,
                            msg +
                            " - Redirection to url '%s' is not allowed" %
                            newurl,
                            headers, fp)

        if not urlparts.path:
            urlparts = list(urlparts)
            urlparts[2] = "/"
            newurl = urlunparse(urlparts)

        newurl = urljoin(req.full_url, newurl)

        # XXX Probably want to forget about the state of the current
        # request, although that might interact poorly with other
        # handlers that also use handler-specific request attributes
        new = self.redirect_request(req, fp, code, msg, headers, newurl)
        if new is None:
            return

        # loop detection
        # .redirect_dict has a key url if url was previously visited.
        if hasattr(req, 'redirect_dict'):
            visited = new.redirect_dict = req.redirect_dict
            if (visited.get(newurl, 0) >= self.max_repeats or
                len(visited) >= self.max_redirections):
                raise HTTPError(req.full_url, code,
                                self.inf_msg + msg, headers, fp)
        else:
            visited = new.redirect_dict = req.redirect_dict = {}
        visited[newurl] = visited.get(newurl, 0) + 1

        # Don't close the fp until we are sure that we won't use it
        # with HTTPError.
        fp.read()
        fp.close()

        return self.parent.open(new, timeout=req.timeout)

    http_error_301 = http_error_303 = http_error_307 = http_error_302

    inf_msg = "The HTTP server returned a redirect error that would " \
              "lead to an infinite loop.\n" \
              "The last 30x error message was:\n"

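
# Since build_opener() drops a default handler when it is given a subclass of
# it, redirect policy can be tightened with a small subclass (illustrative
# sketch, not part of this module):
#
#   class StrictRedirectHandler(HTTPRedirectHandler):
#       max_redirections = 3      # give up sooner than the default of 10
#
#   opener = build_opener(StrictRedirectHandler)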

def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')

    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # authority
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.
        # and 3.3.), path is empty or starts with '/'
        end = r_scheme.find("/", 2)
        if end == -1:
            end = None
        authority = r_scheme[2:end]
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport

class ProxyHandler(BaseHandler):
    # Proxies must be in front
    handler_order = 100

    def __init__(self, proxies=None):
        if proxies is None:
            proxies = getproxies()
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        self.proxies = proxies
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open:
                        meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        orig_type = req.type
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            proxy_type = orig_type

        if req.host and proxy_bypass(req.host):
            return None

        if user and password:
            user_pass = '%s:%s' % (unquote(user),
                                   unquote(password))
            creds = base64.b64encode(user_pass.encode()).decode("ascii")
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type or orig_type == 'https':
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req, timeout=req.timeout)

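
# Explicit proxy configuration, sketched (the proxy URL is a placeholder);
# with no argument, ProxyHandler falls back to getproxies(), i.e. the
# http_proxy/ftp_proxy environment variables on most platforms:
#
#   proxy_support = ProxyHandler({'http': 'http://user:pass@proxy.example.com:3128'})
#   opener = build_opener(proxy_support)
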
class HTTPPasswordMgr:

    def __init__(self):
        self.passwd = {}

    def add_password(self, realm, uri, user, passwd):
        # uri could be a single URI or a sequence
        if isinstance(uri, str):
            uri = [uri]
        if realm not in self.passwd:
            self.passwd[realm] = {}
        for default_port in True, False:
            reduced_uri = tuple(
                [self.reduce_uri(u, default_port) for u in uri])
            self.passwd[realm][reduced_uri] = (user, passwd)

    def find_user_password(self, realm, authuri):
        domains = self.passwd.get(realm, {})
        for default_port in True, False:
            reduced_authuri = self.reduce_uri(authuri, default_port)
            for uris, authinfo in domains.items():
                for uri in uris:
                    if self.is_suburi(uri, reduced_authuri):
                        return authinfo
        return None, None

    def reduce_uri(self, uri, default_port=True):
        """Accept authority or URI and extract only the authority and path."""
        # note HTTP URLs do not have a userinfo component
        parts = urlsplit(uri)
        if parts[1]:
            # URI
            scheme = parts[0]
            authority = parts[1]
            path = parts[2] or '/'
        else:
            # host or host:port
            scheme = None
            authority = uri
            path = '/'
        host, port = splitport(authority)
        if default_port and port is None and scheme is not None:
            dport = {"http": 80,
                     "https": 443,
                     }.get(scheme)
            if dport is not None:
                authority = "%s:%d" % (host, dport)
        return authority, path

    def is_suburi(self, base, test):
        """Check if test is below base in a URI tree

        Both args must be URIs in reduced form.
        """
        if base == test:
            return True
        if base[0] != test[0]:
            return False
        common = posixpath.commonprefix((base[1], test[1]))
        if len(common) == len(base[1]):
            return True
        return False


class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr):

    def find_user_password(self, realm, authuri):
        user, password = HTTPPasswordMgr.find_user_password(self, realm,
                                                            authuri)
        if user is not None:
            return user, password
        return HTTPPasswordMgr.find_user_password(self, None, authuri)

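
# Typical password-manager wiring, sketched (URL, realm and credentials are
# placeholders).  HTTPPasswordMgrWithDefaultRealm lets realm=None act as a
# catch-all when the server's realm is not known in advance:
#
#   password_mgr = HTTPPasswordMgrWithDefaultRealm()
#   password_mgr.add_password(None, 'http://www.example.com/', 'klem', 'geheim')
#   opener = build_opener(HTTPBasicAuthHandler(password_mgr))
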
class AbstractBasicAuthHandler:

    # XXX this allows for multiple auth-schemes, but will stupidly pick
    # the last one with a realm specified.

    # allow for double- and single-quoted realm values
    # (single quotes are a violation of the RFC, but appear in the wild)
    rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
                    'realm=(["\'])(.*?)\\2', re.I)

    # XXX could pre-emptively send auth info already accepted (RFC 2617,
    # end of section 2, and section 1.2 immediately after "credentials"
    # production).

    def __init__(self, password_mgr=None):
        if password_mgr is None:
            password_mgr = HTTPPasswordMgr()
        self.passwd = password_mgr
        self.add_password = self.passwd.add_password
        self.retried = 0

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, authreq, host, req, headers):
        # host may be an authority (without userinfo) or a URL with an
        # authority
        # XXX could be multiple headers
        authreq = headers.get(authreq, None)

        if self.retried > 5:
            # retry sending the username:password 5 times before failing.
            raise HTTPError(req.get_full_url(), 401, "basic auth failed",
                            headers, None)
        else:
            self.retried += 1

        if authreq:
            mo = AbstractBasicAuthHandler.rx.search(authreq)
            if mo:
                scheme, quote, realm = mo.groups()
                if scheme.lower() == 'basic':
                    response = self.retry_http_basic_auth(host, req, realm)
                    if response and response.code != 401:
                        self.retried = 0
                    return response

    def retry_http_basic_auth(self, host, req, realm):
        user, pw = self.passwd.find_user_password(realm, host)
        if pw is not None:
            raw = "%s:%s" % (user, pw)
            auth = "Basic " + base64.b64encode(raw.encode()).decode("ascii")
            if req.headers.get(self.auth_header, None) == auth:
                return None
            req.add_unredirected_header(self.auth_header, auth)
            return self.parent.open(req, timeout=req.timeout)
        else:
            return None


class HTTPBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Authorization'

    def http_error_401(self, req, fp, code, msg, headers):
        url = req.full_url
        response = self.http_error_auth_reqed('www-authenticate',
                                              url, req, headers)
        self.reset_retry_count()
        return response


class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo component in
        # authority.  Assume there isn't one, since urllib.request does not (and
        # should not, RFC 3986 s. 3.2.1) support requests for URLs containing
        # userinfo.
        authority = req.host
        response = self.http_error_auth_reqed('proxy-authenticate',
                                              authority, req, headers)
        self.reset_retry_count()
        return response


def randombytes(n):
    """Return n random bytes."""
    return os.urandom(n)

class AbstractDigestAuthHandler:
    # Digest authentication is specified in RFC 2617.

    # XXX The client does not inspect the Authentication-Info header
    # in a successful response.

    # XXX It should be possible to test this implementation against
    # a mock server that just generates a static set of challenges.

    # XXX qop="auth-int" support is shaky

    def __init__(self, passwd=None):
        if passwd is None:
            passwd = HTTPPasswordMgr()
        self.passwd = passwd
        self.add_password = self.passwd.add_password
        self.retried = 0
        self.nonce_count = 0
        self.last_nonce = None

    def reset_retry_count(self):
        self.retried = 0

    def http_error_auth_reqed(self, auth_header, host, req, headers):
        authreq = headers.get(auth_header, None)
        if self.retried > 5:
            # Don't fail endlessly - if we failed once, we'll probably
            # fail a second time. Hm. Unless the Password Manager is
            # prompting for the information. Crap. This isn't great
            # but it's better than the current 'repeat until recursion
            # depth exceeded' approach <wink>
            raise HTTPError(req.full_url, 401, "digest auth failed",
                            headers, None)
        else:
            self.retried += 1
        if authreq:
            scheme = authreq.split()[0]
            if scheme.lower() == 'digest':
                return self.retry_http_digest_auth(req, authreq)

    def retry_http_digest_auth(self, req, auth):
        token, challenge = auth.split(' ', 1)
        chal = parse_keqv_list(filter(None, parse_http_list(challenge)))
        auth = self.get_authorization(req, chal)
        if auth:
            auth_val = 'Digest %s' % auth
            if req.headers.get(self.auth_header, None) == auth_val:
                return None
            req.add_unredirected_header(self.auth_header, auth_val)
            resp = self.parent.open(req, timeout=req.timeout)
            return resp

    def get_cnonce(self, nonce):
        # The cnonce-value is an opaque
        # quoted string value provided by the client and used by both client
        # and server to avoid chosen plaintext attacks, to provide mutual
        # authentication, and to provide some message integrity protection.
        # This isn't a fabulous effort, but it's probably Good Enough.
        s = "%s:%s:%s:" % (self.nonce_count, nonce, time.ctime())
        b = s.encode("ascii") + randombytes(8)
        dig = hashlib.sha1(b).hexdigest()
        return dig[:16]

    def get_authorization(self, req, chal):
        try:
            realm = chal['realm']
            nonce = chal['nonce']
            qop = chal.get('qop')
            algorithm = chal.get('algorithm', 'MD5')
            # mod_digest doesn't send an opaque, even though it isn't
            # supposed to be optional
            opaque = chal.get('opaque', None)
        except KeyError:
            return None

        H, KD = self.get_algorithm_impls(algorithm)
        if H is None:
            return None

        user, pw = self.passwd.find_user_password(realm, req.full_url)
        if user is None:
            return None

        # XXX not implemented yet
        if req.data is not None:
            entdig = self.get_entity_digest(req.data, chal)
        else:
            entdig = None

        A1 = "%s:%s:%s" % (user, realm, pw)
        A2 = "%s:%s" % (req.get_method(),
                        # XXX selector: what about proxies and full urls
                        req.selector)
        if qop == 'auth':
            if nonce == self.last_nonce:
                self.nonce_count += 1
            else:
                self.nonce_count = 1
                self.last_nonce = nonce
            ncvalue = '%08x' % self.nonce_count
            cnonce = self.get_cnonce(nonce)
            noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
            respdig = KD(H(A1), noncebit)
        elif qop is None:
            respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
        else:
            # XXX handle auth-int.
            raise URLError("qop '%s' is not supported." % qop)

        # XXX should the partial digests be encoded too?

        base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
               'response="%s"' % (user, realm, nonce, req.selector,
                                  respdig)
        if opaque:
            base += ', opaque="%s"' % opaque
        if entdig:
            base += ', digest="%s"' % entdig
        base += ', algorithm="%s"' % algorithm
        if qop:
            base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
        return base

    def get_algorithm_impls(self, algorithm):
        # lambdas assume digest modules are imported at the top level
        if algorithm == 'MD5':
            H = lambda x: hashlib.md5(x.encode("ascii")).hexdigest()
        elif algorithm == 'SHA':
            H = lambda x: hashlib.sha1(x.encode("ascii")).hexdigest()
        # XXX MD5-sess
        KD = lambda s, d: H("%s:%s" % (s, d))
        return H, KD

    def get_entity_digest(self, data, chal):
        # XXX not implemented yet
        return None


class HTTPDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):
    """An authentication protocol defined by RFC 2069

    Digest authentication improves on basic authentication because it
    does not transmit passwords in the clear.
    """

    auth_header = 'Authorization'
    handler_order = 490  # before Basic auth

    def http_error_401(self, req, fp, code, msg, headers):
        host = urlparse(req.full_url)[1]
        retry = self.http_error_auth_reqed('www-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry

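
# Digest authentication is wired up the same way as Basic (illustrative;
# URL and credentials are placeholders):
#
#   mgr = HTTPPasswordMgrWithDefaultRealm()
#   mgr.add_password(None, 'http://www.example.com/', 'klem', 'geheim')
#   opener = build_opener(HTTPDigestAuthHandler(mgr))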

class ProxyDigestAuthHandler(BaseHandler, AbstractDigestAuthHandler):

    auth_header = 'Proxy-Authorization'
    handler_order = 490  # before Basic auth

    def http_error_407(self, req, fp, code, msg, headers):
        host = req.host
        retry = self.http_error_auth_reqed('proxy-authenticate',
                                           host, req, headers)
        self.reset_retry_count()
        return retry

class AbstractHTTPHandler(BaseHandler):

    def __init__(self, debuglevel=0):
        self._debuglevel = debuglevel

    def set_http_debuglevel(self, level):
        self._debuglevel = level

    def do_request_(self, request):
        host = request.host
        if not host:
            raise URLError('no host given')

        if request.data is not None:  # POST
            data = request.data
            if not request.has_header('Content-type'):
                request.add_unredirected_header(
                    'Content-type',
                    'application/x-www-form-urlencoded')
            if not request.has_header('Content-length'):
                request.add_unredirected_header(
                    'Content-length', '%d' % len(data))

        sel_host = host
        if request.has_proxy():
            scheme, sel = splittype(request.selector)
            sel_host, sel_path = splithost(sel)
        if not request.has_header('Host'):
            request.add_unredirected_header('Host', sel_host)
        for name, value in self.parent.addheaders:
            name = name.capitalize()
            if not request.has_header(name):
                request.add_unredirected_header(name, value)

        return request

    def do_open(self, http_class, req):
        """Return an HTTPResponse object for the request, using http_class.

        http_class must implement the HTTPConnection API from http.client.
        """
        host = req.host
        if not host:
            raise URLError('no host given')

        h = http_class(host, timeout=req.timeout)  # will parse host:port

        headers = dict(req.unredirected_hdrs)
        headers.update(dict((k, v) for k, v in req.headers.items()
                            if k not in headers))

        # TODO(jhylton): Should this be redesigned to handle
        # persistent connections?

        # We want to make an HTTP/1.1 request, but the addinfourl
        # class isn't prepared to deal with a persistent connection.
        # It will try to read all remaining data from the socket,
        # which will block while the server waits for the next request.
        # So make sure the connection gets closed after the (only)
        # request.
        headers["Connection"] = "close"
        headers = dict((name.title(), val) for name, val in headers.items())

        if req._tunnel_host:
            tunnel_headers = {}
            proxy_auth_hdr = "Proxy-Authorization"
            if proxy_auth_hdr in headers:
                tunnel_headers[proxy_auth_hdr] = headers[proxy_auth_hdr]
                # Proxy-Authorization should not be sent to origin
                # server.
                del headers[proxy_auth_hdr]
            h._set_tunnel(req._tunnel_host, headers=tunnel_headers)

        try:
            h.request(req.get_method(), req.selector, req.data, headers)
            r = h.getresponse()  # an HTTPResponse instance
        except socket.error as err:
            raise URLError(err)

        r.url = req.full_url
        # This line replaces the .msg attribute of the HTTPResponse
        # with .headers, because urllib clients expect the response to
        # have the reason in .msg.  It would be good to mark this
        # attribute as deprecated and get clients to use info() or
        # .headers instead.
        r.msg = r.reason
        return r


class HTTPHandler(AbstractHTTPHandler):

    def http_open(self, req):
        return self.do_open(http.client.HTTPConnection, req)

    http_request = AbstractHTTPHandler.do_request_

if hasattr(http.client, 'HTTPSConnection'):
    class HTTPSHandler(AbstractHTTPHandler):

        def https_open(self, req):
            return self.do_open(http.client.HTTPSConnection, req)

        https_request = AbstractHTTPHandler.do_request_

class HTTPCookieProcessor(BaseHandler):
    def __init__(self, cookiejar=None):
        import http.cookiejar
        if cookiejar is None:
            cookiejar = http.cookiejar.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        self.cookiejar.add_cookie_header(request)
        return request

    def http_response(self, request, response):
        self.cookiejar.extract_cookies(response, request)
        return response

    https_request = http_request
    https_response = http_response

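
# Sharing a cookie jar across requests, sketched (the URL is a placeholder):
#
#   import http.cookiejar
#   jar = http.cookiejar.CookieJar()
#   opener = build_opener(HTTPCookieProcessor(jar))
#   opener.open('http://www.example.com/')   # cookies set here are stored in jar
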
class UnknownHandler(BaseHandler):
    def unknown_open(self, req):
        type = req.type
        raise URLError('unknown url type: %s' % type)

def parse_keqv_list(l):
    """Parse list of key=value strings where keys are not duplicated."""
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        if v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed

def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.
    Only double-quotes count, not single-quotes.
    """
    res = []
    part = ''

    escape = quote = False
    for cur in s:
        if escape:
            part += cur
            escape = False
            continue
        if quote:
            if cur == '\\':
                escape = True
                continue
            elif cur == '"':
                quote = False
            part += cur
            continue

        if cur == ',':
            res.append(part)
            part = ''
            continue

        if cur == '"':
            quote = True

        part += cur

    # append last part
    if part:
        res.append(part)

    return [part.strip() for part in res]

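
# How the digest handler above uses these two helpers, sketched on a made-up
# challenge string:
#
#   challenge = 'realm="example", nonce="abc123", qop="auth"'
#   parse_http_list(challenge)
#   # ['realm="example"', 'nonce="abc123"', 'qop="auth"']
#   parse_keqv_list(parse_http_list(challenge))
#   # {'realm': 'example', 'nonce': 'abc123', 'qop': 'auth'}
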
class FileHandler(BaseHandler):
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.selector
        if url[:2] == '//' and url[2:3] != '/' and (req.host and
                req.host != 'localhost'):
            req.type = 'ftp'
            return self.parent.open(req)
        else:
            return self.open_local_file(req)

    # names for the localhost
    names = None
    def get_names(self):
        if FileHandler.names is None:
            try:
                FileHandler.names = tuple(
                    socket.gethostbyname_ex('localhost')[2] +
                    socket.gethostbyname_ex(socket.gethostname())[2])
            except socket.gaierror:
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names

    # not entirely sure what the rules are here
    def open_local_file(self, req):
        import email.utils
        import mimetypes
        host = req.host
        filename = req.selector
        localfile = url2pathname(filename)
        try:
            stats = os.stat(localfile)
            size = stats.st_size
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            mtype = mimetypes.guess_type(filename)[0]
            headers = email.message_from_string(
                'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
                (mtype or 'text/plain', size, modified))
            if host:
                host, port = splitport(host)
            if not host or \
                (not port and _safe_gethostbyname(host) in self.get_names()):
                if host:
                    origurl = 'file://' + host + filename
                else:
                    origurl = 'file://' + filename
                return addinfourl(open(localfile, 'rb'), headers, origurl)
        except OSError as msg:
            # users shouldn't expect OSErrors coming from urlopen()
            raise URLError(msg)
        raise URLError('file not on local host')

def _safe_gethostbyname(host):
    try:
        return socket.gethostbyname(host)
    except socket.gaierror:
        return None

class FTPHandler(BaseHandler):
    def ftp_open(self, req):
        import ftplib
        import mimetypes
        host = req.host
        if not host:
            raise URLError('ftp error: no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = user or ''
        passwd = passwd or ''

        try:
            host = socket.gethostbyname(host)
        except socket.error as msg:
            raise URLError(msg)
        path, attrs = splitattr(req.selector)
        dirs = path.split('/')
        dirs = list(map(unquote, dirs))
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            headers = ""
            mtype = mimetypes.guess_type(req.full_url)[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            headers = email.message_from_string(headers)
            return addinfourl(fp, headers, req.full_url)
        except ftplib.all_errors as msg:
            exc = URLError('ftp error: %s' % msg)
            raise exc.with_traceback(sys.exc_info()[2])

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        fw = ftpwrapper(user, passwd, host, port, dirs, timeout)
        return fw

class CacheFTPHandler(FTPHandler):
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe
    def __init__(self):
        self.cache = {}
        self.timeout = {}
        self.soonest = 0
        self.delay = 60
        self.max_conns = 16

    def setTimeout(self, t):
        self.delay = t

    def setMaxConns(self, m):
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        key = user, host, port, '/'.join(dirs), timeout
        if key in self.cache:
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port,
                                         dirs, timeout)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            for k, v in list(self.timeout.items()):
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
        self.soonest = min(list(self.timeout.values()))

        # then check the size
        if len(self.cache) == self.max_conns:
            for k, v in list(self.timeout.items()):
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            self.soonest = min(list(self.timeout.values()))

1373# Code move from the old urllib module
1374
1375MAXFTPCACHE = 10 # Trim the ftp cache beyond this size
1376
1377# Helper for non-unix systems
1378if os.name == 'mac':
1379 from macurl2path import url2pathname, pathname2url
1380elif os.name == 'nt':
1381 from nturl2path import url2pathname, pathname2url
1382else:
1383 def url2pathname(pathname):
1384 """OS-specific conversion from a relative URL of the 'file' scheme
1385 to a file system path; not recommended for general use."""
Georg Brandl13e89462008-07-01 19:56:00 +00001386 return unquote(pathname)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001387
1388 def pathname2url(pathname):
1389 """OS-specific conversion from a file system path to a relative URL
1390 of the 'file' scheme; not recommended for general use."""
Georg Brandl13e89462008-07-01 19:56:00 +00001391 return quote(pathname)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001392
1393# This really consists of two pieces:
1394# (1) a class which handles opening of all sorts of URLs
1395# (plus assorted utilities etc.)
1396# (2) a set of functions for parsing URLs
1397# XXX Should these be separated out into different modules?
1398
1399
1400ftpcache = {}
1401class URLopener:
1402 """Class to open URLs.
1403 This is a class rather than just a subroutine because we may need
1404 more than one set of global protocol-specific options.
1405 Note -- this is a base class for those who don't want the
1406 automatic handling of errors type 302 (relocated) and 401
1407 (authorization needed)."""
1408
1409 __tempfiles = None
1410
1411 version = "Python-urllib/%s" % __version__
1412
1413 # Constructor
1414 def __init__(self, proxies=None, **x509):
1415 if proxies is None:
1416 proxies = getproxies()
1417 assert hasattr(proxies, 'keys'), "proxies must be a mapping"
1418 self.proxies = proxies
1419 self.key_file = x509.get('key_file')
1420 self.cert_file = x509.get('cert_file')
1421 self.addheaders = [('User-Agent', self.version)]
1422 self.__tempfiles = []
1423 self.__unlink = os.unlink # See cleanup()
1424 self.tempcache = None
1425 # Undocumented feature: if you assign {} to tempcache,
1426 # it is used to cache files retrieved with
1427 # self.retrieve(). This is not enabled by default
1428 # since it does not work for changing documents (and I
1429 # haven't got the logic to check expiration headers
1430 # yet).
1431 self.ftpcache = ftpcache
1432 # Undocumented feature: you can use a different
1433 # ftp cache by assigning to the .ftpcache member;
1434 # in case you want logically independent URL openers
1435 # XXX This is not threadsafe. Bah.
1436
1437 def __del__(self):
1438 self.close()
1439
1440 def close(self):
1441 self.cleanup()
1442
1443 def cleanup(self):
1444 # This code sometimes runs when the rest of this module
1445 # has already been deleted, so it can't use any globals
1446 # or import anything.
1447 if self.__tempfiles:
1448 for file in self.__tempfiles:
1449 try:
1450 self.__unlink(file)
1451 except OSError:
1452 pass
1453 del self.__tempfiles[:]
1454 if self.tempcache:
1455 self.tempcache.clear()
1456
1457 def addheader(self, *args):
1458 """Add a header to be used by the HTTP interface only
1459 e.g. u.addheader('Accept', 'sound/basic')"""
1460 self.addheaders.append(args)
1461
    # External interface
    def open(self, fullurl, data=None):
        """Use URLopener().open(file) instead of open(file, 'r')."""
        fullurl = unwrap(to_bytes(fullurl))
        fullurl = quote(fullurl, safe="%/:=&?~#+!$,;'@()*[]|")
        if self.tempcache and fullurl in self.tempcache:
            filename, headers = self.tempcache[fullurl]
            fp = open(filename, 'rb')
            return addinfourl(fp, headers, fullurl)
        urltype, url = splittype(fullurl)
        if not urltype:
            urltype = 'file'
        if urltype in self.proxies:
            proxy = self.proxies[urltype]
            urltype, proxyhost = splittype(proxy)
            host, selector = splithost(proxyhost)
            url = (host, fullurl) # Signal special case to open_*()
        else:
            proxy = None
        name = 'open_' + urltype
        self.type = urltype
        name = name.replace('-', '_')
        if not hasattr(self, name):
            if proxy:
                return self.open_unknown_proxy(proxy, fullurl, data)
            else:
                return self.open_unknown(fullurl, data)
        try:
            if data is None:
                return getattr(self, name)(url)
            else:
                return getattr(self, name)(url, data)
        except socket.error as msg:
            raise IOError('socket error', msg).with_traceback(sys.exc_info()[2])

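    # Example (illustrative sketch): open() dispatches on the URL scheme to a
    # method named open_<scheme> (dashes become underscores), so a subclass
    # can support an additional scheme simply by defining such a method.  The
    # scheme name below is hypothetical.
    #
    #   class MyOpener(URLopener):
    #       def open_foo(self, url):
    #           ...   # return a file-like object for foo: URLs
    #
    #   MyOpener().open('foo://something')   # ends up calling open_foo()
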
    def open_unknown(self, fullurl, data=None):
        """Overridable interface to open unknown URL type."""
        type, url = splittype(fullurl)
        raise IOError('url error', 'unknown url type', type)

    def open_unknown_proxy(self, proxy, fullurl, data=None):
        """Overridable interface to open unknown URL type."""
        type, url = splittype(fullurl)
        raise IOError('url error', 'invalid proxy for %s' % type, proxy)

    # External interface
    def retrieve(self, url, filename=None, reporthook=None, data=None):
        """retrieve(url) returns (filename, headers) for a local object
        or (tempfilename, headers) for a remote object."""
        url = unwrap(to_bytes(url))
        if self.tempcache and url in self.tempcache:
            return self.tempcache[url]
        type, url1 = splittype(url)
        if filename is None and (not type or type == 'file'):
            try:
                fp = self.open_local_file(url1)
                hdrs = fp.info()
                del fp
                return url2pathname(splithost(url1)[1]), hdrs
            except IOError as msg:
                pass
        fp = self.open(url, data)
        try:
            headers = fp.info()
            if filename:
                tfp = open(filename, 'wb')
            else:
                import tempfile
                garbage, path = splittype(url)
                garbage, path = splithost(path or "")
                path, garbage = splitquery(path or "")
                path, garbage = splitattr(path or "")
                suffix = os.path.splitext(path)[1]
                (fd, filename) = tempfile.mkstemp(suffix)
                self.__tempfiles.append(filename)
                tfp = os.fdopen(fd, 'wb')
            try:
                result = filename, headers
                if self.tempcache is not None:
                    self.tempcache[url] = result
                bs = 1024*8
                size = -1
                read = 0
                blocknum = 0
                if reporthook:
                    if "content-length" in headers:
                        size = int(headers["Content-Length"])
                    reporthook(blocknum, bs, size)
                while 1:
                    block = fp.read(bs)
                    if not block:
                        break
                    read += len(block)
                    tfp.write(block)
                    blocknum += 1
                    if reporthook:
                        reporthook(blocknum, bs, size)
            finally:
                tfp.close()
        finally:
            fp.close()
        del fp
        del tfp

        # raise exception if actual size does not match content-length header
        if size >= 0 and read < size:
            raise ContentTooShortError(
                "retrieval incomplete: got only %i out of %i bytes"
                % (read, size), result)

        return result

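    # Example (illustrative sketch; the URL is a placeholder): retrieve() can
    # report progress through the optional reporthook, which is called with
    # (block number, block size, total size or -1 if unknown).
    #
    #   def report(blocknum, bs, size):
    #       print('got block %d of %s' % (blocknum, size if size >= 0 else 'unknown'))
    #
    #   filename, headers = URLopener().retrieve('http://www.example.com/',
    #                                            reporthook=report)
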
1574 # Each method named open_<type> knows how to open that type of URL
1575
    def _open_generic_http(self, connection_factory, url, data):
        """Make an HTTP connection using connection_factory.

        This is an internal method that should be called from
        open_http() or open_https().

        Arguments:
        - connection_factory should take a host name and return an
          HTTPConnection instance.
        - url is the URL to retrieve or a (host, relative-path) pair.
        - data is the payload for a POST request, or None.
        """

        user_passwd = None
        proxy_passwd = None
        if isinstance(url, str):
            host, selector = splithost(url)
            if host:
                user_passwd, host = splituser(host)
                host = unquote(host)
            realhost = host
        else:
            host, selector = url
            # check whether the proxy contains authorization information
            proxy_passwd, host = splituser(host)
            # now we proceed with the url we want to obtain
            urltype, rest = splittype(selector)
            url = rest
            user_passwd = None
            if urltype.lower() != 'http':
                realhost = None
            else:
                realhost, rest = splithost(rest)
                if realhost:
                    user_passwd, realhost = splituser(realhost)
                if user_passwd:
                    selector = "%s://%s%s" % (urltype, realhost, rest)
                if proxy_bypass(realhost):
                    host = realhost

            #print "proxy via http:", host, selector
        if not host: raise IOError('http error', 'no host given')

        if proxy_passwd:
            import base64
            proxy_auth = base64.b64encode(proxy_passwd.encode()).decode('ascii')
        else:
            proxy_auth = None

        if user_passwd:
            import base64
            auth = base64.b64encode(user_passwd.encode()).decode('ascii')
        else:
            auth = None
        http_conn = connection_factory(host)
        headers = {}
        if proxy_auth:
            headers["Proxy-Authorization"] = "Basic %s" % proxy_auth
        if auth:
            headers["Authorization"] = "Basic %s" % auth
        if realhost:
            headers["Host"] = realhost
        for header, value in self.addheaders:
            headers[header] = value

        if data is not None:
            headers["Content-Type"] = "application/x-www-form-urlencoded"
            http_conn.request("POST", selector, data, headers)
        else:
            http_conn.request("GET", selector, headers=headers)

        try:
            response = http_conn.getresponse()
        except http.client.BadStatusLine:
            # something went wrong with the HTTP status line
            raise URLError("http protocol error: bad status line")

        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        if 200 <= response.status < 300:
            return addinfourl(response, response.msg, "http:" + url,
                              response.status)
        else:
            return self.http_error(
                url, response.fp,
                response.status, response.reason, response.msg, data)

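    # Example (illustrative): the Basic credentials sent above are just the
    # base64 encoding of the "user:password" text taken from the URL or the
    # proxy URL; the credentials shown here are made up.
    #
    #   >>> import base64
    #   >>> base64.b64encode('someuser:somepass'.encode()).decode('ascii')
    #   'c29tZXVzZXI6c29tZXBhc3M='
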
1663 def open_http(self, url, data=None):
1664 """Use HTTP protocol."""
1665 return self._open_generic_http(http.client.HTTPConnection, url, data)
1666
1667 def http_error(self, url, fp, errcode, errmsg, headers, data=None):
1668 """Handle http errors.
1669
1670 Derived class can override this, or provide specific handlers
1671 named http_error_DDD where DDD is the 3-digit error code."""
1672 # First check if there's a specific handler for this error
1673 name = 'http_error_%d' % errcode
1674 if hasattr(self, name):
1675 method = getattr(self, name)
1676 if data is None:
1677 result = method(url, fp, errcode, errmsg, headers)
1678 else:
1679 result = method(url, fp, errcode, errmsg, headers, data)
1680 if result: return result
1681 return self.http_error_default(url, fp, errcode, errmsg, headers)
1682
    def http_error_default(self, url, fp, errcode, errmsg, headers):
        """Default error handler: close the connection and raise IOError."""
        void = fp.read()
        fp.close()
        raise HTTPError(url, errcode, errmsg, headers, None)

1689 if _have_ssl:
1690 def _https_connection(self, host):
1691 return http.client.HTTPSConnection(host,
1692 key_file=self.key_file,
1693 cert_file=self.cert_file)
1694
1695 def open_https(self, url, data=None):
1696 """Use HTTPS protocol."""
1697 return self._open_generic_http(self._https_connection, url, data)
1698
1699 def open_file(self, url):
1700 """Use local file or FTP depending on form of URL."""
1701 if not isinstance(url, str):
1702 raise URLError('file error', 'proxy support for file protocol currently not implemented')
1703 if url[:2] == '//' and url[2:3] != '/' and url[2:12].lower() != 'localhost/':
1704 return self.open_ftp(url)
1705 else:
1706 return self.open_local_file(url)
1707
    def open_local_file(self, url):
        """Use local file."""
        import mimetypes, email.utils
        from io import StringIO
        host, file = splithost(url)
        localname = url2pathname(file)
        try:
            stats = os.stat(localname)
        except OSError as e:
            raise URLError(e.errno, e.strerror, e.filename)
        size = stats.st_size
        modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
        mtype = mimetypes.guess_type(url)[0]
        headers = email.message_from_string(
            'Content-Type: %s\nContent-Length: %d\nLast-modified: %s\n' %
            (mtype or 'text/plain', size, modified))
        if not host:
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            return addinfourl(open(localname, 'rb'), headers, urlfile)
        host, port = splitport(host)
        if (not port
            and socket.gethostbyname(host) in ((localhost(),) + thishost())):
            urlfile = file
            if file[:1] == '/':
                urlfile = 'file://' + file
            return addinfourl(open(localname, 'rb'), headers, urlfile)
        raise URLError('local file error', 'not on local host')
1737
    def open_ftp(self, url):
        """Use FTP protocol."""
        if not isinstance(url, str):
            raise URLError('ftp error', 'proxy support for ftp protocol currently not implemented')
        import mimetypes
        from io import StringIO
        host, path = splithost(url)
        if not host: raise URLError('ftp error', 'no host given')
        host, port = splitport(host)
        user, host = splituser(host)
        if user: user, passwd = splitpasswd(user)
        else: passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')
        host = socket.gethostbyname(host)
        if not port:
            import ftplib
            port = ftplib.FTP_PORT
        else:
            port = int(port)
        path, attrs = splitattr(path)
        path = unquote(path)
        dirs = path.split('/')
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]: dirs = dirs[1:]
        if dirs and not dirs[0]: dirs[0] = '/'
        key = user, host, port, '/'.join(dirs)
        # XXX thread unsafe!
        if len(self.ftpcache) > MAXFTPCACHE:
            # Prune the cache, rather arbitrarily
            # (iterate over a copy, since entries are deleted while pruning)
            for k in list(self.ftpcache):
                if k != key:
                    v = self.ftpcache[k]
                    del self.ftpcache[k]
                    v.close()
        try:
            if not key in self.ftpcache:
                self.ftpcache[key] = \
                    ftpwrapper(user, passwd, host, port, dirs)
            if not file: type = 'D'
            else: type = 'I'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            (fp, retrlen) = self.ftpcache[key].retrfile(file, type)
            mtype = mimetypes.guess_type("ftp:" + url)[0]
            headers = ""
            if mtype:
                headers += "Content-Type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-Length: %d\n" % retrlen
            headers = email.message_from_string(headers)
            return addinfourl(fp, headers, "ftp:" + url)
        except ftperrors() as msg:
            raise URLError('ftp error', msg).with_traceback(sys.exc_info()[2])

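    # Example (illustrative; host and path are placeholders): an ftp URL is
    # taken apart as follows before being handed to ftpwrapper above.
    #
    #   ftp://anonymous:guest@ftp.example.com/pub/README;type=i
    #     user='anonymous', passwd='guest', host='ftp.example.com' (resolved
    #     via gethostbyname), dirs=['pub'], file='README', and the ';type=i'
    #     attribute selects binary ('I') transfer mode.
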
    def open_data(self, url, data=None):
        """Use "data" URL."""
        if not isinstance(url, str):
            raise URLError('data error', 'proxy support for data protocol currently not implemented')
        # ignore POSTed data
        #
        # syntax of data URLs:
        # dataurl   := "data:" [ mediatype ] [ ";base64" ] "," data
        # mediatype := [ type "/" subtype ] *( ";" parameter )
        # data      := *urlchar
        # parameter := attribute "=" value
        try:
            [type, data] = url.split(',', 1)
        except ValueError:
            raise IOError('data error', 'bad data URL')
        if not type:
            type = 'text/plain;charset=US-ASCII'
        semi = type.rfind(';')
        if semi >= 0 and '=' not in type[semi:]:
            encoding = type[semi+1:]
            type = type[:semi]
        else:
            encoding = ''
        msg = []
        msg.append('Date: %s' % time.strftime('%a, %d %b %Y %H:%M:%S GMT',
                                              time.gmtime(time.time())))
        msg.append('Content-type: %s' % type)
        if encoding == 'base64':
            import base64
            # XXX is this encoding/decoding ok?
            data = base64.decodebytes(data.encode('ascii')).decode('latin1')
        else:
            data = unquote(data)
        msg.append('Content-Length: %d' % len(data))
        msg.append('')
        msg.append(data)
        msg = '\n'.join(msg)
        headers = email.message_from_string(msg)
        f = io.StringIO(msg)
        #f.fileno = None     # needed for addinfourl
        return addinfourl(f, headers, url)

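    # Example data URLs matching the grammar in open_data() above
    # (illustrative values):
    #
    #   data:,Hello%2C%20World          -> text/plain;charset=US-ASCII,
    #                                      payload "Hello, World"
    #   data:text/plain;base64,SGVsbG8= -> base64-decoded payload "Hello"
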
1839
1840class FancyURLopener(URLopener):
1841 """Derived class with handlers for errors we can handle (perhaps)."""
1842
1843 def __init__(self, *args, **kwargs):
1844 URLopener.__init__(self, *args, **kwargs)
1845 self.auth_cache = {}
1846 self.tries = 0
1847 self.maxtries = 10
1848
1849 def http_error_default(self, url, fp, errcode, errmsg, headers):
1850 """Default error handling -- don't raise an exception."""
Georg Brandl13e89462008-07-01 19:56:00 +00001851 return addinfourl(fp, headers, "http:" + url, errcode)
Jeremy Hylton1afc1692008-06-18 20:49:58 +00001852
1853 def http_error_302(self, url, fp, errcode, errmsg, headers, data=None):
1854 """Error 302 -- relocated (temporarily)."""
1855 self.tries += 1
1856 if self.maxtries and self.tries >= self.maxtries:
1857 if hasattr(self, "http_error_500"):
1858 meth = self.http_error_500
1859 else:
1860 meth = self.http_error_default
1861 self.tries = 0
1862 return meth(url, fp, 500,
1863 "Internal Server Error: Redirect Recursion", headers)
1864 result = self.redirect_internal(url, fp, errcode, errmsg, headers,
1865 data)
1866 self.tries = 0
1867 return result
1868
    def redirect_internal(self, url, fp, errcode, errmsg, headers, data):
        if 'location' in headers:
            newurl = headers['location']
        elif 'uri' in headers:
            newurl = headers['uri']
        else:
            return
        void = fp.read()
        fp.close()

        # In case the server sent a relative URL, join with original:
        newurl = urljoin(self.type + ":" + url, newurl)

        urlparts = urlparse(newurl)

        # For security reasons, we don't allow redirection to anything other
        # than http, https and ftp.

        # We raise the newer HTTPError from this older redirect_internal()
        # method; the method itself is slated for deprecation in 3.3.

        if urlparts.scheme not in ('http', 'https', 'ftp'):
            raise HTTPError(newurl, errcode,
                            errmsg +
                            " Redirection to url '%s' is not allowed." % newurl,
                            headers, fp)

        return self.open(newurl)
1897
1898 def http_error_301(self, url, fp, errcode, errmsg, headers, data=None):
1899 """Error 301 -- also relocated (permanently)."""
1900 return self.http_error_302(url, fp, errcode, errmsg, headers, data)
1901
1902 def http_error_303(self, url, fp, errcode, errmsg, headers, data=None):
1903 """Error 303 -- also relocated (essentially identical to 302)."""
1904 return self.http_error_302(url, fp, errcode, errmsg, headers, data)
1905
1906 def http_error_307(self, url, fp, errcode, errmsg, headers, data=None):
1907 """Error 307 -- relocated, but turn POST into error."""
1908 if data is None:
1909 return self.http_error_302(url, fp, errcode, errmsg, headers, data)
1910 else:
1911 return self.http_error_default(url, fp, errcode, errmsg, headers)
1912
    def http_error_401(self, url, fp, errcode, errmsg, headers, data=None,
                       retry=False):
        """Error 401 -- authentication required.
        This function supports Basic authentication only."""
        if 'www-authenticate' not in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['www-authenticate']
        import re
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        if not retry:
            URLopener.http_error_default(self, url, fp, errcode, errmsg,
                                         headers)
        name = 'retry_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self, name)(url, realm)
        else:
            return getattr(self, name)(url, realm, data)

    def http_error_407(self, url, fp, errcode, errmsg, headers, data=None,
                       retry=False):
        """Error 407 -- proxy authentication required.
        This function supports Basic authentication only."""
        if 'proxy-authenticate' not in headers:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        stuff = headers['proxy-authenticate']
        import re
        match = re.match('[ \t]*([^ \t]+)[ \t]+realm="([^"]*)"', stuff)
        if not match:
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        scheme, realm = match.groups()
        if scheme.lower() != 'basic':
            URLopener.http_error_default(self, url, fp,
                                         errcode, errmsg, headers)
        if not retry:
            URLopener.http_error_default(self, url, fp, errcode, errmsg,
                                         headers)
        name = 'retry_proxy_' + self.type + '_basic_auth'
        if data is None:
            return getattr(self, name)(url, realm)
        else:
            return getattr(self, name)(url, realm, data)
1964
    def retry_proxy_http_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        newurl = 'http://' + host + selector
        proxy = self.proxies['http']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
                                  quote(passwd, safe=''), proxyhost)
        self.proxies['http'] = 'http://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_proxy_https_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        newurl = 'https://' + host + selector
        proxy = self.proxies['https']
        urltype, proxyhost = splittype(proxy)
        proxyhost, proxyselector = splithost(proxyhost)
        i = proxyhost.find('@') + 1
        proxyhost = proxyhost[i:]
        user, passwd = self.get_user_passwd(proxyhost, realm, i)
        if not (user or passwd): return None
        proxyhost = "%s:%s@%s" % (quote(user, safe=''),
                                  quote(passwd, safe=''), proxyhost)
        self.proxies['https'] = 'https://' + proxyhost + proxyselector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_http_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = "%s:%s@%s" % (quote(user, safe=''),
                             quote(passwd, safe=''), host)
        newurl = 'http://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def retry_https_basic_auth(self, url, realm, data=None):
        host, selector = splithost(url)
        i = host.find('@') + 1
        host = host[i:]
        user, passwd = self.get_user_passwd(host, realm, i)
        if not (user or passwd): return None
        host = "%s:%s@%s" % (quote(user, safe=''),
                             quote(passwd, safe=''), host)
        newurl = 'https://' + host + selector
        if data is None:
            return self.open(newurl)
        else:
            return self.open(newurl, data)

    def get_user_passwd(self, host, realm, clear_cache=0):
        key = realm + '@' + host.lower()
        if key in self.auth_cache:
            if clear_cache:
                del self.auth_cache[key]
            else:
                return self.auth_cache[key]
        user, passwd = self.prompt_user_passwd(host, realm)
        if user or passwd: self.auth_cache[key] = (user, passwd)
        return user, passwd
2039
2040 def prompt_user_passwd(self, host, realm):
2041 """Override this in a GUI environment!"""
2042 import getpass
2043 try:
2044 user = input("Enter username for %s at %s: " % (realm, host))
2045 passwd = getpass.getpass("Enter password for %s in %s at %s: " %
2046 (user, realm, host))
2047 return user, passwd
2048 except KeyboardInterrupt:
2049 print()
2050 return None, None
2051
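    # Example (illustrative sketch; the credentials are placeholders): a
    # subclass can supply credentials without an interactive prompt by
    # overriding prompt_user_passwd().
    #
    #   class NonInteractiveOpener(FancyURLopener):
    #       def prompt_user_passwd(self, host, realm):
    #           return 'someuser', 'somepass'
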
2052
2053# Utility functions
2054
2055_localhost = None
2056def localhost():
2057 """Return the IP address of the magic hostname 'localhost'."""
2058 global _localhost
2059 if _localhost is None:
2060 _localhost = socket.gethostbyname('localhost')
2061 return _localhost
2062
_thishost = None
def thishost():
    """Return the IP addresses of the current host."""
    global _thishost
    if _thishost is None:
        # index [2] selects the address list returned by gethostbyname_ex()
        _thishost = tuple(socket.gethostbyname_ex(socket.gethostname())[2])
    return _thishost
2070
2071_ftperrors = None
2072def ftperrors():
2073 """Return the set of errors raised by the FTP class."""
2074 global _ftperrors
2075 if _ftperrors is None:
2076 import ftplib
2077 _ftperrors = ftplib.all_errors
2078 return _ftperrors
2079
_noheaders = None
def noheaders():
    """Return an empty email Message object."""
    global _noheaders
    if _noheaders is None:
        _noheaders = email.message_from_string("")
    return _noheaders
2087
2088
2089# Utility classes
2090
2091class ftpwrapper:
2092 """Class used by open_ftp() for cache of open FTP connections."""
2093
2094 def __init__(self, user, passwd, host, port, dirs, timeout=None):
2095 self.user = user
2096 self.passwd = passwd
2097 self.host = host
2098 self.port = port
2099 self.dirs = dirs
2100 self.timeout = timeout
2101 self.init()
2102
2103 def init(self):
2104 import ftplib
2105 self.busy = 0
2106 self.ftp = ftplib.FTP()
2107 self.ftp.connect(self.host, self.port, self.timeout)
2108 self.ftp.login(self.user, self.passwd)
2109 for dir in self.dirs:
2110 self.ftp.cwd(dir)
2111
    def retrfile(self, file, type):
        import ftplib
        self.endtransfer()
        if type in ('d', 'D'): cmd = 'TYPE A'; isdir = 1
        else: cmd = 'TYPE ' + type; isdir = 0
        try:
            self.ftp.voidcmd(cmd)
        except ftplib.all_errors:
            self.init()
            self.ftp.voidcmd(cmd)
        conn = None
        if file and not isdir:
            # Try to retrieve as a file
            try:
                cmd = 'RETR ' + file
                conn = self.ftp.ntransfercmd(cmd)
            except ftplib.error_perm as reason:
                if str(reason)[:3] != '550':
                    raise URLError('ftp error', reason).with_traceback(
                        sys.exc_info()[2])
        if not conn:
            # Set transfer mode to ASCII!
            self.ftp.voidcmd('TYPE A')
            # Try a directory listing. Verify that directory exists.
            if file:
                pwd = self.ftp.pwd()
                try:
                    try:
                        self.ftp.cwd(file)
                    except ftplib.error_perm as reason:
                        raise URLError('ftp error', reason) from reason
                finally:
                    self.ftp.cwd(pwd)
                cmd = 'LIST ' + file
            else:
                cmd = 'LIST'
            conn = self.ftp.ntransfercmd(cmd)
        self.busy = 1
        # Pass back both a suitably decorated object and a retrieval length
        return (addclosehook(conn[0].makefile('rb'), self.endtransfer), conn[1])

    def endtransfer(self):
2153 if not self.busy:
2154 return
2155 self.busy = 0
2156 try:
2157 self.ftp.voidresp()
2158 except ftperrors():
2159 pass
2160
2161 def close(self):
2162 self.endtransfer()
2163 try:
2164 self.ftp.close()
2165 except ftperrors():
2166 pass
2167
2168# Proxy handling
def getproxies_environment():
    """Return a dictionary of scheme -> proxy server URL mappings.

    Scan the environment for variables named <scheme>_proxy;
    this seems to be the standard convention. If you need a
    different way, you can pass a proxies dictionary to the
    [Fancy]URLopener constructor.

    """
    proxies = {}
    for name, value in os.environ.items():
        name = name.lower()
        if value and name[-6:] == '_proxy':
            proxies[name[:-6]] = value
    return proxies
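
# Example (illustrative; the proxy URL is a placeholder): with
# http_proxy=http://proxy.example.com:3128 set in the environment,
# getproxies_environment() would include
# {'http': 'http://proxy.example.com:3128'} alongside entries for any other
# *_proxy variables that happen to be set.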
2184
def proxy_bypass_environment(host):
    """Test if proxies should not be used for a particular host.

    Checks the environment for a variable named no_proxy, which should
    be a list of DNS suffixes separated by commas, or '*' for all hosts.
    """
    no_proxy = os.environ.get('no_proxy', '') or os.environ.get('NO_PROXY', '')
    # '*' is special case for always bypass
    if no_proxy == '*':
        return 1
    # strip port off host
    hostonly, port = splitport(host)
    # check if the host ends with any of the DNS suffixes
    for name in no_proxy.split(','):
        if name and (hostonly.endswith(name) or host.endswith(name)):
            return 1
    # otherwise, don't bypass
    return 0

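# Example (illustrative; the suffixes are placeholders): with
# no_proxy="example.com,localhost" in the environment,
#   proxy_bypass_environment('www.example.com:8080') -> 1   (suffix match)
#   proxy_bypass_environment('python.org')           -> 0
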
2204
# This code tests an OS X-specific data structure but is testable on all
# platforms.
def _proxy_bypass_macosx_sysconf(host, proxy_settings):
2208 """
2209 Return True iff this host shouldn't be accessed using a proxy
2210
2211 This function uses the MacOSX framework SystemConfiguration
2212 to fetch the proxy information.
2213
2214 proxy_settings come from _scproxy._get_proxy_settings or get mocked ie:
2215 { 'exclude_simple': bool,
2216 'exceptions': ['foo.bar', '*.bar.com', '127.0.0.1', '10.1', '10.0/16']
2217 }
2218 """
2219 import re
2220 import socket
2221 from fnmatch import fnmatch
2222
2223 hostonly, port = splitport(host)
2224
2225 def ip2num(ipAddr):
2226 parts = ipAddr.split('.')
2227 parts = list(map(int, parts))
2228 if len(parts) != 4:
2229 parts = (parts + [0, 0, 0, 0])[:4]
2230 return (parts[0] << 24) | (parts[1] << 16) | (parts[2] << 8) | parts[3]
2231
2232 # Check for simple host names:
2233 if '.' not in host:
2234 if proxy_settings['exclude_simple']:
2235 return True
2236
2237 hostIP = None
2238
2239 for value in proxy_settings.get('exceptions', ()):
2240 # Items in the list are strings like these: *.local, 169.254/16
2241 if not value: continue
2242
2243 m = re.match(r"(\d+(?:\.\d+)*)(/\d+)?", value)
2244 if m is not None:
2245 if hostIP is None:
2246 try:
2247 hostIP = socket.gethostbyname(hostonly)
2248 hostIP = ip2num(hostIP)
2249 except socket.error:
2250 continue
2251
2252 base = ip2num(m.group(1))
2253 mask = m.group(2)
2254 if mask is None:
2255 mask = 8 * (m.group(1).count('.') + 1)
2256 else:
2257 mask = int(mask[1:])
2258 mask = 32 - mask
2259
2260 if (hostIP >> mask) == (base >> mask):
2261 return True
2262
2263 elif fnmatch(host, value):
2264 return True
2265
2266 return False
2267
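# Example (illustrative sketch, using a settings dict of the shape described
# in the docstring above):
#
#   settings = {'exclude_simple': True,
#               'exceptions': ['*.local', '169.254/16']}
#   _proxy_bypass_macosx_sysconf('someserver', settings)     -> True  (simple name)
#   _proxy_bypass_macosx_sysconf('www.local', settings)      -> True  (matches *.local)
#   _proxy_bypass_macosx_sysconf('169.254.1.7', settings)    -> True  (in 169.254/16)
#   _proxy_bypass_macosx_sysconf('www.python.org', settings) -> False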
2268
if sys.platform == 'darwin':
    from _scproxy import _get_proxy_settings, _get_proxies

    def proxy_bypass_macosx_sysconf(host):
        proxy_settings = _get_proxy_settings()
        return _proxy_bypass_macosx_sysconf(host, proxy_settings)

    def getproxies_macosx_sysconf():
        """Return a dictionary of scheme -> proxy server URL mappings.

        This function uses the MacOSX framework SystemConfiguration
        to fetch the proxy information.
        """
        return _get_proxies()

    def proxy_bypass(host):
        if getproxies_environment():
            return proxy_bypass_environment(host)
        else:
            return proxy_bypass_macosx_sysconf(host)

    def getproxies():
        return getproxies_environment() or getproxies_macosx_sysconf()

elif os.name == 'nt':
    def getproxies_registry():
        """Return a dictionary of scheme -> proxy server URL mappings.

        Win32 uses the registry to store proxies.

        """
        proxies = {}
        try:
            import winreg
        except ImportError:
            # Std module, so should be around - but you never know!
            return proxies
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            if proxyEnable:
                # Returned as Unicode but problems if not converted to ASCII
                proxyServer = str(winreg.QueryValueEx(internetSettings,
                                                      'ProxyServer')[0])
                if '=' in proxyServer:
                    # Per-protocol settings
                    for p in proxyServer.split(';'):
                        protocol, address = p.split('=', 1)
                        # See if address has a type:// prefix
                        import re
                        if not re.match('^([^/:]+)://', address):
                            address = '%s://%s' % (protocol, address)
                        proxies[protocol] = address
                else:
                    # Use one setting for all protocols
                    if proxyServer[:5] == 'http:':
                        proxies['http'] = proxyServer
                    else:
                        proxies['http'] = 'http://%s' % proxyServer
                        proxies['https'] = 'https://%s' % proxyServer
                        proxies['ftp'] = 'ftp://%s' % proxyServer
            internetSettings.Close()
        except (WindowsError, ValueError, TypeError):
            # Either registry key not found etc, or the value in an
            # unexpected format.
            # proxies already set up to be empty so nothing to do
            pass
        return proxies
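
    # Example (illustrative): ProxyServer registry values and the mapping
    # getproxies_registry() builds from them.
    #
    #   'http=proxy1:80;ftp=proxy2:21'  -> {'http': 'http://proxy1:80',
    #                                       'ftp': 'ftp://proxy2:21'}
    #   'proxy.example.com:3128'        -> {'http': 'http://proxy.example.com:3128',
    #                                       'https': 'https://proxy.example.com:3128',
    #                                       'ftp': 'ftp://proxy.example.com:3128'}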
2342
2343 def getproxies():
2344 """Return a dictionary of scheme -> proxy server URL mappings.
2345
2346 Returns settings gathered from the environment, if specified,
2347 or the registry.
2348
2349 """
2350 return getproxies_environment() or getproxies_registry()
2351
    def proxy_bypass_registry(host):
        try:
            import winreg
            import re
        except ImportError:
            # Std modules, so should be around - but you never know!
            return 0
        try:
            internetSettings = winreg.OpenKey(winreg.HKEY_CURRENT_USER,
                r'Software\Microsoft\Windows\CurrentVersion\Internet Settings')
            proxyEnable = winreg.QueryValueEx(internetSettings,
                                              'ProxyEnable')[0]
            proxyOverride = str(winreg.QueryValueEx(internetSettings,
                                                    'ProxyOverride')[0])
            # ^^^^ Returned as Unicode but problems if not converted to ASCII
        except WindowsError:
            return 0
        if not proxyEnable or not proxyOverride:
            return 0
        # try to make a host list from name and IP address.
        rawHost, port = splitport(host)
        host = [rawHost]
        try:
            addr = socket.gethostbyname(rawHost)
            if addr != rawHost:
                host.append(addr)
        except socket.error:
            pass
        try:
            fqdn = socket.getfqdn(rawHost)
            if fqdn != rawHost:
                host.append(fqdn)
        except socket.error:
            pass
        # make a check value list from the registry entry: replace the
        # '<local>' string by the localhost entry and the corresponding
        # canonical entry.
        proxyOverride = proxyOverride.split(';')
        # now check if we match one of the registry values.
        for test in proxyOverride:
            if test == '<local>':
                if '.' not in rawHost:
                    return 1
            test = test.replace(".", r"\.")     # mask dots
            test = test.replace("*", r".*")     # change glob sequence
            test = test.replace("?", r".")      # change glob char
            for val in host:
                # print "%s <--> %s" %( test, val )
                if re.match(test, val, re.I):
                    return 1
        return 0
2403
2404 def proxy_bypass(host):
2405 """Return a dictionary of scheme -> proxy server URL mappings.
2406
2407 Returns settings gathered from the environment, if specified,
2408 or the registry.
2409
2410 """
2411 if getproxies_environment():
2412 return proxy_bypass_environment(host)
2413 else:
2414 return proxy_bypass_registry(host)
2415
2416else:
2417 # By default use environment variables
2418 getproxies = getproxies_environment
2419 proxy_bypass = proxy_bypass_environment