1 """An extensible library for opening URLs using a variety of protocols
3 The simplest way to use this module is to call the urlopen function,
4 which accepts a string containing a URL or a Request object (described
5 below). It opens the URL and returns the results as a file-like
6 object; the returned object has some extra methods described below.
8 The OpenerDirector manages a collection of Handler objects that do
9 all the actual work. Each Handler implements a particular protocol or
10 option. The OpenerDirector is a composite object that invokes the
11 Handlers needed to open the requested URL. For example, the
12 HTTPHandler performs HTTP GET and POST requests and deals with
13 non-error returns. The HTTPRedirectHandler automatically deals with
14 HTTP 301, 302, 303 and 307 redirect errors, and the HTTPDigestAuthHandler
15 deals with digest authentication.
17 urlopen(url, data=None) -- Basic usage is the same as original
18 urllib. Pass the url and optionally data to post to an HTTP URL, and
19 get a file-like object back. One difference is that you can also pass
20 a Request instance instead of URL. Raises a URLError (subclass of
21 IOError); for HTTP errors, raises an HTTPError, which can also be
22 treated as a valid response.
24 build_opener -- Function that creates a new OpenerDirector instance.
25 Will install the default handlers. Accepts one or more Handlers as
26 arguments, either instances or Handler classes that it will
27 instantiate. If one of the arguments is a subclass of the default
28 handler, the argument will be installed instead of the default.
30 install_opener -- Installs a new opener as the default opener.
35 Request -- An object that encapsulates the state of a request. The
36 state can be as simple as the URL. It can also include extra HTTP
37 headers, e.g. a User-Agent.
42 URLError -- A subclass of IOError, individual protocols have their own
45 HTTPError -- Also a valid HTTP response, so you can treat an HTTP error
46 as an exceptional event or valid response.
49 BaseHandler and parent
50 _call_chain conventions
56 # set up authentication info
57 authinfo = urllib2.HTTPBasicAuthHandler()
58 authinfo.add_password(realm='PDQ Application',
59 uri='https://mahler:8092/site-updates.py',
61 passwd='geheim$parole')
63 proxy_support = urllib2.ProxyHandler({"http" : "http://ahad-haam:3128"})
65 # build a new opener that adds authentication and caching FTP handlers
66 opener = urllib2.build_opener(proxy_support, authinfo, urllib2.CacheFTPHandler)
69 urllib2.install_opener(opener)
71 f = urllib2.urlopen('http://www.python.org/')
77 # If an authentication error handler that tries to perform
78 # authentication for some reason but fails, how should the error be
79 # signalled? The client needs to know the HTTP error code. But if
80 # the handler knows that the problem was, e.g., that it didn't know
81 the hash algorithm that was requested in the challenge, it would be good to
82 # pass that information along to the client, too.
83 # ftp errors aren't handled cleanly
84 # check digest against correct (i.e. non-apache) implementation
86 # Possible extensions:
87 # complex proxies XXX not sure what exactly was meant by this
88 # abstract factory for opener
105 from io
import StringIO
107 from urllib
import (unwrap
, unquote
, splittype
, splithost
, quote
,
108 addinfourl
, splitport
, splitquery
,
109 splitattr
, ftpwrapper
, noheaders
, splituser
, splitpasswd
, splitvalue
)
111 # support for FileHandler, proxies via environment variables
112 from urllib
import localhost
, url2pathname
, getproxies
114 # used in User-Agent header sent
115 __version__
= sys
.version
[:3]
def urlopen(url, data=None, timeout=None):
    """Open *url* (a string or a Request object) and return a file-like
    response object.

    *data*, if given, is POSTed to HTTP URLs.  Raises URLError on
    failure (HTTPError for HTTP error responses).

    The module-wide opener is built lazily, once, and reused by every
    call; install_opener() replaces it.  (The visible fragment rebuilt
    the opener on every call, which discards installed openers and is
    needlessly slow.)
    """
    global _opener
    if _opener is None:
        _opener = build_opener()
    return _opener.open(url, data, timeout)
def install_opener(opener):
    """Install *opener* as the default OpenerDirector used by urlopen()."""
    global _opener
    _opener = opener
128 # do these error classes make sense?
129 # make sure all of the IOError stuff is overridden. we just want to be
class URLError(IOError):
    """Error raised by handlers when they cannot open a URL.

    URLError is a sub-type of IOError, but it doesn't share any of
    the implementation.  It needs to override __init__ and __str__.
    It sets self.args for compatibility with other EnvironmentError
    subclasses, but args doesn't have the typical format with errno in
    slot 0 and strerror in slot 1.  This may be better than nothing.
    """

    def __init__(self, reason):
        # reason is usually a string or an exception instance.
        self.args = reason,
        self.reason = reason

    def __str__(self):
        return '<urlopen error %s>' % self.reason
class HTTPError(URLError, addinfourl):
    """Raised when HTTP error occurs, but also acts like non-error return"""
    __super_init = addinfourl.__init__

    def __init__(self, url, code, msg, hdrs, fp):
        self.code = code
        self.msg = msg
        self.hdrs = hdrs
        self.fp = fp
        self.filename = url
        # The addinfourl classes depend on fp being a valid file
        # object.  In some cases, the HTTPError may not have a valid
        # file object.  If this happens, the simplest workaround is to
        # not initialize the base classes.
        if fp is not None:
            self.__super_init(fp, hdrs, url, code)

    def __str__(self):
        return 'HTTP Error %s: %s' % (self.code, self.msg)
# copied from cookielib.py
_cut_port_re = re.compile(r":\d+$")

def request_host(request):
    """Return request-host, as defined by RFC 2965.

    Variation from RFC: returned value is lowercased, for convenient
    comparison.
    """
    url = request.get_full_url()
    host = urlparse.urlparse(url)[1]
    if host == "":
        # Relative/opaque URLs have no netloc; fall back to an explicit
        # Host header, if one was set.
        host = request.get_header("Host", "")
    # remove port, if present
    host = _cut_port_re.sub("", host, 1)
    return host.lower()
185 def __init__(self
, url
, data
=None, headers
={},
186 origin_req_host
=None, unverifiable
=False):
187 # unwrap('<URL:type://host/path>') --> 'type://host/path'
188 self
.__original
= unwrap(url
)
190 # self.__r_type is what's left after doing the splittype
195 for key
, value
in headers
.items():
196 self
.add_header(key
, value
)
197 self
.unredirected_hdrs
= {}
198 if origin_req_host
is None:
199 origin_req_host
= request_host(self
)
200 self
.origin_req_host
= origin_req_host
201 self
.unverifiable
= unverifiable
203 def __getattr__(self
, attr
):
204 # XXX this is a fallback mechanism to guard against these
205 # methods getting called in a non-standard order. this may be
206 # too complicated and/or unnecessary.
207 # XXX should the __r_XXX attributes be public?
208 if attr
[:12] == '_Request__r_':
210 if hasattr(Request
, 'get_' + name
):
211 getattr(self
, 'get_' + name
)()
212 return getattr(self
, attr
)
213 raise AttributeError(attr
)
215 def get_method(self
):
221 # XXX these helper methods are lame
223 def add_data(self
, data
):
227 return self
.data
is not None
232 def get_full_url(self
):
233 return self
.__original
236 if self
.type is None:
237 self
.type, self
.__r
_type
= splittype(self
.__original
)
238 if self
.type is None:
239 raise ValueError("unknown url type: %s" % self
.__original
)
243 if self
.host
is None:
244 self
.host
, self
.__r
_host
= splithost(self
.__r
_type
)
246 self
.host
= unquote(self
.host
)
249 def get_selector(self
):
252 def set_proxy(self
, host
, type):
253 self
.host
, self
.type = host
, type
254 self
.__r
_host
= self
.__original
256 def get_origin_req_host(self
):
257 return self
.origin_req_host
259 def is_unverifiable(self
):
260 return self
.unverifiable
262 def add_header(self
, key
, val
):
263 # useful for something like authentication
264 self
.headers
[key
.capitalize()] = val
266 def add_unredirected_header(self
, key
, val
):
267 # will not be added to a redirected request
268 self
.unredirected_hdrs
[key
.capitalize()] = val
270 def has_header(self
, header_name
):
271 return (header_name
in self
.headers
or
272 header_name
in self
.unredirected_hdrs
)
274 def get_header(self
, header_name
, default
=None):
275 return self
.headers
.get(
277 self
.unredirected_hdrs
.get(header_name
, default
))
279 def header_items(self
):
280 hdrs
= self
.unredirected_hdrs
.copy()
281 hdrs
.update(self
.headers
)
282 return list(hdrs
.items())
284 class OpenerDirector
:
286 client_version
= "Python-urllib/%s" % __version__
287 self
.addheaders
= [('User-agent', client_version
)]
288 # manage the individual handlers
290 self
.handle_open
= {}
291 self
.handle_error
= {}
292 self
.process_response
= {}
293 self
.process_request
= {}
295 def add_handler(self
, handler
):
296 if not hasattr(handler
, "add_parent"):
297 raise TypeError("expected BaseHandler instance, got %r" %
301 for meth
in dir(handler
):
302 if meth
in ["redirect_request", "do_open", "proxy_open"]:
303 # oops, coincidental match
308 condition
= meth
[i
+1:]
310 if condition
.startswith("error"):
311 j
= condition
.find("_") + i
+ 1
317 lookup
= self
.handle_error
.get(protocol
, {})
318 self
.handle_error
[protocol
] = lookup
319 elif condition
== "open":
321 lookup
= self
.handle_open
322 elif condition
== "response":
324 lookup
= self
.process_response
325 elif condition
== "request":
327 lookup
= self
.process_request
331 handlers
= lookup
.setdefault(kind
, [])
333 bisect
.insort(handlers
, handler
)
335 handlers
.append(handler
)
339 # the handlers must work in an specific order, the order
340 # is specified in a Handler attribute
341 bisect
.insort(self
.handlers
, handler
)
342 handler
.add_parent(self
)
345 # Only exists for backwards compatibility.
348 def _call_chain(self
, chain
, kind
, meth_name
, *args
):
349 # Handlers raise an exception if no one else should try to handle
350 # the request, or return None if they can't but another handler
351 # could. Otherwise, they return the response.
352 handlers
= chain
.get(kind
, ())
353 for handler
in handlers
:
354 func
= getattr(handler
, meth_name
)
357 if result
is not None:
360 def open(self
, fullurl
, data
=None, timeout
=None):
361 # accept a URL or a Request object
362 if isinstance(fullurl
, str):
363 req
= Request(fullurl
, data
)
369 req
.timeout
= timeout
370 protocol
= req
.get_type()
372 # pre-process request
373 meth_name
= protocol
+"_request"
374 for processor
in self
.process_request
.get(protocol
, []):
375 meth
= getattr(processor
, meth_name
)
378 response
= self
._open
(req
, data
)
380 # post-process response
381 meth_name
= protocol
+"_response"
382 for processor
in self
.process_response
.get(protocol
, []):
383 meth
= getattr(processor
, meth_name
)
384 response
= meth(req
, response
)
388 def _open(self
, req
, data
=None):
389 result
= self
._call
_chain
(self
.handle_open
, 'default',
394 protocol
= req
.get_type()
395 result
= self
._call
_chain
(self
.handle_open
, protocol
, protocol
+
400 return self
._call
_chain
(self
.handle_open
, 'unknown',
403 def error(self
, proto
, *args
):
404 if proto
in ('http', 'https'):
405 # XXX http[s] protocols are special-cased
406 dict = self
.handle_error
['http'] # https is not different than http
407 proto
= args
[2] # YUCK!
408 meth_name
= 'http_error_%s' % proto
412 dict = self
.handle_error
413 meth_name
= proto
+ '_error'
415 args
= (dict, proto
, meth_name
) + args
416 result
= self
._call
_chain
(*args
)
421 args
= (dict, 'default', 'http_error_default') + orig_args
422 return self
._call
_chain
(*args
)
424 # XXX probably also want an abstract factory that knows when it makes
425 # sense to skip a superclass in favor of a subclass and when it might
426 # make sense to include both
428 def build_opener(*handlers
):
429 """Create an opener object from a list of handlers.
431 The opener will use several default handlers, including support
434 If any of the handlers passed as arguments are subclasses of the
435 default handlers, the default handlers will not be used.
438 return isinstance(obj
, type) or hasattr(obj
, "__bases__")
440 opener
= OpenerDirector()
441 default_classes
= [ProxyHandler
, UnknownHandler
, HTTPHandler
,
442 HTTPDefaultErrorHandler
, HTTPRedirectHandler
,
443 FTPHandler
, FileHandler
, HTTPErrorProcessor
]
444 if hasattr(httplib
, 'HTTPS'):
445 default_classes
.append(HTTPSHandler
)
447 for klass
in default_classes
:
448 for check
in handlers
:
450 if issubclass(check
, klass
):
452 elif isinstance(check
, klass
):
455 default_classes
.remove(klass
)
457 for klass
in default_classes
:
458 opener
.add_handler(klass())
463 opener
.add_handler(h
)
469 def add_parent(self
, parent
):
473 # Only exists for backwards compatibility
476 def __lt__(self
, other
):
477 if not hasattr(other
, "handler_order"):
478 # Try to preserve the old behavior of having custom classes
479 # inserted after default ones (works only for custom user
480 # classes which are not aware of handler_order).
482 return self
.handler_order
< other
.handler_order
class HTTPErrorProcessor(BaseHandler):
    """Process HTTP error responses."""
    handler_order = 1000  # after all other processing

    def http_response(self, request, response):
        code, msg, hdrs = response.code, response.msg, response.info()
        # According to RFC 2616, "2xx" code indicates that the client's
        # request was successfully received, understood, and accepted.
        # Anything else is routed through the error chain, which may
        # raise HTTPError or produce a replacement response.
        if not (200 <= code < 300):
            response = self.parent.error(
                'http', request, response, code, msg, hdrs)
        return response

    https_response = http_response
class HTTPDefaultErrorHandler(BaseHandler):
    """Fallback: convert any otherwise-unhandled HTTP error response
    into an HTTPError exception."""

    def http_error_default(self, req, fp, code, msg, hdrs):
        raise HTTPError(req.get_full_url(), code, msg, hdrs, fp)
506 class HTTPRedirectHandler(BaseHandler
):
507 # maximum number of redirections to any single URL
508 # this is needed because of the state that cookies introduce
510 # maximum total number of redirections (regardless of URL) before
511 # assuming we're in a loop
512 max_redirections
= 10
514 def redirect_request(self
, req
, fp
, code
, msg
, headers
, newurl
):
515 """Return a Request or None in response to a redirect.
517 This is called by the http_error_30x methods when a
518 redirection response is received. If a redirection should
519 take place, return a new Request to allow http_error_30x to
520 perform the redirect. Otherwise, raise HTTPError if no-one
521 else should try to handle this url. Return None if you can't
522 but another Handler might.
525 if (code
in (301, 302, 303, 307) and m
in ("GET", "HEAD")
526 or code
in (301, 302, 303) and m
== "POST"):
527 # Strictly (according to RFC 2616), 301 or 302 in response
528 # to a POST MUST NOT cause a redirection without confirmation
529 # from the user (of urllib2, in this case). In practice,
530 # essentially all clients do redirect in this case, so we
532 # be conciliant with URIs containing a space
533 newurl
= newurl
.replace(' ', '%20')
534 newheaders
= dict((k
,v
) for k
,v
in req
.headers
.items()
535 if k
.lower() not in ("content-length", "content-type")
537 return Request(newurl
,
539 origin_req_host
=req
.get_origin_req_host(),
542 raise HTTPError(req
.get_full_url(), code
, msg
, headers
, fp
)
544 # Implementation note: To avoid the server sending us into an
545 # infinite loop, the request object needs to track what URLs we
546 # have already seen. Do this by adding a handler-specific
547 # attribute to the Request object.
548 def http_error_302(self
, req
, fp
, code
, msg
, headers
):
549 # Some servers (incorrectly) return multiple Location headers
550 # (so probably same goes for URI). Use first header.
551 if 'location' in headers
:
552 newurl
= headers
.getheaders('location')[0]
553 elif 'uri' in headers
:
554 newurl
= headers
.getheaders('uri')[0]
557 newurl
= urlparse
.urljoin(req
.get_full_url(), newurl
)
559 # XXX Probably want to forget about the state of the current
560 # request, although that might interact poorly with other
561 # handlers that also use handler-specific request attributes
562 new
= self
.redirect_request(req
, fp
, code
, msg
, headers
, newurl
)
567 # .redirect_dict has a key url if url was previously visited.
568 if hasattr(req
, 'redirect_dict'):
569 visited
= new
.redirect_dict
= req
.redirect_dict
570 if (visited
.get(newurl
, 0) >= self
.max_repeats
or
571 len(visited
) >= self
.max_redirections
):
572 raise HTTPError(req
.get_full_url(), code
,
573 self
.inf_msg
+ msg
, headers
, fp
)
575 visited
= new
.redirect_dict
= req
.redirect_dict
= {}
576 visited
[newurl
] = visited
.get(newurl
, 0) + 1
578 # Don't close the fp until we are sure that we won't use it
583 return self
.parent
.open(new
)
585 http_error_301
= http_error_303
= http_error_307
= http_error_302
587 inf_msg
= "The HTTP server returned a redirect error that would " \
588 "lead to an infinite loop.\n" \
589 "The last 30x error message was:\n"
def _parse_proxy(proxy):
    """Return (scheme, user, password, host/port) given a URL or an authority.

    If a URL is supplied, it must have an authority (host:port) component.
    According to RFC 3986, having an authority component means the URL must
    have two slashes after the scheme:

    >>> _parse_proxy('file:/ftp.example.com/')
    Traceback (most recent call last):
    ValueError: proxy URL with no authority: 'file:/ftp.example.com/'

    The first three items of the returned tuple may be None.

    Examples of authority parsing:

    >>> _parse_proxy('proxy.example.com')
    (None, None, None, 'proxy.example.com')
    >>> _parse_proxy('proxy.example.com:3128')
    (None, None, None, 'proxy.example.com:3128')

    The authority component may optionally include userinfo (assumed to be
    username:password):

    >>> _parse_proxy('joe:password@proxy.example.com')
    (None, 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('joe:password@proxy.example.com:3128')
    (None, 'joe', 'password', 'proxy.example.com:3128')

    Same examples, but with URLs instead:

    >>> _parse_proxy('http://proxy.example.com/')
    ('http', None, None, 'proxy.example.com')
    >>> _parse_proxy('http://proxy.example.com:3128/')
    ('http', None, None, 'proxy.example.com:3128')
    >>> _parse_proxy('http://joe:password@proxy.example.com/')
    ('http', 'joe', 'password', 'proxy.example.com')
    >>> _parse_proxy('http://joe:password@proxy.example.com:3128')
    ('http', 'joe', 'password', 'proxy.example.com:3128')

    Everything after the authority is ignored:

    >>> _parse_proxy('ftp://joe:password@proxy.example.com/rubbish:3128')
    ('ftp', 'joe', 'password', 'proxy.example.com')

    Test for no trailing '/' case:

    >>> _parse_proxy('http://joe:password@proxy.example.com')
    ('http', 'joe', 'password', 'proxy.example.com')
    """
    scheme, r_scheme = splittype(proxy)
    if not r_scheme.startswith("/"):
        # Bare authority with no scheme.
        scheme = None
        authority = proxy
    else:
        # URL
        if not r_scheme.startswith("//"):
            raise ValueError("proxy URL with no authority: %r" % proxy)
        # We have an authority, so for RFC 3986-compliant URLs (by ss 3.2.
        # and 3.3.), path is empty or starts with '/'
        end = r_scheme.find("/", 2)
        if end == -1:
            end = None
        authority = r_scheme[2:end]
    userinfo, hostport = splituser(authority)
    if userinfo is not None:
        user, password = splitpasswd(userinfo)
    else:
        user = password = None
    return scheme, user, password, hostport
class ProxyHandler(BaseHandler):
    """Rewrite requests so that they are sent through configured proxies."""
    # Proxies must be in front of every other handler.
    handler_order = 100

    def __init__(self, proxies=None):
        if proxies is None:
            # Fall back to proxies configured in the environment.
            proxies = getproxies()
        assert hasattr(proxies, 'keys'), "proxies must be a mapping"
        self.proxies = proxies
        # Install one <scheme>_open method per configured scheme.  The
        # default arguments bind the current url/type so each closure
        # keeps its own values (avoids the late-binding pitfall).
        for type, url in proxies.items():
            setattr(self, '%s_open' % type,
                    lambda r, proxy=url, type=type, meth=self.proxy_open: \
                    meth(r, proxy, type))

    def proxy_open(self, req, proxy, type):
        orig_type = req.get_type()
        proxy_type, user, password, hostport = _parse_proxy(proxy)
        if proxy_type is None:
            proxy_type = orig_type
        if user and password:
            user_pass = '%s:%s' % (unquote(user), unquote(password))
            creds = base64.b64encode(user_pass.encode()).decode("ascii")
            req.add_header('Proxy-authorization', 'Basic ' + creds)
        hostport = unquote(hostport)
        req.set_proxy(hostport, proxy_type)
        if orig_type == proxy_type:
            # let other handlers take care of it
            return None
        else:
            # need to start over, because the other handlers don't
            # grok the proxy's URL type
            # e.g. if we have a constructor arg proxies like so:
            # {'http': 'ftp://proxy.example.com'}, we may end up turning
            # a request for http://acme.example.com/a into one for
            # ftp://proxy.example.com/a
            return self.parent.open(req)
701 class HTTPPasswordMgr
:
706 def add_password(self
, realm
, uri
, user
, passwd
):
707 # uri could be a single URI or a sequence
708 if isinstance(uri
, str):
710 if not realm
in self
.passwd
:
711 self
.passwd
[realm
] = {}
712 for default_port
in True, False:
714 [self
.reduce_uri(u
, default_port
) for u
in uri
])
715 self
.passwd
[realm
][reduced_uri
] = (user
, passwd
)
717 def find_user_password(self
, realm
, authuri
):
718 domains
= self
.passwd
.get(realm
, {})
719 for default_port
in True, False:
720 reduced_authuri
= self
.reduce_uri(authuri
, default_port
)
721 for uris
, authinfo
in domains
.items():
723 if self
.is_suburi(uri
, reduced_authuri
):
727 def reduce_uri(self
, uri
, default_port
=True):
728 """Accept authority or URI and extract only the authority and path."""
729 # note HTTP URLs do not have a userinfo component
730 parts
= urlparse
.urlsplit(uri
)
735 path
= parts
[2] or '/'
741 host
, port
= splitport(authority
)
742 if default_port
and port
is None and scheme
is not None:
746 if dport
is not None:
747 authority
= "%s:%d" % (host
, dport
)
748 return authority
, path
750 def is_suburi(self
, base
, test
):
751 """Check if test is below base in a URI tree
753 Both args must be URIs in reduced form.
757 if base
[0] != test
[0]:
759 common
= posixpath
.commonprefix((base
[1], test
[1]))
760 if len(common
) == len(base
[1]):
765 class HTTPPasswordMgrWithDefaultRealm(HTTPPasswordMgr
):
767 def find_user_password(self
, realm
, authuri
):
768 user
, password
= HTTPPasswordMgr
.find_user_password(self
, realm
,
771 return user
, password
772 return HTTPPasswordMgr
.find_user_password(self
, None, authuri
)
775 class AbstractBasicAuthHandler
:
777 # XXX this allows for multiple auth-schemes, but will stupidly pick
778 # the last one with a realm specified.
780 # allow for double- and single-quoted realm values
781 # (single quotes are a violation of the RFC, but appear in the wild)
782 rx
= re
.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
783 'realm=(["\'])(.*?)\\2', re
.I
)
785 # XXX could pre-emptively send auth info already accepted (RFC 2617,
786 # end of section 2, and section 1.2 immediately after "credentials"
789 def __init__(self
, password_mgr
=None):
790 if password_mgr
is None:
791 password_mgr
= HTTPPasswordMgr()
792 self
.passwd
= password_mgr
793 self
.add_password
= self
.passwd
.add_password
795 def http_error_auth_reqed(self
, authreq
, host
, req
, headers
):
796 # host may be an authority (without userinfo) or a URL with an
798 # XXX could be multiple headers
799 authreq
= headers
.get(authreq
, None)
801 mo
= AbstractBasicAuthHandler
.rx
.search(authreq
)
803 scheme
, quote
, realm
= mo
.groups()
804 if scheme
.lower() == 'basic':
805 return self
.retry_http_basic_auth(host
, req
, realm
)
807 def retry_http_basic_auth(self
, host
, req
, realm
):
808 user
, pw
= self
.passwd
.find_user_password(realm
, host
)
810 raw
= "%s:%s" % (user
, pw
)
811 auth
= "Basic " + base64
.b64encode(raw
.encode()).decode("ascii")
812 if req
.headers
.get(self
.auth_header
, None) == auth
:
814 req
.add_header(self
.auth_header
, auth
)
815 return self
.parent
.open(req
)
820 class HTTPBasicAuthHandler(AbstractBasicAuthHandler
, BaseHandler
):
822 auth_header
= 'Authorization'
824 def http_error_401(self
, req
, fp
, code
, msg
, headers
):
825 url
= req
.get_full_url()
826 return self
.http_error_auth_reqed('www-authenticate',
class ProxyBasicAuthHandler(AbstractBasicAuthHandler, BaseHandler):
    """Answer 407 responses by retrying with Basic proxy credentials."""

    auth_header = 'Proxy-authorization'

    def http_error_407(self, req, fp, code, msg, headers):
        # http_error_auth_reqed requires that there is no userinfo
        # component in the authority.  Assume there isn't one, since
        # urllib2 does not (and per RFC 3986 s. 3.2.1 should not)
        # support requests for URLs containing userinfo.
        return self.http_error_auth_reqed('proxy-authenticate',
                                          req.get_host(), req, headers)
845 """Return n random bytes."""
848 class AbstractDigestAuthHandler
:
849 # Digest authentication is specified in RFC 2617.
851 # XXX The client does not inspect the Authentication-Info header
852 # in a successful response.
854 # XXX It should be possible to test this implementation against
855 # a mock server that just generates a static set of challenges.
857 # XXX qop="auth-int" supports is shaky
859 def __init__(self
, passwd
=None):
861 passwd
= HTTPPasswordMgr()
863 self
.add_password
= self
.passwd
.add_password
867 def reset_retry_count(self
):
870 def http_error_auth_reqed(self
, auth_header
, host
, req
, headers
):
871 authreq
= headers
.get(auth_header
, None)
873 # Don't fail endlessly - if we failed once, we'll probably
874 # fail a second time. Hm. Unless the Password Manager is
875 # prompting for the information. Crap. This isn't great
876 # but it's better than the current 'repeat until recursion
877 # depth exceeded' approach <wink>
878 raise HTTPError(req
.get_full_url(), 401, "digest auth failed",
883 scheme
= authreq
.split()[0]
884 if scheme
.lower() == 'digest':
885 return self
.retry_http_digest_auth(req
, authreq
)
887 def retry_http_digest_auth(self
, req
, auth
):
888 token
, challenge
= auth
.split(' ', 1)
889 chal
= parse_keqv_list(parse_http_list(challenge
))
890 auth
= self
.get_authorization(req
, chal
)
892 auth_val
= 'Digest %s' % auth
893 if req
.headers
.get(self
.auth_header
, None) == auth_val
:
895 req
.add_unredirected_header(self
.auth_header
, auth_val
)
896 resp
= self
.parent
.open(req
)
899 def get_cnonce(self
, nonce
):
900 # The cnonce-value is an opaque
901 # quoted string value provided by the client and used by both client
902 # and server to avoid chosen plaintext attacks, to provide mutual
903 # authentication, and to provide some message integrity protection.
904 # This isn't a fabulous effort, but it's probably Good Enough.
905 s
= "%s:%s:%s:" % (self
.nonce_count
, nonce
, time
.ctime())
906 b
= s
.encode("ascii") + randombytes(8)
907 dig
= hashlib
.sha1(b
).hexdigest()
910 def get_authorization(self
, req
, chal
):
912 realm
= chal
['realm']
913 nonce
= chal
['nonce']
914 qop
= chal
.get('qop')
915 algorithm
= chal
.get('algorithm', 'MD5')
916 # mod_digest doesn't send an opaque, even though it isn't
917 # supposed to be optional
918 opaque
= chal
.get('opaque', None)
922 H
, KD
= self
.get_algorithm_impls(algorithm
)
926 user
, pw
= self
.passwd
.find_user_password(realm
, req
.get_full_url())
930 # XXX not implemented yet
932 entdig
= self
.get_entity_digest(req
.get_data(), chal
)
936 A1
= "%s:%s:%s" % (user
, realm
, pw
)
937 A2
= "%s:%s" % (req
.get_method(),
938 # XXX selector: what about proxies and full urls
941 self
.nonce_count
+= 1
942 ncvalue
= '%08x' % self
.nonce_count
943 cnonce
= self
.get_cnonce(nonce
)
944 noncebit
= "%s:%s:%s:%s:%s" % (nonce
, ncvalue
, cnonce
, qop
, H(A2
))
945 respdig
= KD(H(A1
), noncebit
)
947 respdig
= KD(H(A1
), "%s:%s" % (nonce
, H(A2
)))
949 # XXX handle auth-int.
950 raise URLError("qop '%s' is not supported." % qop
)
952 # XXX should the partial digests be encoded too?
954 base
= 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
955 'response="%s"' % (user
, realm
, nonce
, req
.get_selector(),
958 base
+= ', opaque="%s"' % opaque
960 base
+= ', digest="%s"' % entdig
961 base
+= ', algorithm="%s"' % algorithm
963 base
+= ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue
, cnonce
)
966 def get_algorithm_impls(self
, algorithm
):
967 # algorithm should be case-insensitive according to RFC2617
968 algorithm
= algorithm
.upper()
969 # lambdas assume digest modules are imported at the top level
970 if algorithm
== 'MD5':
971 H
= lambda x
: hashlib
.md5(x
.encode("ascii")).hexdigest()
972 elif algorithm
== 'SHA':
973 H
= lambda x
: hashlib
.sha1(x
.encode("ascii")).hexdigest()
975 KD
= lambda s
, d
: H("%s:%s" % (s
, d
))
978 def get_entity_digest(self
, data
, chal
):
979 # XXX not implemented yet
983 class HTTPDigestAuthHandler(BaseHandler
, AbstractDigestAuthHandler
):
984 """An authentication protocol defined by RFC 2069
986 Digest authentication improves on basic authentication because it
987 does not transmit passwords in the clear.
990 auth_header
= 'Authorization'
991 handler_order
= 490 # before Basic auth
993 def http_error_401(self
, req
, fp
, code
, msg
, headers
):
994 host
= urlparse
.urlparse(req
.get_full_url())[1]
995 retry
= self
.http_error_auth_reqed('www-authenticate',
997 self
.reset_retry_count()
1001 class ProxyDigestAuthHandler(BaseHandler
, AbstractDigestAuthHandler
):
1003 auth_header
= 'Proxy-Authorization'
1004 handler_order
= 490 # before Basic auth
1006 def http_error_407(self
, req
, fp
, code
, msg
, headers
):
1007 host
= req
.get_host()
1008 retry
= self
.http_error_auth_reqed('proxy-authenticate',
1010 self
.reset_retry_count()
1013 class AbstractHTTPHandler(BaseHandler
):
1015 def __init__(self
, debuglevel
=0):
1016 self
._debuglevel
= debuglevel
1018 def set_http_debuglevel(self
, level
):
1019 self
._debuglevel
= level
1021 def do_request_(self
, request
):
1022 host
= request
.get_host()
1024 raise URLError('no host given')
1026 if request
.has_data(): # POST
1027 data
= request
.get_data()
1028 if not request
.has_header('Content-type'):
1029 request
.add_unredirected_header(
1031 'application/x-www-form-urlencoded')
1032 if not request
.has_header('Content-length'):
1033 request
.add_unredirected_header(
1034 'Content-length', '%d' % len(data
))
1036 scheme
, sel
= splittype(request
.get_selector())
1037 sel_host
, sel_path
= splithost(sel
)
1038 if not request
.has_header('Host'):
1039 request
.add_unredirected_header('Host', sel_host
or host
)
1040 for name
, value
in self
.parent
.addheaders
:
1041 name
= name
.capitalize()
1042 if not request
.has_header(name
):
1043 request
.add_unredirected_header(name
, value
)
1047 def do_open(self
, http_class
, req
):
1048 """Return an addinfourl object for the request, using http_class.
1050 http_class must implement the HTTPConnection API from httplib.
1051 The addinfourl return value is a file-like object. It also
1052 has methods and attributes including:
1053 - info(): return a mimetools.Message object for the headers
1054 - geturl(): return the original request URL
1055 - code: HTTP status code
1057 host
= req
.get_host()
1059 raise URLError('no host given')
1061 h
= http_class(host
, timeout
=req
.timeout
) # will parse host:port
1062 h
.set_debuglevel(self
._debuglevel
)
1064 headers
= dict(req
.headers
)
1065 headers
.update(req
.unredirected_hdrs
)
1066 # We want to make an HTTP/1.1 request, but the addinfourl
1067 # class isn't prepared to deal with a persistent connection.
1068 # It will try to read all remaining data from the socket,
1069 # which will block while the server waits for the next request.
1070 # So make sure the connection gets closed after the (only)
1072 headers
["Connection"] = "close"
1074 (name
.title(), val
) for name
, val
in headers
.items())
1076 h
.request(req
.get_method(), req
.get_selector(), req
.data
, headers
)
1078 except socket
.error
as err
: # XXX what error?
1081 # Pick apart the HTTPResponse object to get the addinfourl
1082 # object initialized properly.
1084 # XXX Should an HTTPResponse object really be passed to
1085 # BufferedReader? If so, we should change httplib to support
1086 # this use directly.
1088 # Add some fake methods to the reader to satisfy BufferedReader.
1089 r
.readable
= lambda: True
1090 r
.writable
= r
.seekable
= lambda: False
1091 r
._checkReadable
= lambda: True
1092 r
._checkWritable
= lambda: False
1093 fp
= io
.BufferedReader(r
)
1095 resp
= addinfourl(fp
, r
.msg
, req
.get_full_url())
1096 resp
.code
= r
.status
class HTTPHandler(AbstractHTTPHandler):
    """Open http:// URLs via httplib.HTTPConnection."""

    def http_open(self, req):
        return self.do_open(httplib.HTTPConnection, req)

    http_request = AbstractHTTPHandler.do_request_
if hasattr(httplib, 'HTTPS'):
    class HTTPSHandler(AbstractHTTPHandler):
        """Open https:// URLs (defined only when httplib has SSL support)."""

        def https_open(self, req):
            return self.do_open(httplib.HTTPSConnection, req)

        https_request = AbstractHTTPHandler.do_request_
class HTTPCookieProcessor(BaseHandler):
    """Handler that maintains an HTTP cookie jar across requests.

    Outgoing requests get a Cookie header from the jar; incoming
    responses have their Set-Cookie headers extracted back into it.
    """

    def __init__(self, cookiejar=None):
        import cookielib
        if cookiejar is None:
            cookiejar = cookielib.CookieJar()
        self.cookiejar = cookiejar

    def http_request(self, request):
        # Attach any cookies that match this request.
        self.cookiejar.add_cookie_header(request)
        # *_request processors must return the (possibly modified)
        # request so the OpenerDirector chain can keep going; without
        # this return the opener receives None and fails.
        return request

    def http_response(self, request, response):
        # Store cookies the server set, keyed against the request.
        self.cookiejar.extract_cookies(response, request)
        # Likewise, *_response processors must return the response.
        return response

    https_request = http_request
    https_response = http_response
class UnknownHandler(BaseHandler):
    """Fallback handler: any scheme no other handler claimed is an error."""

    def unknown_open(self, req):
        # Report the unsupported scheme back to the caller.
        scheme = req.get_type()
        raise URLError('unknown url type: %s' % scheme)
def parse_keqv_list(l):
    """Parse a list of key=value strings where keys are not duplicated.

    Each element must contain at least one '='.  Values surrounded by
    double quotes have one level of quoting stripped.  Returns a dict
    mapping keys to values.
    """
    parsed = {}
    for elt in l:
        k, v = elt.split('=', 1)
        # Strip one level of surrounding double quotes.  The length
        # guard also prevents an IndexError for an empty value ('k='),
        # which the unguarded v[0] test would raise.
        if len(v) > 1 and v[0] == '"' and v[-1] == '"':
            v = v[1:-1]
        parsed[k] = v
    return parsed
def parse_http_list(s):
    """Parse lists as described by RFC 2068 Section 2.

    In particular, parse comma-separated lists where the elements of
    the list may include quoted-strings.  A quoted-string could
    contain a comma.  A non-quoted string could have quotes in the
    middle.  Neither commas nor quotes count if they are escaped.

    Only double-quotes count, not single-quotes.
    """
    parts = []
    current = ''
    escaped = False
    quoted = False

    for ch in s:
        if escaped:
            # Previous char was a backslash inside quotes: take this
            # char literally.
            current += ch
            escaped = False
        elif quoted:
            if ch == '\\':
                escaped = True
            else:
                if ch == '"':
                    quoted = False
                current += ch
        elif ch == ',':
            # Unquoted comma: element boundary.
            parts.append(current)
            current = ''
        else:
            if ch == '"':
                quoted = True
            current += ch

    # Append the trailing element, if any.
    if current:
        parts.append(current)

    return [p.strip() for p in parts]
class FileHandler(BaseHandler):
    # Use local file or FTP depending on form of URL
    def file_open(self, req):
        url = req.get_selector()
        # 'file://host/...' with a non-empty, non-local-looking host is
        # really an FTP request in disguise; re-dispatch it.
        if url[:2] == '//' and url[2:3] != '/':
            req.type = 'ftp'
            return self.parent.open(req)
        else:
            return self.open_local_file(req)

    # names for the localhost; computed lazily by get_names() and
    # cached on the class so every instance shares it.
    names = None

    def get_names(self):
        """Return the tuple of addresses considered local, computing it once."""
        if FileHandler.names is None:
            try:
                FileHandler.names = (socket.gethostbyname('localhost'),
                                     socket.gethostbyname(socket.gethostname()))
            except socket.gaierror:
                # The machine's own hostname may not resolve; fall back
                # to localhost alone.
                FileHandler.names = (socket.gethostbyname('localhost'),)
        return FileHandler.names

    # not entirely sure what the rules are here
    def open_local_file(self, req):
        import email.utils
        import mimetypes
        host = req.get_host()
        file = req.get_selector()
        localfile = url2pathname(file)
        try:
            stats = os.stat(localfile)
            size = stats.st_size
            modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
            mtype = mimetypes.guess_type(file)[0]
            headers = mimetools.Message(StringIO(
                'Content-type: %s\nContent-length: %d\nLast-modified: %s\n' %
                (mtype or 'text/plain', size, modified)))
            if host:
                host, port = splitport(host)
            # Serve the file only when no host was given, or the host
            # (without an explicit port) resolves to this machine.
            if not host or \
                (not port and _safe_gethostbyname(host) in self.get_names()):
                return addinfourl(open(localfile, 'rb'),
                                  headers, 'file:'+file)
        except OSError as msg:
            # urllib2 users shouldn't expect OSErrors coming from urlopen()
            raise URLError(msg)
        raise URLError('file not on local host')
1239 def _safe_gethostbyname(host
):
1241 return socket
.gethostbyname(host
)
1242 except socket
.gaierror
:
class FTPHandler(BaseHandler):
    """Open ftp: URLs, one fresh FTP connection per request."""

    def ftp_open(self, req):
        import ftplib
        import mimetypes
        host = req.get_host()
        if not host:
            raise URLError('ftp error: no host given')
        host, port = splitport(host)
        if port is None:
            port = ftplib.FTP_PORT
        else:
            port = int(port)

        # username/password handling
        user, host = splituser(host)
        if user:
            user, passwd = splitpasswd(user)
        else:
            passwd = None
        host = unquote(host)
        user = unquote(user or '')
        passwd = unquote(passwd or '')

        try:
            host = socket.gethostbyname(host)
        except socket.error as msg:
            raise URLError(msg)
        path, attrs = splitattr(req.get_selector())
        dirs = path.split('/')
        dirs = list(map(unquote, dirs))
        # Last component is the file; a leading empty component comes
        # from the initial '/' and is dropped.
        dirs, file = dirs[:-1], dirs[-1]
        if dirs and not dirs[0]:
            dirs = dirs[1:]
        try:
            fw = self.connect_ftp(user, passwd, host, port, dirs, req.timeout)
            # Binary transfer for files, directory listing otherwise,
            # unless a ;type= attribute overrides it.
            type = file and 'I' or 'D'
            for attr in attrs:
                attr, value = splitvalue(attr)
                if attr.lower() == 'type' and \
                   value in ('a', 'A', 'i', 'I', 'd', 'D'):
                    type = value.upper()
            fp, retrlen = fw.retrfile(file, type)
            headers = ""
            mtype = mimetypes.guess_type(req.get_full_url())[0]
            if mtype:
                headers += "Content-type: %s\n" % mtype
            if retrlen is not None and retrlen >= 0:
                headers += "Content-length: %d\n" % retrlen
            sf = StringIO(headers)
            headers = mimetools.Message(sf)
            return addinfourl(fp, headers, req.get_full_url())
        except ftplib.all_errors as msg:
            raise URLError('ftp error: %s' % msg).with_traceback(sys.exc_info()[2])

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        # Subclasses (e.g. CacheFTPHandler) override this to reuse
        # connections instead of opening a new one every time.
        fw = ftpwrapper(user, passwd, host, port, dirs, timeout)
        return fw
class CacheFTPHandler(FTPHandler):
    """FTP handler that caches ftpwrapper connections for reuse."""
    # XXX would be nice to have pluggable cache strategies
    # XXX this stuff is definitely not thread safe

    def __init__(self):
        self.cache = {}        # key -> live ftpwrapper
        self.timeout = {}      # key -> absolute expiry time
        self.soonest = 0       # earliest expiry among cached entries
        self.delay = 60        # seconds a connection stays cached
        self.max_conns = 16    # cap on simultaneously cached connections

    def setTimeout(self, t):
        self.delay = t

    def setMaxConns(self, m):
        self.max_conns = m

    def connect_ftp(self, user, passwd, host, port, dirs, timeout):
        key = user, host, port, '/'.join(dirs), timeout
        if key in self.cache:
            # Cache hit: just refresh the entry's expiry time.
            self.timeout[key] = time.time() + self.delay
        else:
            self.cache[key] = ftpwrapper(user, passwd, host, port, dirs,
                                         timeout)
            self.timeout[key] = time.time() + self.delay
        self.check_cache()
        return self.cache[key]

    def check_cache(self):
        """Evict expired connections, then enforce the size cap."""
        # first check for old ones
        t = time.time()
        if self.soonest <= t:
            # Iterate over a snapshot since we delete while looping.
            for k, v in list(self.timeout.items()):
                if v < t:
                    self.cache[k].close()
                    del self.cache[k]
                    del self.timeout[k]
            # Guard the empty case: min() of an empty sequence raises
            # ValueError once every entry has been evicted (e.g. after
            # setTimeout() with a small delay).
            if self.timeout:
                self.soonest = min(self.timeout.values())
            else:
                self.soonest = 0

        # then check the size
        if len(self.cache) == self.max_conns:
            for k, v in list(self.timeout.items()):
                if v == self.soonest:
                    del self.cache[k]
                    del self.timeout[k]
                    break
            if self.timeout:
                self.soonest = min(self.timeout.values())
            else:
                self.soonest = 0