IMP: Upgraded requests to 2.7.0

This commit is contained in:
evilhero 2015-05-19 13:38:36 -04:00
parent 826ef79622
commit 83847d1ef9
24 changed files with 689 additions and 333 deletions

View File

@ -1,4 +1,4 @@
Copyright 2014 Kenneth Reitz Copyright 2015 Kenneth Reitz
Licensed under the Apache License, Version 2.0 (the "License"); Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License. you may not use this file except in compliance with the License.

View File

@ -1,11 +1,13 @@
Requests: HTTP for Humans Requests: HTTP for Humans
========================= =========================
.. image:: https://badge.fury.io/py/requests.png .. image:: https://img.shields.io/pypi/v/requests.svg
:target: http://badge.fury.io/py/requests :target: https://pypi.python.org/pypi/requests
.. image:: https://img.shields.io/pypi/dm/requests.svg
:target: https://pypi.python.org/pypi/requests
.. image:: https://pypip.in/d/requests/badge.png
:target: https://crate.io/packages/requests/
Requests is an Apache2 Licensed HTTP library, written in Python, for human Requests is an Apache2 Licensed HTTP library, written in Python, for human

View File

@ -6,7 +6,7 @@
# / # /
""" """
requests HTTP library Requests HTTP library
~~~~~~~~~~~~~~~~~~~~~ ~~~~~~~~~~~~~~~~~~~~~
Requests is an HTTP library, written in Python, for human beings. Basic GET Requests is an HTTP library, written in Python, for human beings. Basic GET
@ -36,17 +36,17 @@ usage:
The other HTTP methods are supported - see `requests.api`. Full documentation The other HTTP methods are supported - see `requests.api`. Full documentation
is at <http://python-requests.org>. is at <http://python-requests.org>.
:copyright: (c) 2014 by Kenneth Reitz. :copyright: (c) 2015 by Kenneth Reitz.
:license: Apache 2.0, see LICENSE for more details. :license: Apache 2.0, see LICENSE for more details.
""" """
__title__ = 'requests' __title__ = 'requests'
__version__ = '2.5.1' __version__ = '2.7.0'
__build__ = 0x020501 __build__ = 0x020700
__author__ = 'Kenneth Reitz' __author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0' __license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2014 Kenneth Reitz' __copyright__ = 'Copyright 2015 Kenneth Reitz'
# Attempt to enable urllib3's SNI support, if possible # Attempt to enable urllib3's SNI support, if possible
try: try:

View File

@ -11,10 +11,10 @@ and maintain connections.
import socket import socket
from .models import Response from .models import Response
from .packages.urllib3 import Retry
from .packages.urllib3.poolmanager import PoolManager, proxy_from_url from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
from .packages.urllib3.response import HTTPResponse from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.util import Timeout as TimeoutSauce from .packages.urllib3.util import Timeout as TimeoutSauce
from .packages.urllib3.util.retry import Retry
from .compat import urlparse, basestring from .compat import urlparse, basestring
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers, from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
prepend_scheme_if_needed, get_auth_from_url, urldefragauth) prepend_scheme_if_needed, get_auth_from_url, urldefragauth)

View File

@ -16,7 +16,6 @@ from . import sessions
def request(method, url, **kwargs): def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`. """Constructs and sends a :class:`Request <Request>`.
Returns :class:`Response <Response>` object.
:param method: method for the new :class:`Request` object. :param method: method for the new :class:`Request` object.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
@ -37,6 +36,8 @@ def request(method, url, **kwargs):
:param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided. :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
:param stream: (optional) if ``False``, the response content will be immediately downloaded. :param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair. :param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
:return: :class:`Response <Response>` object
:rtype: requests.Response
Usage:: Usage::
@ -54,22 +55,27 @@ def request(method, url, **kwargs):
return response return response
def get(url, **kwargs): def get(url, params=None, **kwargs):
"""Sends a GET request. Returns :class:`Response` object. """Sends a GET request.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
""" """
kwargs.setdefault('allow_redirects', True) kwargs.setdefault('allow_redirects', True)
return request('get', url, **kwargs) return request('get', url, params=params, **kwargs)
def options(url, **kwargs): def options(url, **kwargs):
"""Sends a OPTIONS request. Returns :class:`Response` object. """Sends a OPTIONS request.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
""" """
kwargs.setdefault('allow_redirects', True) kwargs.setdefault('allow_redirects', True)
@ -77,10 +83,12 @@ def options(url, **kwargs):
def head(url, **kwargs): def head(url, **kwargs):
"""Sends a HEAD request. Returns :class:`Response` object. """Sends a HEAD request.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
""" """
kwargs.setdefault('allow_redirects', False) kwargs.setdefault('allow_redirects', False)
@ -88,44 +96,52 @@ def head(url, **kwargs):
def post(url, data=None, json=None, **kwargs): def post(url, data=None, json=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object. """Sends a POST request.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param json: (optional) json data to send in the body of the :class:`Request`. :param json: (optional) json data to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
""" """
return request('post', url, data=data, json=json, **kwargs) return request('post', url, data=data, json=json, **kwargs)
def put(url, data=None, **kwargs): def put(url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object. """Sends a PUT request.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
""" """
return request('put', url, data=data, **kwargs) return request('put', url, data=data, **kwargs)
def patch(url, data=None, **kwargs): def patch(url, data=None, **kwargs):
"""Sends a PATCH request. Returns :class:`Response` object. """Sends a PATCH request.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`. :param data: (optional) Dictionary, bytes, or file-like object to send in the body of the :class:`Request`.
:param \*\*kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
""" """
return request('patch', url, data=data, **kwargs) return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs): def delete(url, **kwargs):
"""Sends a DELETE request. Returns :class:`Response` object. """Sends a DELETE request.
:param url: URL for the new :class:`Request` object. :param url: URL for the new :class:`Request` object.
:param \*\*kwargs: Optional arguments that ``request`` takes. :param \*\*kwargs: Optional arguments that ``request`` takes.
:return: :class:`Response <Response>` object
:rtype: requests.Response
""" """
return request('delete', url, **kwargs) return request('delete', url, **kwargs)

View File

@ -103,7 +103,8 @@ class HTTPDigestAuth(AuthBase):
# XXX not implemented yet # XXX not implemented yet
entdig = None entdig = None
p_parsed = urlparse(url) p_parsed = urlparse(url)
path = p_parsed.path #: path is request-uri defined in RFC 2616 which should not be empty
path = p_parsed.path or "/"
if p_parsed.query: if p_parsed.query:
path += '?' + p_parsed.query path += '?' + p_parsed.query
@ -124,13 +125,15 @@ class HTTPDigestAuth(AuthBase):
s += os.urandom(8) s += os.urandom(8)
cnonce = (hashlib.sha1(s).hexdigest()[:16]) cnonce = (hashlib.sha1(s).hexdigest()[:16])
noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, HA2)
if _algorithm == 'MD5-SESS': if _algorithm == 'MD5-SESS':
HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce)) HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
if qop is None: if qop is None:
respdig = KD(HA1, "%s:%s" % (nonce, HA2)) respdig = KD(HA1, "%s:%s" % (nonce, HA2))
elif qop == 'auth' or 'auth' in qop.split(','): elif qop == 'auth' or 'auth' in qop.split(','):
noncebit = "%s:%s:%s:%s:%s" % (
nonce, ncvalue, cnonce, 'auth', HA2
)
respdig = KD(HA1, noncebit) respdig = KD(HA1, noncebit)
else: else:
# XXX handle auth-int. # XXX handle auth-int.
@ -176,7 +179,7 @@ class HTTPDigestAuth(AuthBase):
# Consume content and release the original connection # Consume content and release the original connection
# to allow our new request to reuse the same one. # to allow our new request to reuse the same one.
r.content r.content
r.raw.release_conn() r.close()
prep = r.request.copy() prep = r.request.copy()
extract_cookies_to_jar(prep._cookies, r.request, r.raw) extract_cookies_to_jar(prep._cookies, r.request, r.raw)
prep.prepare_cookies(prep._cookies) prep.prepare_cookies(prep._cookies)

View File

@ -21,58 +21,6 @@ is_py2 = (_ver[0] == 2)
#: Python 3.x? #: Python 3.x?
is_py3 = (_ver[0] == 3) is_py3 = (_ver[0] == 3)
#: Python 3.0.x
is_py30 = (is_py3 and _ver[1] == 0)
#: Python 3.1.x
is_py31 = (is_py3 and _ver[1] == 1)
#: Python 3.2.x
is_py32 = (is_py3 and _ver[1] == 2)
#: Python 3.3.x
is_py33 = (is_py3 and _ver[1] == 3)
#: Python 3.4.x
is_py34 = (is_py3 and _ver[1] == 4)
#: Python 2.7.x
is_py27 = (is_py2 and _ver[1] == 7)
#: Python 2.6.x
is_py26 = (is_py2 and _ver[1] == 6)
#: Python 2.5.x
is_py25 = (is_py2 and _ver[1] == 5)
#: Python 2.4.x
is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice.
# ---------
# Platforms
# ---------
# Syntax sugar.
_ver = sys.version.lower()
is_pypy = ('pypy' in _ver)
is_jython = ('jython' in _ver)
is_ironpython = ('iron' in _ver)
# Assume CPython, if nothing else.
is_cpython = not any((is_pypy, is_jython, is_ironpython))
# Windows-based system.
is_windows = 'win32' in str(sys.platform).lower()
# Standard Linux 2+ system.
is_linux = ('linux' in str(sys.platform).lower())
is_osx = ('darwin' in str(sys.platform).lower())
is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess.
is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess.
try: try:
import simplejson as json import simplejson as json
except (ImportError, SyntaxError): except (ImportError, SyntaxError):
@ -99,7 +47,6 @@ if is_py2:
basestring = basestring basestring = basestring
numeric_types = (int, long, float) numeric_types = (int, long, float)
elif is_py3: elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass from urllib.request import parse_http_list, getproxies, proxy_bypass

View File

@ -6,6 +6,7 @@ Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports. requests.utils imports from here, so be careful with imports.
""" """
import copy
import time import time
import collections import collections
from .compat import cookielib, urlparse, urlunparse, Morsel from .compat import cookielib, urlparse, urlunparse, Morsel
@ -157,26 +158,28 @@ class CookieConflictError(RuntimeError):
class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping): class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict interface. """Compatibility class; is a cookielib.CookieJar, but exposes a dict
interface.
This is the CookieJar we create by default for requests and sessions that This is the CookieJar we create by default for requests and sessions that
don't specify one, since some clients may expect response.cookies and don't specify one, since some clients may expect response.cookies and
session.cookies to support dict operations. session.cookies to support dict operations.
Don't use the dict interface internally; it's just for compatibility with Requests does not use the dict interface internally; it's just for
with external client code. All `requests` code should work out of the box compatibility with external client code. All requests code should work
with externally provided instances of CookieJar, e.g., LWPCookieJar and out of the box with externally provided instances of ``CookieJar``, e.g.
FileCookieJar. ``LWPCookieJar`` and ``FileCookieJar``.
Caution: dictionary operations that are normally O(1) may be O(n).
Unlike a regular CookieJar, this class is pickleable. Unlike a regular CookieJar, this class is pickleable.
"""
.. warning:: dictionary operations that are normally O(1) may be O(n).
"""
def get(self, name, default=None, domain=None, path=None): def get(self, name, default=None, domain=None, path=None):
"""Dict-like get() that also supports optional domain and path args in """Dict-like get() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over order to resolve naming collisions from using one cookie jar over
multiple domains. Caution: operation is O(n), not O(1).""" multiple domains.
.. warning:: operation is O(n), not O(1)."""
try: try:
return self._find_no_duplicates(name, domain, path) return self._find_no_duplicates(name, domain, path)
except KeyError: except KeyError:
@ -199,37 +202,38 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return c return c
def iterkeys(self): def iterkeys(self):
"""Dict-like iterkeys() that returns an iterator of names of cookies from the jar. """Dict-like iterkeys() that returns an iterator of names of cookies
See itervalues() and iteritems().""" from the jar. See itervalues() and iteritems()."""
for cookie in iter(self): for cookie in iter(self):
yield cookie.name yield cookie.name
def keys(self): def keys(self):
"""Dict-like keys() that returns a list of names of cookies from the jar. """Dict-like keys() that returns a list of names of cookies from the
See values() and items().""" jar. See values() and items()."""
return list(self.iterkeys()) return list(self.iterkeys())
def itervalues(self): def itervalues(self):
"""Dict-like itervalues() that returns an iterator of values of cookies from the jar. """Dict-like itervalues() that returns an iterator of values of cookies
See iterkeys() and iteritems().""" from the jar. See iterkeys() and iteritems()."""
for cookie in iter(self): for cookie in iter(self):
yield cookie.value yield cookie.value
def values(self): def values(self):
"""Dict-like values() that returns a list of values of cookies from the jar. """Dict-like values() that returns a list of values of cookies from the
See keys() and items().""" jar. See keys() and items()."""
return list(self.itervalues()) return list(self.itervalues())
def iteritems(self): def iteritems(self):
"""Dict-like iteritems() that returns an iterator of name-value tuples from the jar. """Dict-like iteritems() that returns an iterator of name-value tuples
See iterkeys() and itervalues().""" from the jar. See iterkeys() and itervalues()."""
for cookie in iter(self): for cookie in iter(self):
yield cookie.name, cookie.value yield cookie.name, cookie.value
def items(self): def items(self):
"""Dict-like items() that returns a list of name-value tuples from the jar. """Dict-like items() that returns a list of name-value tuples from the
See keys() and values(). Allows client-code to call "dict(RequestsCookieJar) jar. See keys() and values(). Allows client-code to call
and get a vanilla python dict of key value pairs.""" ``dict(RequestsCookieJar)`` and get a vanilla python dict of key value
pairs."""
return list(self.iteritems()) return list(self.iteritems())
def list_domains(self): def list_domains(self):
@ -259,8 +263,9 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return False # there is only one domain in jar return False # there is only one domain in jar
def get_dict(self, domain=None, path=None): def get_dict(self, domain=None, path=None):
"""Takes as an argument an optional domain and path and returns a plain old """Takes as an argument an optional domain and path and returns a plain
Python dict of name-value pairs of cookies that meet the requirements.""" old Python dict of name-value pairs of cookies that meet the
requirements."""
dictionary = {} dictionary = {}
for cookie in iter(self): for cookie in iter(self):
if (domain is None or cookie.domain == domain) and (path is None if (domain is None or cookie.domain == domain) and (path is None
@ -269,21 +274,24 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return dictionary return dictionary
def __getitem__(self, name): def __getitem__(self, name):
"""Dict-like __getitem__() for compatibility with client code. Throws exception """Dict-like __getitem__() for compatibility with client code. Throws
if there are more than one cookie with name. In that case, use the more exception if there are more than one cookie with name. In that case,
explicit get() method instead. Caution: operation is O(n), not O(1).""" use the more explicit get() method instead.
.. warning:: operation is O(n), not O(1)."""
return self._find_no_duplicates(name) return self._find_no_duplicates(name)
def __setitem__(self, name, value): def __setitem__(self, name, value):
"""Dict-like __setitem__ for compatibility with client code. Throws exception """Dict-like __setitem__ for compatibility with client code. Throws
if there is already a cookie of that name in the jar. In that case, use the more exception if there is already a cookie of that name in the jar. In that
explicit set() method instead.""" case, use the more explicit set() method instead."""
self.set(name, value) self.set(name, value)
def __delitem__(self, name): def __delitem__(self, name):
"""Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name().""" """Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
``remove_cookie_by_name()``."""
remove_cookie_by_name(self, name) remove_cookie_by_name(self, name)
def set_cookie(self, cookie, *args, **kwargs): def set_cookie(self, cookie, *args, **kwargs):
@ -295,15 +303,16 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
"""Updates this jar with cookies from another CookieJar or dict-like""" """Updates this jar with cookies from another CookieJar or dict-like"""
if isinstance(other, cookielib.CookieJar): if isinstance(other, cookielib.CookieJar):
for cookie in other: for cookie in other:
self.set_cookie(cookie) self.set_cookie(copy.copy(cookie))
else: else:
super(RequestsCookieJar, self).update(other) super(RequestsCookieJar, self).update(other)
def _find(self, name, domain=None, path=None): def _find(self, name, domain=None, path=None):
"""Requests uses this method internally to get cookie values. Takes as args name """Requests uses this method internally to get cookie values. Takes as
and optional domain and path. Returns a cookie.value. If there are conflicting cookies, args name and optional domain and path. Returns a cookie.value. If
_find arbitrarily chooses one. See _find_no_duplicates if you want an exception thrown there are conflicting cookies, _find arbitrarily chooses one. See
if there are conflicting cookies.""" _find_no_duplicates if you want an exception thrown if there are
conflicting cookies."""
for cookie in iter(self): for cookie in iter(self):
if cookie.name == name: if cookie.name == name:
if domain is None or cookie.domain == domain: if domain is None or cookie.domain == domain:
@ -313,10 +322,11 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path)) raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def _find_no_duplicates(self, name, domain=None, path=None): def _find_no_duplicates(self, name, domain=None, path=None):
"""__get_item__ and get call _find_no_duplicates -- never used in Requests internally. """Both ``__get_item__`` and ``get`` call this function: it's never
Takes as args name and optional domain and path. Returns a cookie.value. used elsewhere in Requests. Takes as args name and optional domain and
Throws KeyError if cookie is not found and CookieConflictError if there are path. Returns a cookie.value. Throws KeyError if cookie is not found
multiple cookies that match name and optionally domain and path.""" and CookieConflictError if there are multiple cookies that match name
and optionally domain and path."""
toReturn = None toReturn = None
for cookie in iter(self): for cookie in iter(self):
if cookie.name == name: if cookie.name == name:
@ -350,6 +360,21 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
return new_cj return new_cj
def _copy_cookie_jar(jar):
if jar is None:
return None
if hasattr(jar, 'copy'):
# We're dealing with an instance of RequestsCookieJar
return jar.copy()
# We're dealing with a generic CookieJar instance
new_jar = copy.copy(jar)
new_jar.clear()
for cookie in jar:
new_jar.set_cookie(copy.copy(cookie))
return new_jar
def create_cookie(name, value, **kwargs): def create_cookie(name, value, **kwargs):
"""Make a cookie from underspecified parameters. """Make a cookie from underspecified parameters.
@ -390,11 +415,14 @@ def morsel_to_cookie(morsel):
expires = None expires = None
if morsel['max-age']: if morsel['max-age']:
expires = time.time() + morsel['max-age'] try:
expires = int(time.time() + int(morsel['max-age']))
except ValueError:
raise TypeError('max-age: %s must be integer' % morsel['max-age'])
elif morsel['expires']: elif morsel['expires']:
time_template = '%a, %d-%b-%Y %H:%M:%S GMT' time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
expires = time.mktime( expires = int(time.mktime(
time.strptime(morsel['expires'], time_template)) - time.timezone time.strptime(morsel['expires'], time_template)) - time.timezone)
return create_cookie( return create_cookie(
comment=morsel['comment'], comment=morsel['comment'],
comment_url=bool(morsel['comment']), comment_url=bool(morsel['comment']),
@ -440,7 +468,7 @@ def merge_cookies(cookiejar, cookies):
""" """
if not isinstance(cookiejar, cookielib.CookieJar): if not isinstance(cookiejar, cookielib.CookieJar):
raise ValueError('You can only merge into CookieJar') raise ValueError('You can only merge into CookieJar')
if isinstance(cookies, dict): if isinstance(cookies, dict):
cookiejar = cookiejar_from_dict( cookiejar = cookiejar_from_dict(
cookies, cookiejar=cookiejar, overwrite=False) cookies, cookiejar=cookiejar, overwrite=False)

View File

@ -15,7 +15,7 @@ from .hooks import default_hooks
from .structures import CaseInsensitiveDict from .structures import CaseInsensitiveDict
from .auth import HTTPBasicAuth from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header from .cookies import cookiejar_from_dict, get_cookie_header, _copy_cookie_jar
from .packages.urllib3.fields import RequestField from .packages.urllib3.fields import RequestField
from .packages.urllib3.filepost import encode_multipart_formdata from .packages.urllib3.filepost import encode_multipart_formdata
from .packages.urllib3.util import parse_url from .packages.urllib3.util import parse_url
@ -30,7 +30,8 @@ from .utils import (
iter_slices, guess_json_utf, super_len, to_native_string) iter_slices, guess_json_utf, super_len, to_native_string)
from .compat import ( from .compat import (
cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO, cookielib, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring) is_py2, chardet, builtin_str, basestring)
from .compat import json as complexjson
from .status_codes import codes from .status_codes import codes
#: The set of HTTP status codes that indicate an automatically #: The set of HTTP status codes that indicate an automatically
@ -42,12 +43,11 @@ REDIRECT_STATI = (
codes.temporary_redirect, # 307 codes.temporary_redirect, # 307
codes.permanent_redirect, # 308 codes.permanent_redirect, # 308
) )
DEFAULT_REDIRECT_LIMIT = 30 DEFAULT_REDIRECT_LIMIT = 30
CONTENT_CHUNK_SIZE = 10 * 1024 CONTENT_CHUNK_SIZE = 10 * 1024
ITER_CHUNK_SIZE = 512 ITER_CHUNK_SIZE = 512
json_dumps = json.dumps
class RequestEncodingMixin(object): class RequestEncodingMixin(object):
@property @property
@ -143,13 +143,13 @@ class RequestEncodingMixin(object):
else: else:
fn = guess_filename(v) or k fn = guess_filename(v) or k
fp = v fp = v
if isinstance(fp, str):
fp = StringIO(fp)
if isinstance(fp, bytes):
fp = BytesIO(fp)
rf = RequestField(name=k, data=fp.read(), if isinstance(fp, (str, bytes, bytearray)):
filename=fn, headers=fh) fdata = fp
else:
fdata = fp.read()
rf = RequestField(name=k, data=fdata, filename=fn, headers=fh)
rf.make_multipart(content_type=ft) rf.make_multipart(content_type=ft)
new_fields.append(rf) new_fields.append(rf)
@ -206,17 +206,8 @@ class Request(RequestHooksMixin):
<PreparedRequest [GET]> <PreparedRequest [GET]>
""" """
def __init__(self, def __init__(self, method=None, url=None, headers=None, files=None,
method=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
url=None,
headers=None,
files=None,
data=None,
params=None,
auth=None,
cookies=None,
hooks=None,
json=None):
# Default empty dicts for dict params. # Default empty dicts for dict params.
data = [] if data is None else data data = [] if data is None else data
@ -295,8 +286,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.hooks = default_hooks() self.hooks = default_hooks()
def prepare(self, method=None, url=None, headers=None, files=None, def prepare(self, method=None, url=None, headers=None, files=None,
data=None, params=None, auth=None, cookies=None, hooks=None, data=None, params=None, auth=None, cookies=None, hooks=None, json=None):
json=None):
"""Prepares the entire request with the given parameters.""" """Prepares the entire request with the given parameters."""
self.prepare_method(method) self.prepare_method(method)
@ -305,6 +295,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.prepare_cookies(cookies) self.prepare_cookies(cookies)
self.prepare_body(data, files, json) self.prepare_body(data, files, json)
self.prepare_auth(auth, url) self.prepare_auth(auth, url)
# Note that prepare_auth must be last to enable authentication schemes # Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request. # such as OAuth to work on a fully prepared request.
@ -319,7 +310,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
p.method = self.method p.method = self.method
p.url = self.url p.url = self.url
p.headers = self.headers.copy() if self.headers is not None else None p.headers = self.headers.copy() if self.headers is not None else None
p._cookies = self._cookies.copy() if self._cookies is not None else None p._cookies = _copy_cookie_jar(self._cookies)
p.body = self.body p.body = self.body
p.hooks = self.hooks p.hooks = self.hooks
return p return p
@ -356,8 +347,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise InvalidURL(*e.args) raise InvalidURL(*e.args)
if not scheme: if not scheme:
raise MissingSchema("Invalid URL {0!r}: No schema supplied. " error = ("Invalid URL {0!r}: No schema supplied. Perhaps you meant http://{0}?")
"Perhaps you meant http://{0}?".format(url)) error = error.format(to_native_string(url, 'utf8'))
raise MissingSchema(error)
if not host: if not host:
raise InvalidURL("Invalid URL %r: No host supplied" % url) raise InvalidURL("Invalid URL %r: No host supplied" % url)
@ -423,7 +416,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if json is not None: if json is not None:
content_type = 'application/json' content_type = 'application/json'
body = json_dumps(json) body = complexjson.dumps(json)
is_stream = all([ is_stream = all([
hasattr(data, '__iter__'), hasattr(data, '__iter__'),
@ -500,7 +493,15 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.prepare_content_length(self.body) self.prepare_content_length(self.body)
def prepare_cookies(self, cookies): def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data.""" """Prepares the given HTTP cookie data.
This function eventually generates a ``Cookie`` header from the
given cookies using cookielib. Due to cookielib's design, the header
will not be regenerated if it already exists, meaning this function
can only be called once for the life of the
:class:`PreparedRequest <PreparedRequest>` object. Any subsequent calls
to ``prepare_cookies`` will have no actual effect, unless the "Cookie"
header is removed beforehand."""
if isinstance(cookies, cookielib.CookieJar): if isinstance(cookies, cookielib.CookieJar):
self._cookies = cookies self._cookies = cookies
@ -513,6 +514,10 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
def prepare_hooks(self, hooks): def prepare_hooks(self, hooks):
"""Prepares the given hooks.""" """Prepares the given hooks."""
# hooks can be passed as None to the prepare method and to this
# method. To prevent iterating over None, simply use an empty list
# if hooks is False-y
hooks = hooks or []
for event in hooks: for event in hooks:
self.register_hook(event, hooks[event]) self.register_hook(event, hooks[event])
@ -523,16 +528,8 @@ class Response(object):
""" """
__attrs__ = [ __attrs__ = [
'_content', '_content', 'status_code', 'headers', 'url', 'history',
'status_code', 'encoding', 'reason', 'cookies', 'elapsed', 'request'
'headers',
'url',
'history',
'encoding',
'reason',
'cookies',
'elapsed',
'request',
] ]
def __init__(self): def __init__(self):
@ -572,7 +569,11 @@ class Response(object):
self.cookies = cookiejar_from_dict({}) self.cookies = cookiejar_from_dict({})
#: The amount of time elapsed between sending the request #: The amount of time elapsed between sending the request
#: and the arrival of the response (as a timedelta) #: and the arrival of the response (as a timedelta).
#: This property specifically measures the time taken between sending
#: the first byte of the request and finishing parsing the headers. It
#: is therefore unaffected by consuming the response content or the
#: value of the ``stream`` keyword argument.
self.elapsed = datetime.timedelta(0) self.elapsed = datetime.timedelta(0)
#: The :class:`PreparedRequest <PreparedRequest>` object to which this #: The :class:`PreparedRequest <PreparedRequest>` object to which this
@ -648,9 +649,10 @@ class Response(object):
If decode_unicode is True, content will be decoded using the best If decode_unicode is True, content will be decoded using the best
available encoding based on the response. available encoding based on the response.
""" """
def generate(): def generate():
try: # Special case for urllib3.
# Special case for urllib3. if hasattr(self.raw, 'stream'):
try: try:
for chunk in self.raw.stream(chunk_size, decode_content=True): for chunk in self.raw.stream(chunk_size, decode_content=True):
yield chunk yield chunk
@ -660,7 +662,7 @@ class Response(object):
raise ContentDecodingError(e) raise ContentDecodingError(e)
except ReadTimeoutError as e: except ReadTimeoutError as e:
raise ConnectionError(e) raise ConnectionError(e)
except AttributeError: else:
# Standard file-like object. # Standard file-like object.
while True: while True:
chunk = self.raw.read(chunk_size) chunk = self.raw.read(chunk_size)
@ -688,6 +690,8 @@ class Response(object):
"""Iterates over the response data, one line at a time. When """Iterates over the response data, one line at a time. When
stream=True is set on the request, this avoids reading the stream=True is set on the request, this avoids reading the
content at once into memory for large responses. content at once into memory for large responses.
.. note:: This method is not reentrant safe.
""" """
pending = None pending = None
@ -789,14 +793,16 @@ class Response(object):
encoding = guess_json_utf(self.content) encoding = guess_json_utf(self.content)
if encoding is not None: if encoding is not None:
try: try:
return json.loads(self.content.decode(encoding), **kwargs) return complexjson.loads(
self.content.decode(encoding), **kwargs
)
except UnicodeDecodeError: except UnicodeDecodeError:
# Wrong UTF codec detected; usually because it's not UTF-8 # Wrong UTF codec detected; usually because it's not UTF-8
# but some other 8-bit codec. This is an RFC violation, # but some other 8-bit codec. This is an RFC violation,
# and the server didn't bother to tell us what codec *was* # and the server didn't bother to tell us what codec *was*
# used. # used.
pass pass
return json.loads(self.text, **kwargs) return complexjson.loads(self.text, **kwargs)
@property @property
def links(self): def links(self):

View File

@ -0,0 +1,8 @@
If you are planning to submit a pull request to requests with any changes in
this library do not go any further. These are independent libraries which we
vendor into requests. Any changes necessary to these libraries must be made in
them and submitted as separate pull requests to those libraries.
urllib3 pull requests go here: https://github.com/shazow/urllib3
chardet pull requests go here: https://github.com/chardet/chardet

View File

@ -4,7 +4,7 @@ urllib3 - Thread-safe connection pooling and re-using.
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)' __author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT' __license__ = 'MIT'
__version__ = 'dev' __version__ = '1.10.4'
from .connectionpool import ( from .connectionpool import (
@ -55,9 +55,12 @@ def add_stderr_logger(level=logging.DEBUG):
del NullHandler del NullHandler
# Set security warning to only go off once by default.
import warnings import warnings
warnings.simplefilter('always', exceptions.SecurityWarning) # SecurityWarning's always go off by default.
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
append=True)
def disable_warnings(category=exceptions.HTTPWarning): def disable_warnings(category=exceptions.HTTPWarning):
""" """

View File

@ -1,7 +1,7 @@
from collections import Mapping, MutableMapping from collections import Mapping, MutableMapping
try: try:
from threading import RLock from threading import RLock
except ImportError: # Platform-specific: No threads available except ImportError: # Platform-specific: No threads available
class RLock: class RLock:
def __enter__(self): def __enter__(self):
pass pass
@ -10,11 +10,11 @@ except ImportError: # Platform-specific: No threads available
pass pass
try: # Python 2.7+ try: # Python 2.7+
from collections import OrderedDict from collections import OrderedDict
except ImportError: except ImportError:
from .packages.ordered_dict import OrderedDict from .packages.ordered_dict import OrderedDict
from .packages.six import iterkeys, itervalues from .packages.six import iterkeys, itervalues, PY3
__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict'] __all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
@ -97,7 +97,14 @@ class RecentlyUsedContainer(MutableMapping):
return list(iterkeys(self._container)) return list(iterkeys(self._container))
class HTTPHeaderDict(MutableMapping): _dict_setitem = dict.__setitem__
_dict_getitem = dict.__getitem__
_dict_delitem = dict.__delitem__
_dict_contains = dict.__contains__
_dict_setdefault = dict.setdefault
class HTTPHeaderDict(dict):
""" """
:param headers: :param headers:
An iterable of field-value pairs. Must not contain multiple field names An iterable of field-value pairs. Must not contain multiple field names
@ -129,25 +136,75 @@ class HTTPHeaderDict(MutableMapping):
'foo=bar, baz=quxx' 'foo=bar, baz=quxx'
>>> headers['Content-Length'] >>> headers['Content-Length']
'7' '7'
If you want to access the raw headers with their original casing
for debugging purposes you can access the private ``._data`` attribute
which is a normal python ``dict`` that maps the case-insensitive key to a
list of tuples stored as (case-sensitive-original-name, value). Using the
structure from above as our example:
>>> headers._data
{'set-cookie': [('Set-Cookie', 'foo=bar'), ('set-cookie', 'baz=quxx')],
'content-length': [('content-length', '7')]}
""" """
def __init__(self, headers=None, **kwargs): def __init__(self, headers=None, **kwargs):
self._data = {} dict.__init__(self)
if headers is None: if headers is not None:
headers = {} if isinstance(headers, HTTPHeaderDict):
self.update(headers, **kwargs) self._copy_from(headers)
else:
self.extend(headers)
if kwargs:
self.extend(kwargs)
def add(self, key, value): def __setitem__(self, key, val):
return _dict_setitem(self, key.lower(), (key, val))
def __getitem__(self, key):
val = _dict_getitem(self, key.lower())
return ', '.join(val[1:])
def __delitem__(self, key):
return _dict_delitem(self, key.lower())
def __contains__(self, key):
return _dict_contains(self, key.lower())
def __eq__(self, other):
if not isinstance(other, Mapping) and not hasattr(other, 'keys'):
return False
if not isinstance(other, type(self)):
other = type(self)(other)
return dict((k1, self[k1]) for k1 in self) == dict((k2, other[k2]) for k2 in other)
def __ne__(self, other):
return not self.__eq__(other)
values = MutableMapping.values
get = MutableMapping.get
update = MutableMapping.update
if not PY3: # Python 2
iterkeys = MutableMapping.iterkeys
itervalues = MutableMapping.itervalues
__marker = object()
def pop(self, key, default=__marker):
'''D.pop(k[,d]) -> v, remove specified key and return the corresponding value.
If key is not found, d is returned if given, otherwise KeyError is raised.
'''
# Using the MutableMapping function directly fails due to the private marker.
# Using ordinary dict.pop would expose the internal structures.
# So let's reinvent the wheel.
try:
value = self[key]
except KeyError:
if default is self.__marker:
raise
return default
else:
del self[key]
return value
def discard(self, key):
try:
del self[key]
except KeyError:
pass
def add(self, key, val):
"""Adds a (name, value) pair, doesn't overwrite the value if it already """Adds a (name, value) pair, doesn't overwrite the value if it already
exists. exists.
@ -156,43 +213,111 @@ class HTTPHeaderDict(MutableMapping):
>>> headers['foo'] >>> headers['foo']
'bar, baz' 'bar, baz'
""" """
self._data.setdefault(key.lower(), []).append((key, value)) key_lower = key.lower()
new_vals = key, val
# Keep the common case aka no item present as fast as possible
vals = _dict_setdefault(self, key_lower, new_vals)
if new_vals is not vals:
# new_vals was not inserted, as there was a previous one
if isinstance(vals, list):
# If already several items got inserted, we have a list
vals.append(val)
else:
# vals should be a tuple then, i.e. only one item so far
# Need to convert the tuple to list for further extension
_dict_setitem(self, key_lower, [vals[0], vals[1], val])
def extend(self, *args, **kwargs):
"""Generic import function for any type of header-like object.
Adapted version of MutableMapping.update in order to insert items
with self.add instead of self.__setitem__
"""
if len(args) > 1:
raise TypeError("extend() takes at most 1 positional "
"arguments ({} given)".format(len(args)))
other = args[0] if len(args) >= 1 else ()
if isinstance(other, HTTPHeaderDict):
for key, val in other.iteritems():
self.add(key, val)
elif isinstance(other, Mapping):
for key in other:
self.add(key, other[key])
elif hasattr(other, "keys"):
for key in other.keys():
self.add(key, other[key])
else:
for key, value in other:
self.add(key, value)
for key, value in kwargs.items():
self.add(key, value)
def getlist(self, key): def getlist(self, key):
"""Returns a list of all the values for the named field. Returns an """Returns a list of all the values for the named field. Returns an
empty list if the key doesn't exist.""" empty list if the key doesn't exist."""
return self[key].split(', ') if key in self else [] try:
vals = _dict_getitem(self, key.lower())
except KeyError:
return []
else:
if isinstance(vals, tuple):
return [vals[1]]
else:
return vals[1:]
def copy(self): # Backwards compatibility for httplib
h = HTTPHeaderDict() getheaders = getlist
for key in self._data: getallmatchingheaders = getlist
for rawkey, value in self._data[key]: iget = getlist
h.add(rawkey, value)
return h
def __eq__(self, other):
if not isinstance(other, Mapping):
return False
other = HTTPHeaderDict(other)
return dict((k1, self[k1]) for k1 in self._data) == \
dict((k2, other[k2]) for k2 in other._data)
def __getitem__(self, key):
values = self._data[key.lower()]
return ', '.join(value[1] for value in values)
def __setitem__(self, key, value):
self._data[key.lower()] = [(key, value)]
def __delitem__(self, key):
del self._data[key.lower()]
def __len__(self):
return len(self._data)
def __iter__(self):
for headers in itervalues(self._data):
yield headers[0][0]
def __repr__(self): def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, dict(self.items())) return "%s(%s)" % (type(self).__name__, dict(self.itermerged()))
def _copy_from(self, other):
for key in other:
val = _dict_getitem(other, key)
if isinstance(val, list):
# Don't need to convert tuples
val = list(val)
_dict_setitem(self, key, val)
def copy(self):
clone = type(self)()
clone._copy_from(self)
return clone
def iteritems(self):
"""Iterate over all header lines, including duplicate ones."""
for key in self:
vals = _dict_getitem(self, key)
for val in vals[1:]:
yield vals[0], val
def itermerged(self):
"""Iterate over all headers, merging duplicate ones together."""
for key in self:
val = _dict_getitem(self, key)
yield val[0], ', '.join(val[1:])
def items(self):
return list(self.iteritems())
@classmethod
def from_httplib(cls, message): # Python 2
"""Read headers from a Python 2 httplib message object."""
# python2.7 does not expose a proper API for exporting multiheaders
# efficiently. This function re-reads raw lines from the message
# object and extracts the multiheaders properly.
headers = []
for line in message.headers:
if line.startswith((' ', '\t')):
key, value = headers[-1]
headers[-1] = (key, value + '\r\n' + line.rstrip())
continue
key, value = line.split(':', 1)
headers.append((key, value.strip()))
return cls(headers)

View File

@ -260,3 +260,5 @@ if ssl:
# Make a copy for testing. # Make a copy for testing.
UnverifiedHTTPSConnection = HTTPSConnection UnverifiedHTTPSConnection = HTTPSConnection
HTTPSConnection = VerifiedHTTPSConnection HTTPSConnection = VerifiedHTTPSConnection
else:
HTTPSConnection = DummyConnection

View File

@ -72,6 +72,21 @@ class ConnectionPool(object):
return '%s(host=%r, port=%r)' % (type(self).__name__, return '%s(host=%r, port=%r)' % (type(self).__name__,
self.host, self.port) self.host, self.port)
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
# Return False to re-raise any potential exceptions
return False
def close():
"""
Close all pooled connections and disable the pool.
"""
pass
# This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252 # This is taken from http://hg.python.org/cpython/file/7aaba721ebc0/Lib/socket.py#l252
_blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK]) _blocking_errnos = set([errno.EAGAIN, errno.EWOULDBLOCK])
@ -266,6 +281,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
""" """
pass pass
def _prepare_proxy(self, conn):
# Nothing to do for HTTP connections.
pass
def _get_timeout(self, timeout): def _get_timeout(self, timeout):
""" Helper that always returns a :class:`urllib3.util.Timeout` """ """ Helper that always returns a :class:`urllib3.util.Timeout` """
if timeout is _Default: if timeout is _Default:
@ -349,7 +368,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# Receive the response from the server # Receive the response from the server
try: try:
try: # Python 2.7+, use buffering of HTTP responses try: # Python 2.7, use buffering of HTTP responses
httplib_response = conn.getresponse(buffering=True) httplib_response = conn.getresponse(buffering=True)
except TypeError: # Python 2.6 and older except TypeError: # Python 2.6 and older
httplib_response = conn.getresponse() httplib_response = conn.getresponse()
@ -510,11 +529,18 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
try: try:
# Request a connection from the queue. # Request a connection from the queue.
timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout) conn = self._get_conn(timeout=pool_timeout)
conn.timeout = timeout_obj.connect_timeout
is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
if is_new_proxy_conn:
self._prepare_proxy(conn)
# Make the request on the httplib connection object. # Make the request on the httplib connection object.
httplib_response = self._make_request(conn, method, url, httplib_response = self._make_request(conn, method, url,
timeout=timeout, timeout=timeout_obj,
body=body, headers=headers) body=body, headers=headers)
# If we're going to release the connection in ``finally:``, then # If we're going to release the connection in ``finally:``, then
@ -547,6 +573,14 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn = None conn = None
raise SSLError(e) raise SSLError(e)
except SSLError:
# Treat SSLError separately from BaseSSLError to preserve
# traceback.
if conn:
conn.close()
conn = None
raise
except (TimeoutError, HTTPException, SocketError, ConnectionError) as e: except (TimeoutError, HTTPException, SocketError, ConnectionError) as e:
if conn: if conn:
# Discard the connection for these exceptions. It will be # Discard the connection for these exceptions. It will be
@ -554,14 +588,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn.close() conn.close()
conn = None conn = None
stacktrace = sys.exc_info()[2]
if isinstance(e, SocketError) and self.proxy: if isinstance(e, SocketError) and self.proxy:
e = ProxyError('Cannot connect to proxy.', e) e = ProxyError('Cannot connect to proxy.', e)
elif isinstance(e, (SocketError, HTTPException)): elif isinstance(e, (SocketError, HTTPException)):
e = ProtocolError('Connection aborted.', e) e = ProtocolError('Connection aborted.', e)
retries = retries.increment(method, url, error=e, retries = retries.increment(method, url, error=e, _pool=self,
_pool=self, _stacktrace=stacktrace) _stacktrace=sys.exc_info()[2])
retries.sleep() retries.sleep()
# Keep track of the error for the retry warning. # Keep track of the error for the retry warning.
@ -673,24 +706,26 @@ class HTTPSConnectionPool(HTTPConnectionPool):
assert_fingerprint=self.assert_fingerprint) assert_fingerprint=self.assert_fingerprint)
conn.ssl_version = self.ssl_version conn.ssl_version = self.ssl_version
if self.proxy is not None:
# Python 2.7+
try:
set_tunnel = conn.set_tunnel
except AttributeError: # Platform-specific: Python 2.6
set_tunnel = conn._set_tunnel
if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
set_tunnel(self.host, self.port)
else:
set_tunnel(self.host, self.port, self.proxy_headers)
# Establish tunnel connection early, because otherwise httplib
# would improperly set Host: header to proxy's IP:port.
conn.connect()
return conn return conn
def _prepare_proxy(self, conn):
"""
Establish tunnel connection early, because otherwise httplib
would improperly set Host: header to proxy's IP:port.
"""
# Python 2.7+
try:
set_tunnel = conn.set_tunnel
except AttributeError: # Platform-specific: Python 2.6
set_tunnel = conn._set_tunnel
if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
set_tunnel(self.host, self.port)
else:
set_tunnel(self.host, self.port, self.proxy_headers)
conn.connect()
def _new_conn(self): def _new_conn(self):
""" """
Return a fresh :class:`httplib.HTTPSConnection`. Return a fresh :class:`httplib.HTTPSConnection`.
@ -700,7 +735,6 @@ class HTTPSConnectionPool(HTTPConnectionPool):
% (self.num_connections, self.host)) % (self.num_connections, self.host))
if not self.ConnectionCls or self.ConnectionCls is DummyConnection: if not self.ConnectionCls or self.ConnectionCls is DummyConnection:
# Platform-specific: Python without ssl
raise SSLError("Can't connect to HTTPS URL because the SSL " raise SSLError("Can't connect to HTTPS URL because the SSL "
"module is not available.") "module is not available.")

View File

@ -38,8 +38,6 @@ Module Variables
---------------- ----------------
:var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites. :var DEFAULT_SSL_CIPHER_LIST: The list of supported SSL/TLS cipher suites.
Default: ``ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:
ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:!aNULL:!MD5:!DSS``
.. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication .. _sni: https://en.wikipedia.org/wiki/Server_Name_Indication
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit) .. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
@ -85,22 +83,7 @@ _openssl_verify = {
+ OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT, + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
} }
# A secure default. DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
# Sources for more information on TLS ciphers:
#
# - https://wiki.mozilla.org/Security/Server_Side_TLS
# - https://www.ssllabs.com/projects/best-practices/index.html
# - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
#
# The general intent is:
# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM over any AES-CBC for better performance and security,
# - use 3DES as fallback which is secure but slow,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
DEFAULT_SSL_CIPHER_LIST = "ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:" + \
"ECDH+AES128:DH+AES:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+3DES:" + \
"!aNULL:!MD5:!DSS"
orig_util_HAS_SNI = util.HAS_SNI orig_util_HAS_SNI = util.HAS_SNI
@ -191,6 +174,11 @@ class WrappedSocket(object):
return b'' return b''
else: else:
raise raise
except OpenSSL.SSL.ZeroReturnError as e:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return b''
else:
raise
except OpenSSL.SSL.WantReadError: except OpenSSL.SSL.WantReadError:
rd, wd, ed = select.select( rd, wd, ed = select.select(
[self.socket], [], [], self.socket.gettimeout()) [self.socket], [], [], self.socket.gettimeout())
@ -294,7 +282,9 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
try: try:
cnx.do_handshake() cnx.do_handshake()
except OpenSSL.SSL.WantReadError: except OpenSSL.SSL.WantReadError:
select.select([sock], [], []) rd, _, _ = select.select([sock], [], [], sock.gettimeout())
if not rd:
raise timeout('select timed out')
continue continue
except OpenSSL.SSL.Error as e: except OpenSSL.SSL.Error as e:
raise ssl.SSLError('bad handshake', e) raise ssl.SSLError('bad handshake', e)

View File

@ -157,3 +157,13 @@ class InsecureRequestWarning(SecurityWarning):
class SystemTimeWarning(SecurityWarning): class SystemTimeWarning(SecurityWarning):
"Warned when system time is suspected to be wrong" "Warned when system time is suspected to be wrong"
pass pass
class InsecurePlatformWarning(SecurityWarning):
"Warned when certain SSL configuration is not available on a platform."
pass
class ResponseNotChunked(ProtocolError, ValueError):
"Response needs to be chunked in order to read it as chunks."
pass

View File

@ -8,7 +8,7 @@ except ImportError:
from ._collections import RecentlyUsedContainer from ._collections import RecentlyUsedContainer
from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
from .connectionpool import port_by_scheme from .connectionpool import port_by_scheme
from .exceptions import LocationValueError from .exceptions import LocationValueError, MaxRetryError
from .request import RequestMethods from .request import RequestMethods
from .util.url import parse_url from .util.url import parse_url
from .util.retry import Retry from .util.retry import Retry
@ -64,6 +64,14 @@ class PoolManager(RequestMethods):
self.pools = RecentlyUsedContainer(num_pools, self.pools = RecentlyUsedContainer(num_pools,
dispose_func=lambda p: p.close()) dispose_func=lambda p: p.close())
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.clear()
# Return False to re-raise any potential exceptions
return False
def _new_pool(self, scheme, host, port): def _new_pool(self, scheme, host, port):
""" """
Create a new :class:`ConnectionPool` based on host, port and scheme. Create a new :class:`ConnectionPool` based on host, port and scheme.
@ -167,7 +175,14 @@ class PoolManager(RequestMethods):
if not isinstance(retries, Retry): if not isinstance(retries, Retry):
retries = Retry.from_int(retries, redirect=redirect) retries = Retry.from_int(retries, redirect=redirect)
kw['retries'] = retries.increment(method, redirect_location) try:
retries = retries.increment(method, url, response=response, _pool=conn)
except MaxRetryError:
if retries.raise_on_redirect:
raise
return response
kw['retries'] = retries
kw['redirect'] = redirect kw['redirect'] = redirect
log.info("Redirecting %s -> %s" % (url, redirect_location)) log.info("Redirecting %s -> %s" % (url, redirect_location))

View File

@ -1,15 +1,20 @@
try:
import http.client as httplib
except ImportError:
import httplib
import zlib import zlib
import io import io
from socket import timeout as SocketTimeout from socket import timeout as SocketTimeout
from ._collections import HTTPHeaderDict from ._collections import HTTPHeaderDict
from .exceptions import ProtocolError, DecodeError, ReadTimeoutError from .exceptions import (
from .packages.six import string_types as basestring, binary_type ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
)
from .packages.six import string_types as basestring, binary_type, PY3
from .connection import HTTPException, BaseSSLError from .connection import HTTPException, BaseSSLError
from .util.response import is_fp_closed from .util.response import is_fp_closed
class DeflateDecoder(object): class DeflateDecoder(object):
def __init__(self): def __init__(self):
@ -21,6 +26,9 @@ class DeflateDecoder(object):
return getattr(self._obj, name) return getattr(self._obj, name)
def decompress(self, data): def decompress(self, data):
if not data:
return data
if not self._first_try: if not self._first_try:
return self._obj.decompress(data) return self._obj.decompress(data)
@ -36,9 +44,23 @@ class DeflateDecoder(object):
self._data = None self._data = None
class GzipDecoder(object):
def __init__(self):
self._obj = zlib.decompressobj(16 + zlib.MAX_WBITS)
def __getattr__(self, name):
return getattr(self._obj, name)
def decompress(self, data):
if not data:
return data
return self._obj.decompress(data)
def _get_decoder(mode): def _get_decoder(mode):
if mode == 'gzip': if mode == 'gzip':
return zlib.decompressobj(16 + zlib.MAX_WBITS) return GzipDecoder()
return DeflateDecoder() return DeflateDecoder()
@ -76,9 +98,10 @@ class HTTPResponse(io.IOBase):
strict=0, preload_content=True, decode_content=True, strict=0, preload_content=True, decode_content=True,
original_response=None, pool=None, connection=None): original_response=None, pool=None, connection=None):
self.headers = HTTPHeaderDict() if isinstance(headers, HTTPHeaderDict):
if headers: self.headers = headers
self.headers.update(headers) else:
self.headers = HTTPHeaderDict(headers)
self.status = status self.status = status
self.version = version self.version = version
self.reason = reason self.reason = reason
@ -100,7 +123,17 @@ class HTTPResponse(io.IOBase):
if hasattr(body, 'read'): if hasattr(body, 'read'):
self._fp = body self._fp = body
if preload_content and not self._body: # Are we using the chunked-style of transfer encoding?
self.chunked = False
self.chunk_left = None
tr_enc = self.headers.get('transfer-encoding', '').lower()
# Don't incur the penalty of creating a list and then discarding it
encodings = (enc.strip() for enc in tr_enc.split(","))
if "chunked" in encodings:
self.chunked = True
# We certainly don't want to preload content when the response is chunked.
if not self.chunked and preload_content and not self._body:
self._body = self.read(decode_content=decode_content) self._body = self.read(decode_content=decode_content)
def get_redirect_location(self): def get_redirect_location(self):
@ -140,6 +173,35 @@ class HTTPResponse(io.IOBase):
""" """
return self._fp_bytes_read return self._fp_bytes_read
def _init_decoder(self):
"""
Set-up the _decoder attribute if necessar.
"""
# Note: content-encoding value should be case-insensitive, per RFC 7230
# Section 3.2
content_encoding = self.headers.get('content-encoding', '').lower()
if self._decoder is None and content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
def _decode(self, data, decode_content, flush_decoder):
"""
Decode the data passed in and potentially flush the decoder.
"""
try:
if decode_content and self._decoder:
data = self._decoder.decompress(data)
except (IOError, zlib.error) as e:
content_encoding = self.headers.get('content-encoding', '').lower()
raise DecodeError(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding, e)
if flush_decoder and decode_content and self._decoder:
buf = self._decoder.decompress(binary_type())
data += buf + self._decoder.flush()
return data
def read(self, amt=None, decode_content=None, cache_content=False): def read(self, amt=None, decode_content=None, cache_content=False):
""" """
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
@ -161,12 +223,7 @@ class HTTPResponse(io.IOBase):
after having ``.read()`` the file object. (Overridden if ``amt`` is after having ``.read()`` the file object. (Overridden if ``amt`` is
set.) set.)
""" """
# Note: content-encoding value should be case-insensitive, per RFC 7230 self._init_decoder()
# Section 3.2
content_encoding = self.headers.get('content-encoding', '').lower()
if self._decoder is None:
if content_encoding in self.CONTENT_DECODERS:
self._decoder = _get_decoder(content_encoding)
if decode_content is None: if decode_content is None:
decode_content = self.decode_content decode_content = self.decode_content
@ -202,7 +259,7 @@ class HTTPResponse(io.IOBase):
except BaseSSLError as e: except BaseSSLError as e:
# FIXME: Is there a better way to differentiate between SSLErrors? # FIXME: Is there a better way to differentiate between SSLErrors?
if not 'read operation timed out' in str(e): # Defensive: if 'read operation timed out' not in str(e): # Defensive:
# This shouldn't happen but just in case we're missing an edge # This shouldn't happen but just in case we're missing an edge
# case, let's avoid swallowing SSL errors. # case, let's avoid swallowing SSL errors.
raise raise
@ -215,17 +272,7 @@ class HTTPResponse(io.IOBase):
self._fp_bytes_read += len(data) self._fp_bytes_read += len(data)
try: data = self._decode(data, decode_content, flush_decoder)
if decode_content and self._decoder:
data = self._decoder.decompress(data)
except (IOError, zlib.error) as e:
raise DecodeError(
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding, e)
if flush_decoder and decode_content and self._decoder:
buf = self._decoder.decompress(binary_type())
data += buf + self._decoder.flush()
if cache_content: if cache_content:
self._body = data self._body = data
@ -252,11 +299,15 @@ class HTTPResponse(io.IOBase):
If True, will attempt to decode the body based on the If True, will attempt to decode the body based on the
'content-encoding' header. 'content-encoding' header.
""" """
while not is_fp_closed(self._fp): if self.chunked:
data = self.read(amt=amt, decode_content=decode_content) for line in self.read_chunked(amt, decode_content=decode_content):
yield line
else:
while not is_fp_closed(self._fp):
data = self.read(amt=amt, decode_content=decode_content)
if data: if data:
yield data yield data
@classmethod @classmethod
def from_httplib(ResponseCls, r, **response_kw): def from_httplib(ResponseCls, r, **response_kw):
@ -267,14 +318,16 @@ class HTTPResponse(io.IOBase):
Remaining parameters are passed to the HTTPResponse constructor, along Remaining parameters are passed to the HTTPResponse constructor, along
with ``original_response=r``. with ``original_response=r``.
""" """
headers = r.msg
headers = HTTPHeaderDict() if not isinstance(headers, HTTPHeaderDict):
for k, v in r.getheaders(): if PY3: # Python 3
headers.add(k, v) headers = HTTPHeaderDict(headers.items())
else: # Python 2
headers = HTTPHeaderDict.from_httplib(headers)
# HTTPResponse objects in Python 3 don't have a .strict attribute # HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, 'strict', 0) strict = getattr(r, 'strict', 0)
return ResponseCls(body=r, resp = ResponseCls(body=r,
headers=headers, headers=headers,
status=r.status, status=r.status,
version=r.version, version=r.version,
@ -282,6 +335,7 @@ class HTTPResponse(io.IOBase):
strict=strict, strict=strict,
original_response=r, original_response=r,
**response_kw) **response_kw)
return resp
# Backwards-compatibility methods for httplib.HTTPResponse # Backwards-compatibility methods for httplib.HTTPResponse
def getheaders(self): def getheaders(self):
@ -331,3 +385,82 @@ class HTTPResponse(io.IOBase):
else: else:
b[:len(temp)] = temp b[:len(temp)] = temp
return len(temp) return len(temp)
def _update_chunk_length(self):
# First, we'll figure out length of a chunk and then
# we'll try to read it from socket.
if self.chunk_left is not None:
return
line = self._fp.fp.readline()
line = line.split(b';', 1)[0]
try:
self.chunk_left = int(line, 16)
except ValueError:
# Invalid chunked protocol response, abort.
self.close()
raise httplib.IncompleteRead(line)
def _handle_chunk(self, amt):
returned_chunk = None
if amt is None:
chunk = self._fp._safe_read(self.chunk_left)
returned_chunk = chunk
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
elif amt < self.chunk_left:
value = self._fp._safe_read(amt)
self.chunk_left = self.chunk_left - amt
returned_chunk = value
elif amt == self.chunk_left:
value = self._fp._safe_read(amt)
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
returned_chunk = value
else: # amt > self.chunk_left
returned_chunk = self._fp._safe_read(self.chunk_left)
self._fp._safe_read(2) # Toss the CRLF at the end of the chunk.
self.chunk_left = None
return returned_chunk
def read_chunked(self, amt=None, decode_content=None):
"""
Similar to :meth:`HTTPResponse.read`, but with an additional
parameter: ``decode_content``.
:param decode_content:
If True, will attempt to decode the body based on the
'content-encoding' header.
"""
self._init_decoder()
# FIXME: Rewrite this method and make it a class with a better structured logic.
if not self.chunked:
raise ResponseNotChunked("Response is not chunked. "
"Header 'transfer-encoding: chunked' is missing.")
if self._original_response and self._original_response._method.upper() == 'HEAD':
# Don't bother reading the body of a HEAD request.
# FIXME: Can we do this somehow without accessing private httplib _method?
self._original_response.close()
return
while True:
self._update_chunk_length()
if self.chunk_left == 0:
break
chunk = self._handle_chunk(amt)
yield self._decode(chunk, decode_content=decode_content,
flush_decoder=True)
# Chunk content ends with \r\n: discard it.
while True:
line = self._fp.fp.readline()
if not line:
# Some sites may not end with '\r\n'.
break
if line == b'\r\n':
break
# We read everything; close the "file".
if self._original_response:
self._original_response.close()
self.release_conn()

View File

@ -82,6 +82,7 @@ def create_connection(address, timeout=socket._GLOBAL_DEFAULT_TIMEOUT,
err = _ err = _
if sock is not None: if sock is not None:
sock.close() sock.close()
sock = None
if err is not None: if err is not None:
raise err raise err

View File

@ -190,7 +190,7 @@ class Retry(object):
return isinstance(err, (ReadTimeoutError, ProtocolError)) return isinstance(err, (ReadTimeoutError, ProtocolError))
def is_forced_retry(self, method, status_code): def is_forced_retry(self, method, status_code):
""" Is this method/response retryable? (Based on method/codes whitelists) """ Is this method/status code retryable? (Based on method/codes whitelists)
""" """
if self.method_whitelist and method.upper() not in self.method_whitelist: if self.method_whitelist and method.upper() not in self.method_whitelist:
return False return False

View File

@ -1,7 +1,7 @@
from binascii import hexlify, unhexlify from binascii import hexlify, unhexlify
from hashlib import md5, sha1 from hashlib import md5, sha1, sha256
from ..exceptions import SSLError from ..exceptions import SSLError, InsecurePlatformWarning
SSLContext = None SSLContext = None
@ -9,9 +9,10 @@ HAS_SNI = False
create_default_context = None create_default_context = None
import errno import errno
import ssl import warnings
try: # Test for SSL features try: # Test for SSL features
import ssl
from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23 from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
from ssl import HAS_SNI # Has SNI? from ssl import HAS_SNI # Has SNI?
except ImportError: except ImportError:
@ -24,14 +25,24 @@ except ImportError:
OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000 OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
OP_NO_COMPRESSION = 0x20000 OP_NO_COMPRESSION = 0x20000
try: # A secure default.
from ssl import _DEFAULT_CIPHERS # Sources for more information on TLS ciphers:
except ImportError: #
_DEFAULT_CIPHERS = ( # - https://wiki.mozilla.org/Security/Server_Side_TLS
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:' # - https://www.ssllabs.com/projects/best-practices/index.html
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:' # - https://hynek.me/articles/hardening-your-web-servers-ssl-ciphers/
'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5' #
) # The general intent is:
# - Prefer cipher suites that offer perfect forward secrecy (DHE/ECDHE),
# - prefer ECDHE over DHE for better performance,
# - prefer any AES-GCM over any AES-CBC for better performance and security,
# - use 3DES as fallback which is secure but slow,
# - disable NULL authentication, MD5 MACs and DSS for security reasons.
DEFAULT_CIPHERS = (
'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:!aNULL:'
'!eNULL:!MD5'
)
try: try:
from ssl import SSLContext # Modern SSL? from ssl import SSLContext # Modern SSL?
@ -39,7 +50,8 @@ except ImportError:
import sys import sys
class SSLContext(object): # Platform-specific: Python 2 & 3.1 class SSLContext(object): # Platform-specific: Python 2 & 3.1
supports_set_ciphers = sys.version_info >= (2, 7) supports_set_ciphers = ((2, 7) <= sys.version_info < (3,) or
(3, 2) <= sys.version_info)
def __init__(self, protocol_version): def __init__(self, protocol_version):
self.protocol = protocol_version self.protocol = protocol_version
@ -69,6 +81,14 @@ except ImportError:
self.ciphers = cipher_suite self.ciphers = cipher_suite
def wrap_socket(self, socket, server_hostname=None): def wrap_socket(self, socket, server_hostname=None):
warnings.warn(
'A true SSLContext object is not available. This prevents '
'urllib3 from configuring SSL appropriately and may cause '
'certain SSL connections to fail. For more information, see '
'https://urllib3.readthedocs.org/en/latest/security.html'
'#insecureplatformwarning.',
InsecurePlatformWarning
)
kwargs = { kwargs = {
'keyfile': self.keyfile, 'keyfile': self.keyfile,
'certfile': self.certfile, 'certfile': self.certfile,
@ -96,7 +116,8 @@ def assert_fingerprint(cert, fingerprint):
# this digest. # this digest.
hashfunc_map = { hashfunc_map = {
16: md5, 16: md5,
20: sha1 20: sha1,
32: sha256,
} }
fingerprint = fingerprint.replace(':', '').lower() fingerprint = fingerprint.replace(':', '').lower()
@ -157,7 +178,7 @@ def resolve_ssl_version(candidate):
return candidate return candidate
def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED, def create_urllib3_context(ssl_version=None, cert_reqs=None,
options=None, ciphers=None): options=None, ciphers=None):
"""All arguments have the same meaning as ``ssl_wrap_socket``. """All arguments have the same meaning as ``ssl_wrap_socket``.
@ -194,6 +215,9 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
""" """
context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23) context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
# Setting the default here, as we may have no ssl module on import
cert_reqs = ssl.CERT_REQUIRED if cert_reqs is None else cert_reqs
if options is None: if options is None:
options = 0 options = 0
# SSLv2 is easily broken and is considered harmful and dangerous # SSLv2 is easily broken and is considered harmful and dangerous
@ -207,11 +231,13 @@ def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
context.options |= options context.options |= options
if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6 if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6
context.set_ciphers(ciphers or _DEFAULT_CIPHERS) context.set_ciphers(ciphers or DEFAULT_CIPHERS)
context.verify_mode = cert_reqs context.verify_mode = cert_reqs
if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2 if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2
context.check_hostname = (context.verify_mode == ssl.CERT_REQUIRED) # We do our own verification, including fingerprints and alternative
# hostnames. So disable it here
context.check_hostname = False
return context return context

View File

@ -15,6 +15,8 @@ class Url(namedtuple('Url', url_attrs)):
def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None, def __new__(cls, scheme=None, auth=None, host=None, port=None, path=None,
query=None, fragment=None): query=None, fragment=None):
if path and not path.startswith('/'):
path = '/' + path
return super(Url, cls).__new__(cls, scheme, auth, host, port, path, return super(Url, cls).__new__(cls, scheme, auth, host, port, path,
query, fragment) query, fragment)

View File

@ -90,7 +90,7 @@ def merge_hooks(request_hooks, session_hooks, dict_class=OrderedDict):
class SessionRedirectMixin(object): class SessionRedirectMixin(object):
def resolve_redirects(self, resp, req, stream=False, timeout=None, def resolve_redirects(self, resp, req, stream=False, timeout=None,
verify=True, cert=None, proxies=None): verify=True, cert=None, proxies=None, **adapter_kwargs):
"""Receives a Response. Returns a generator of Responses.""" """Receives a Response. Returns a generator of Responses."""
i = 0 i = 0
@ -171,7 +171,10 @@ class SessionRedirectMixin(object):
except KeyError: except KeyError:
pass pass
extract_cookies_to_jar(prepared_request._cookies, prepared_request, resp.raw) # Extract any cookies sent on the response to the cookiejar
# in the new request. Because we've mutated our copied prepared
# request, use the old one that we haven't yet touched.
extract_cookies_to_jar(prepared_request._cookies, req, resp.raw)
prepared_request._cookies.update(self.cookies) prepared_request._cookies.update(self.cookies)
prepared_request.prepare_cookies(prepared_request._cookies) prepared_request.prepare_cookies(prepared_request._cookies)
@ -190,6 +193,7 @@ class SessionRedirectMixin(object):
cert=cert, cert=cert,
proxies=proxies, proxies=proxies,
allow_redirects=False, allow_redirects=False,
**adapter_kwargs
) )
extract_cookies_to_jar(self.cookies, prepared_request, resp.raw) extract_cookies_to_jar(self.cookies, prepared_request, resp.raw)
@ -557,10 +561,6 @@ class Session(SessionRedirectMixin):
# Set up variables needed for resolve_redirects and dispatching of hooks # Set up variables needed for resolve_redirects and dispatching of hooks
allow_redirects = kwargs.pop('allow_redirects', True) allow_redirects = kwargs.pop('allow_redirects', True)
stream = kwargs.get('stream') stream = kwargs.get('stream')
timeout = kwargs.get('timeout')
verify = kwargs.get('verify')
cert = kwargs.get('cert')
proxies = kwargs.get('proxies')
hooks = request.hooks hooks = request.hooks
# Get the appropriate adapter to use # Get the appropriate adapter to use
@ -588,12 +588,7 @@ class Session(SessionRedirectMixin):
extract_cookies_to_jar(self.cookies, request, r.raw) extract_cookies_to_jar(self.cookies, request, r.raw)
# Redirect resolving generator. # Redirect resolving generator.
gen = self.resolve_redirects(r, request, gen = self.resolve_redirects(r, request, **kwargs)
stream=stream,
timeout=timeout,
verify=verify,
cert=cert,
proxies=proxies)
# Resolve redirects if allowed. # Resolve redirects if allowed.
history = [resp for resp in gen] if allow_redirects else [] history = [resp for resp in gen] if allow_redirects else []

View File

@ -25,7 +25,8 @@ from . import __version__
from . import certs from . import certs
from .compat import parse_http_list as _parse_list_header from .compat import parse_http_list as _parse_list_header
from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2, from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2,
builtin_str, getproxies, proxy_bypass, urlunparse) builtin_str, getproxies, proxy_bypass, urlunparse,
basestring)
from .cookies import RequestsCookieJar, cookiejar_from_dict from .cookies import RequestsCookieJar, cookiejar_from_dict
from .structures import CaseInsensitiveDict from .structures import CaseInsensitiveDict
from .exceptions import InvalidURL from .exceptions import InvalidURL
@ -115,7 +116,8 @@ def get_netrc_auth(url):
def guess_filename(obj): def guess_filename(obj):
"""Tries to guess the filename of the given object.""" """Tries to guess the filename of the given object."""
name = getattr(obj, 'name', None) name = getattr(obj, 'name', None)
if name and isinstance(name, builtin_str) and name[0] != '<' and name[-1] != '>': if (name and isinstance(name, basestring) and name[0] != '<' and
name[-1] != '>'):
return os.path.basename(name) return os.path.basename(name)
@ -418,10 +420,18 @@ def requote_uri(uri):
This function passes the given URI through an unquote/quote cycle to This function passes the given URI through an unquote/quote cycle to
ensure that it is fully and consistently quoted. ensure that it is fully and consistently quoted.
""" """
# Unquote only the unreserved characters safe_with_percent = "!#$%&'()*+,/:;=?@[]~"
# Then quote only illegal characters (do not quote reserved, unreserved, safe_without_percent = "!#$&'()*+,/:;=?@[]~"
# or '%') try:
return quote(unquote_unreserved(uri), safe="!#$%&'()*+,/:;=?@[]~") # Unquote only the unreserved characters
# Then quote only illegal characters (do not quote reserved,
# unreserved, or '%')
return quote(unquote_unreserved(uri), safe=safe_with_percent)
except InvalidURL:
# We couldn't unquote the given URI, so let's try quoting it, but
# there may be unquoted '%'s in the URI. We need to make sure they're
# properly quoted so they do not cause issues elsewhere.
return quote(uri, safe=safe_without_percent)
def address_in_network(ip, net): def address_in_network(ip, net):