author     Christopher Baines <mail@cbaines.net>  2015-12-22 13:46:21 +0000
committer  Christopher Baines <mail@cbaines.net>  2015-12-22 13:46:21 +0000
commit     1f19c06843e6d266368e3b570352bdf7d789a0de (patch)
tree       042bc99c162d671c2b2fb7cc5cb4400ef26c1bed
parent     cb40ec082506c0d9eb05978839bed2f12541af35 (diff)
download   python-requests-1f19c06843e6d266368e3b570352bdf7d789a0de.tar
           python-requests-1f19c06843e6d266368e3b570352bdf7d789a0de.tar.gz

Import requests_2.9.1.orig.tar.gz (refs: upstream/2.9.1, upstream)
-rw-r--r--  HISTORY.rst                                    |  54
-rw-r--r--  PKG-INFO                                       |  58
-rw-r--r--  requests.egg-info/PKG-INFO                     |  58
-rw-r--r--  requests.egg-info/requires.txt                 |   2
-rw-r--r--  requests/__init__.py                           |  12
-rw-r--r--  requests/adapters.py                           |  20
-rw-r--r--  requests/api.py                                |  14
-rw-r--r--  requests/auth.py                               |   2
-rw-r--r--  requests/cookies.py                            |  19
-rw-r--r--  requests/exceptions.py                         |  15
-rw-r--r--  requests/models.py                             |   9
-rw-r--r--  requests/packages/urllib3/__init__.py          |  32
-rw-r--r--  requests/packages/urllib3/_collections.py      |   7
-rw-r--r--  requests/packages/urllib3/connection.py        |  39
-rw-r--r--  requests/packages/urllib3/connectionpool.py    |  34
-rw-r--r--  requests/packages/urllib3/contrib/appengine.py |  23
-rw-r--r--  requests/packages/urllib3/contrib/ntlmpool.py  |   1
-rw-r--r--  requests/packages/urllib3/contrib/pyopenssl.py |  35
-rw-r--r--  requests/packages/urllib3/exceptions.py        |  14
-rw-r--r--  requests/packages/urllib3/fields.py            |   1
-rw-r--r--  requests/packages/urllib3/filepost.py          |   1
-rw-r--r--  requests/packages/urllib3/packages/__init__.py |   1
-rw-r--r--  requests/packages/urllib3/poolmanager.py       |   3
-rw-r--r--  requests/packages/urllib3/request.py           |   4
-rw-r--r--  requests/packages/urllib3/response.py          |  53
-rw-r--r--  requests/packages/urllib3/util/__init__.py     |  20
-rw-r--r--  requests/packages/urllib3/util/connection.py   |   1
-rw-r--r--  requests/packages/urllib3/util/request.py      |   1
-rw-r--r--  requests/packages/urllib3/util/response.py     |   3
-rw-r--r--  requests/packages/urllib3/util/retry.py        |   7
-rw-r--r--  requests/packages/urllib3/util/ssl_.py         |  39
-rw-r--r--  requests/packages/urllib3/util/timeout.py      |   8
-rw-r--r--  requests/packages/urllib3/util/url.py          |   5
-rw-r--r--  requests/sessions.py                           |  12
-rw-r--r--  requests/status_codes.py                       |   1
-rw-r--r--  requests/utils.py                              |  47
-rw-r--r--  requirements.txt                               |   2
-rwxr-xr-x  setup.py                                       |   6
-rwxr-xr-x  test_requests.py                               | 295
39 files changed, 667 insertions, 291 deletions
diff --git a/HISTORY.rst b/HISTORY.rst
index 02593a3..f8c1a54 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -3,6 +3,54 @@
Release History
---------------
+2.9.1 (2015-12-21)
+++++++++++++++++++
+
+**Bugfixes**
+
+- Resolve regression introduced in 2.9.0 that made it impossible to send binary
+ strings as bodies in Python 3.
+- Fixed errors when calculating cookie expiration dates in certain locales.
+
+**Miscellaneous**
+
+- Updated bundled urllib3 to 1.13.1.
+
+2.9.0 (2015-12-15)
+++++++++++++++++++
+
+**Minor Improvements** (Backwards compatible)
+
+- The ``verify`` keyword argument now supports being passed a path to a
+ directory of CA certificates, not just a single-file bundle.
+- Warnings are now emitted when sending files opened in text mode.
+- Added the 511 Network Authentication Required status code to the status code
+ registry.
+
+**Bugfixes**
+
+- For file-like objects that are not seeked to the very beginning, we now
+ send the content length for the number of bytes we will actually read, rather
+ than the total size of the file, allowing partial file uploads.
+- When uploading file-like objects, if they are empty or have no obvious
+ content length we set ``Transfer-Encoding: chunked`` rather than
+ ``Content-Length: 0``.
+- We correctly receive the response in buffered mode when uploading chunked
+ bodies.
+- We now handle being passed a query string as a bytestring on Python 3, by
+ decoding it as UTF-8.
+- Sessions are now closed in all cases (exceptional and not) when using the
+ functional API rather than leaking and waiting for the garbage collector to
+ clean them up.
+- Correctly handle digest auth headers with a malformed ``qop`` directive that
+ contains no token, by treating it the same as if no ``qop`` directive was
+ provided at all.
+- Minor performance improvements when removing specific cookies by name.
+
+**Miscellaneous**
+
+- Updated urllib3 to 1.13.
+
2.8.1 (2015-10-13)
++++++++++++++++++
@@ -42,7 +90,7 @@ Release History
- The ``json`` parameter to ``post()`` and friends will now only be used if
neither ``data`` nor ``files`` are present, consistent with the
documentation.
-- We now ignore empty fields in the ``NO_PROXY`` enviroment variable.
+- We now ignore empty fields in the ``NO_PROXY`` environment variable.
- Fixed problem where ``httplib.BadStatusLine`` would get raised if combining
``stream=True`` with ``contextlib.closing``.
- Prevented bugs where we would attempt to return the same connection back to
@@ -525,7 +573,7 @@ This is not a backwards compatible change.
- Improved mime-compatible JSON handling
- Proxy fixes
- Path hack fixes
-- Case-Insensistive Content-Encoding headers
+- Case-Insensitive Content-Encoding headers
- Support for CJK parameters in form posts
@@ -612,7 +660,7 @@ This is not a backwards compatible change.
+++++++++++++++++++
- Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
-- Allow disabling of cookie persistiance.
+- Allow disabling of cookie persistence.
- New implementation of safe_mode
- cookies.get now supports default argument
- Session cookies not saved when Session.request is called with return_response=False
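
[Editor's note] To illustrate the 2.9.0 ``verify`` change in the changelog above (a sketch, not part of the diff; the paths are hypothetical): ``verify`` now accepts a directory of CA certificates as well as a single bundle file. As with OpenSSL's capath mechanism, the directory is expected to contain hash-named certificates (see c_rehash).

    import requests

    # Either form works from 2.9.0 on; the directory variant requires
    # OpenSSL-hashed filenames in the directory.
    requests.get('https://example.com', verify='/etc/ssl/certs/ca-bundle.crt')
    requests.get('https://example.com', verify='/etc/ssl/certs')
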
diff --git a/PKG-INFO b/PKG-INFO
index 6522e96..d75ebac 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: requests
-Version: 2.8.1
+Version: 2.9.1
Summary: Python HTTP for Humans.
Home-page: http://python-requests.org
Author: Kenneth Reitz
@@ -99,6 +99,54 @@ Description: Requests: HTTP for Humans
Release History
---------------
+ 2.9.1 (2015-12-21)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Resolve regression introduced in 2.9.0 that made it impossible to send binary
+ strings as bodies in Python 3.
+ - Fixed errors when calculating cookie expiration dates in certain locales.
+
+ **Miscellaneous**
+
+ - Updated bundled urllib3 to 1.13.1.
+
+ 2.9.0 (2015-12-15)
+ ++++++++++++++++++
+
+ **Minor Improvements** (Backwards compatible)
+
+ - The ``verify`` keyword argument now supports being passed a path to a
+ directory of CA certificates, not just a single-file bundle.
+ - Warnings are now emitted when sending files opened in text mode.
+ - Added the 511 Network Authentication Required status code to the status code
+ registry.
+
+ **Bugfixes**
+
+ - For file-like objects that are not seeked to the very beginning, we now
+ send the content length for the number of bytes we will actually read, rather
+ than the total size of the file, allowing partial file uploads.
+ - When uploading file-like objects, if they are empty or have no obvious
+ content length we set ``Transfer-Encoding: chunked`` rather than
+ ``Content-Length: 0``.
+ - We correctly receive the response in buffered mode when uploading chunked
+ bodies.
+ - We now handle being passed a query string as a bytestring on Python 3, by
+ decoding it as UTF-8.
+ - Sessions are now closed in all cases (exceptional and not) when using the
+ functional API rather than leaking and waiting for the garbage collector to
+ clean them up.
+ - Correctly handle digest auth headers with a malformed ``qop`` directive that
+ contains no token, by treating it the same as if no ``qop`` directive was
+ provided at all.
+ - Minor performance improvements when removing specific cookies by name.
+
+ **Miscellaneous**
+
+ - Updated urllib3 to 1.13.
+
2.8.1 (2015-10-13)
++++++++++++++++++
@@ -138,7 +186,7 @@ Description: Requests: HTTP for Humans
- The ``json`` parameter to ``post()`` and friends will now only be used if
neither ``data`` nor ``files`` are present, consistent with the
documentation.
- - We now ignore empty fields in the ``NO_PROXY`` enviroment variable.
+ - We now ignore empty fields in the ``NO_PROXY`` environment variable.
- Fixed problem where ``httplib.BadStatusLine`` would get raised if combining
``stream=True`` with ``contextlib.closing``.
- Prevented bugs where we would attempt to return the same connection back to
@@ -621,7 +669,7 @@ Description: Requests: HTTP for Humans
- Improved mime-compatible JSON handling
- Proxy fixes
- Path hack fixes
- - Case-Insensistive Content-Encoding headers
+ - Case-Insensitive Content-Encoding headers
- Support for CJK parameters in form posts
@@ -708,7 +756,7 @@ Description: Requests: HTTP for Humans
+++++++++++++++++++
- Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
- - Allow disabling of cookie persistiance.
+ - Allow disabling of cookie persistence.
- New implementation of safe_mode
- cookies.get now supports default argument
- Session cookies not saved when Session.request is called with return_response=False
@@ -1183,8 +1231,8 @@ Classifier: Intended Audience :: Developers
Classifier: Natural Language :: English
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
diff --git a/requests.egg-info/PKG-INFO b/requests.egg-info/PKG-INFO
index 6522e96..d75ebac 100644
--- a/requests.egg-info/PKG-INFO
+++ b/requests.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: requests
-Version: 2.8.1
+Version: 2.9.1
Summary: Python HTTP for Humans.
Home-page: http://python-requests.org
Author: Kenneth Reitz
@@ -99,6 +99,54 @@ Description: Requests: HTTP for Humans
Release History
---------------
+ 2.9.1 (2015-12-21)
+ ++++++++++++++++++
+
+ **Bugfixes**
+
+ - Resolve regression introduced in 2.9.0 that made it impossible to send binary
+ strings as bodies in Python 3.
+ - Fixed errors when calculating cookie expiration dates in certain locales.
+
+ **Miscellaneous**
+
+ - Updated bundled urllib3 to 1.13.1.
+
+ 2.9.0 (2015-12-15)
+ ++++++++++++++++++
+
+ **Minor Improvements** (Backwards compatible)
+
+ - The ``verify`` keyword argument now supports being passed a path to a
+ directory of CA certificates, not just a single-file bundle.
+ - Warnings are now emitted when sending files opened in text mode.
+ - Added the 511 Network Authentication Required status code to the status code
+ registry.
+
+ **Bugfixes**
+
+ - For file-like objects that are not seeked to the very beginning, we now
+ send the content length for the number of bytes we will actually read, rather
+ than the total size of the file, allowing partial file uploads.
+ - When uploading file-like objects, if they are empty or have no obvious
+ content length we set ``Transfer-Encoding: chunked`` rather than
+ ``Content-Length: 0``.
+ - We correctly receive the response in buffered mode when uploading chunked
+ bodies.
+ - We now handle being passed a query string as a bytestring on Python 3, by
+ decoding it as UTF-8.
+ - Sessions are now closed in all cases (exceptional and not) when using the
+ functional API rather than leaking and waiting for the garbage collector to
+ clean them up.
+ - Correctly handle digest auth headers with a malformed ``qop`` directive that
+ contains no token, by treating it the same as if no ``qop`` directive was
+ provided at all.
+ - Minor performance improvements when removing specific cookies by name.
+
+ **Miscellaneous**
+
+ - Updated urllib3 to 1.13.
+
2.8.1 (2015-10-13)
++++++++++++++++++
@@ -138,7 +186,7 @@ Description: Requests: HTTP for Humans
- The ``json`` parameter to ``post()`` and friends will now only be used if
neither ``data`` nor ``files`` are present, consistent with the
documentation.
- - We now ignore empty fields in the ``NO_PROXY`` enviroment variable.
+ - We now ignore empty fields in the ``NO_PROXY`` environment variable.
- Fixed problem where ``httplib.BadStatusLine`` would get raised if combining
``stream=True`` with ``contextlib.closing``.
- Prevented bugs where we would attempt to return the same connection back to
@@ -621,7 +669,7 @@ Description: Requests: HTTP for Humans
- Improved mime-compatible JSON handling
- Proxy fixes
- Path hack fixes
- - Case-Insensistive Content-Encoding headers
+ - Case-Insensitive Content-Encoding headers
- Support for CJK parameters in form posts
@@ -708,7 +756,7 @@ Description: Requests: HTTP for Humans
+++++++++++++++++++
- Removal of Requests.async in favor of `grequests <https://github.com/kennethreitz/grequests>`_
- - Allow disabling of cookie persistiance.
+ - Allow disabling of cookie persistence.
- New implementation of safe_mode
- cookies.get now supports default argument
- Session cookies not saved when Session.request is called with return_response=False
@@ -1183,8 +1231,8 @@ Classifier: Intended Audience :: Developers
Classifier: Natural Language :: English
Classifier: License :: OSI Approved :: Apache Software License
Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.3
Classifier: Programming Language :: Python :: 3.4
+Classifier: Programming Language :: Python :: 3.5
diff --git a/requests.egg-info/requires.txt b/requests.egg-info/requires.txt
index 99755a4..34ddab5 100644
--- a/requests.egg-info/requires.txt
+++ b/requests.egg-info/requires.txt
@@ -1,5 +1,5 @@
[security]
-pyOpenSSL
+pyOpenSSL>=0.13
ndg-httpsclient
pyasn1
diff --git a/requests/__init__.py b/requests/__init__.py
index 3d8188a..bd5b5b9 100644
--- a/requests/__init__.py
+++ b/requests/__init__.py
@@ -42,8 +42,8 @@ is at <http://python-requests.org>.
"""
__title__ = 'requests'
-__version__ = '2.8.1'
-__build__ = 0x020801
+__version__ = '2.9.1'
+__build__ = 0x020901
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2015 Kenneth Reitz'
@@ -62,7 +62,8 @@ from .sessions import session, Session
from .status_codes import codes
from .exceptions import (
RequestException, Timeout, URLRequired,
- TooManyRedirects, HTTPError, ConnectionError
+ TooManyRedirects, HTTPError, ConnectionError,
+ FileModeWarning,
)
# Set default logging handler to avoid "No handler found" warnings.
@@ -75,3 +76,8 @@ except ImportError:
pass
logging.getLogger(__name__).addHandler(NullHandler())
+
+import warnings
+
+# FileModeWarnings go off per the default.
+warnings.simplefilter('default', FileModeWarning, append=True)
diff --git a/requests/adapters.py b/requests/adapters.py
index 7682db0..6266d5b 100644
--- a/requests/adapters.py
+++ b/requests/adapters.py
@@ -8,6 +8,7 @@ This module contains the transport adapters that Requests uses to define
and maintain connections.
"""
+import os.path
import socket
from .models import Response
@@ -107,7 +108,7 @@ class HTTPAdapter(BaseAdapter):
def __setstate__(self, state):
# Can't handle by adding 'proxy_manager' to self.__attrs__ because
- # because self.poolmanager uses a lambda function, which isn't pickleable.
+ # self.poolmanager uses a lambda function, which isn't pickleable.
self.proxy_manager = {}
self.config = {}
@@ -185,10 +186,15 @@ class HTTPAdapter(BaseAdapter):
raise Exception("Could not find a suitable SSL CA certificate bundle.")
conn.cert_reqs = 'CERT_REQUIRED'
- conn.ca_certs = cert_loc
+
+ if not os.path.isdir(cert_loc):
+ conn.ca_certs = cert_loc
+ else:
+ conn.ca_cert_dir = cert_loc
else:
conn.cert_reqs = 'CERT_NONE'
conn.ca_certs = None
+ conn.ca_cert_dir = None
if cert:
if not isinstance(cert, basestring):
@@ -394,7 +400,15 @@ class HTTPAdapter(BaseAdapter):
low_conn.send(b'\r\n')
low_conn.send(b'0\r\n\r\n')
- r = low_conn.getresponse()
+ # Receive the response from the server
+ try:
+ # For Python 2.7+ versions, use buffering of HTTP
+ # responses
+ r = low_conn.getresponse(buffering=True)
+ except TypeError:
+ # For compatibility with Python 2.6 versions and back
+ r = low_conn.getresponse()
+
resp = HTTPResponse.from_httplib(
r,
pool=conn,
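
[Editor's note] The buffered-getresponse change above relies on feature detection by TypeError; a standalone sketch of the idiom (the function name is hypothetical):

    def getresponse_compat(low_conn):
        # Try the Python 2.7 httplib keyword first; implementations
        # without it raise TypeError, so fall back cleanly.
        try:
            return low_conn.getresponse(buffering=True)
        except TypeError:
            return low_conn.getresponse()
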
diff --git a/requests/api.py b/requests/api.py
index 72a777b..b21a1a4 100644
--- a/requests/api.py
+++ b/requests/api.py
@@ -33,7 +33,7 @@ def request(method, url, **kwargs):
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:type allow_redirects: bool
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
- :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
+ :param verify: (optional) whether the SSL cert will be verified. A CA_BUNDLE path can also be provided. Defaults to ``True``.
:param stream: (optional) if ``False``, the response content will be immediately downloaded.
:param cert: (optional) if String, path to ssl client cert file (.pem). If Tuple, ('cert', 'key') pair.
:return: :class:`Response <Response>` object
@@ -46,13 +46,11 @@ def request(method, url, **kwargs):
<Response [200]>
"""
- session = sessions.Session()
- response = session.request(method=method, url=url, **kwargs)
- # By explicitly closing the session, we avoid leaving sockets open which
- # can trigger a ResourceWarning in some cases, and look like a memory leak
- # in others.
- session.close()
- return response
+ # By using the 'with' statement we are sure the session is closed, thus we
+ # avoid leaving sockets open which can trigger a ResourceWarning in some
+ # cases, and look like a memory leak in others.
+ with sessions.Session() as session:
+ return session.request(method=method, url=url, **kwargs)
def get(url, params=None, **kwargs):
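
[Editor's note] The functional API now does the equivalent of the following, and the same pattern is available to callers who manage their own sessions (httpbin.org is illustrative only):

    import requests

    # The session is closed on exit even if the request raises, so no
    # socket is left waiting for the garbage collector.
    with requests.Session() as session:
        response = session.get('http://httpbin.org/get')
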
diff --git a/requests/auth.py b/requests/auth.py
index 8c4e847..2af55fb 100644
--- a/requests/auth.py
+++ b/requests/auth.py
@@ -136,7 +136,7 @@ class HTTPDigestAuth(AuthBase):
if _algorithm == 'MD5-SESS':
HA1 = hash_utf8('%s:%s:%s' % (HA1, nonce, cnonce))
- if qop is None:
+ if not qop:
respdig = KD(HA1, "%s:%s" % (nonce, HA2))
elif qop == 'auth' or 'auth' in qop.split(','):
noncebit = "%s:%s:%s:%s:%s" % (
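
[Editor's note] A minimal sketch of the branch this change affects (the hashing helpers approximate the ones in requests/auth.py): ``not qop`` is true both for a missing directive (None) and for the malformed empty token, so both now produce the RFC 2069-style response digest.

    from hashlib import md5

    def hash_utf8(x):
        if isinstance(x, str):
            x = x.encode('utf-8')
        return md5(x).hexdigest()

    def KD(s, d):
        return hash_utf8("%s:%s" % (s, d))

    def respdig(qop, HA1, HA2, nonce, ncvalue, cnonce):
        if not qop:  # covers qop=None and the malformed qop=''
            return KD(HA1, "%s:%s" % (nonce, HA2))
        if qop == 'auth' or 'auth' in qop.split(','):
            return KD(HA1, "%s:%s:%s:%s:%s"
                      % (nonce, ncvalue, cnonce, 'auth', HA2))
        return None  # e.g. auth-int is not supported
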
diff --git a/requests/cookies.py b/requests/cookies.py
index 88b478c..b85fd2b 100644
--- a/requests/cookies.py
+++ b/requests/cookies.py
@@ -8,6 +8,7 @@ requests.utils imports from here, so be careful with imports.
import copy
import time
+import calendar
import collections
from .compat import cookielib, urlparse, urlunparse, Morsel
@@ -143,10 +144,13 @@ def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
"""
clearables = []
for cookie in cookiejar:
- if cookie.name == name:
- if domain is None or domain == cookie.domain:
- if path is None or path == cookie.path:
- clearables.append((cookie.domain, cookie.path, cookie.name))
+ if cookie.name != name:
+ continue
+ if domain is not None and domain != cookie.domain:
+ continue
+ if path is not None and path != cookie.path:
+ continue
+ clearables.append((cookie.domain, cookie.path, cookie.name))
for domain, path, name in clearables:
cookiejar.clear(domain, path, name)
@@ -365,7 +369,7 @@ def _copy_cookie_jar(jar):
return None
if hasattr(jar, 'copy'):
- # We're dealing with an instane of RequestsCookieJar
+ # We're dealing with an instance of RequestsCookieJar
return jar.copy()
# We're dealing with a generic CookieJar instance
new_jar = copy.copy(jar)
@@ -421,8 +425,9 @@ def morsel_to_cookie(morsel):
raise TypeError('max-age: %s must be integer' % morsel['max-age'])
elif morsel['expires']:
time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
- expires = int(time.mktime(
- time.strptime(morsel['expires'], time_template)) - time.timezone)
+ expires = calendar.timegm(
+ time.strptime(morsel['expires'], time_template)
+ )
return create_cookie(
comment=morsel['comment'],
comment_url=bool(morsel['comment']),
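
[Editor's note] Why calendar.timegm is the right call here (a worked sketch): the Expires attribute is always given in GMT, and calendar.timegm interprets a struct_time directly as UTC, whereas the old ``time.mktime(...) - time.timezone`` detour went through the local timezone settings.

    import calendar
    import time

    time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
    expires = calendar.timegm(
        time.strptime('Sat, 02-Jan-2016 03:04:05 GMT', time_template))
    # 1451703845, independent of the local timezone
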
diff --git a/requests/exceptions.py b/requests/exceptions.py
index 89135a8..ba0b910 100644
--- a/requests/exceptions.py
+++ b/requests/exceptions.py
@@ -97,3 +97,18 @@ class StreamConsumedError(RequestException, TypeError):
class RetryError(RequestException):
"""Custom retries logic failed"""
+
+
+# Warnings
+
+
+class RequestsWarning(Warning):
+ """Base warning for Requests."""
+ pass
+
+
+class FileModeWarning(RequestsWarning, DeprecationWarning):
+ """
+ A file was opened in text mode, but Requests determined its binary length.
+ """
+ pass
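
[Editor's note] A usage sketch: the new warning category filters like any other, and since FileModeWarning is also a DeprecationWarning, existing deprecation filters catch it too.

    import warnings
    import requests

    warnings.simplefilter('ignore', requests.exceptions.FileModeWarning)
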
diff --git a/requests/models.py b/requests/models.py
index 2727bee..4bcbc54 100644
--- a/requests/models.py
+++ b/requests/models.py
@@ -324,7 +324,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
def prepare_url(self, url, params):
"""Prepares the given HTTP URL."""
#: Accept objects that have string representations.
- #: We're unable to blindy call unicode/str functions
+ #: We're unable to blindly call unicode/str functions
#: as this will include the bytestring indicator (b'')
#: on python 3.x.
#: https://github.com/kennethreitz/requests/pull/2238
@@ -385,6 +385,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if isinstance(fragment, str):
fragment = fragment.encode('utf-8')
+ if isinstance(params, (str, bytes)):
+ params = to_native_string(params)
+
enc_params = self._encode_params(params)
if enc_params:
if query:
@@ -434,7 +437,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if files:
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
- if length is not None:
+ if length:
self.headers['Content-Length'] = builtin_str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
@@ -631,7 +634,7 @@ class Response(object):
@property
def is_permanent_redirect(self):
- """True if this Response one of the permanant versions of redirect"""
+ """True if this Response one of the permanent versions of redirect"""
return ('location' in self.headers and self.status_code in (codes.moved_permanently, codes.permanent_redirect))
@property
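
[Editor's note] The ``params`` change above, seen end to end (this mirrors the test added at the bottom of this diff):

    import requests

    # On Python 3, bytes params are decoded as UTF-8 instead of being
    # rendered with the b'' prefix:
    req = requests.Request('GET', 'http://example.com',
                           params=b'test=foo').prepare()
    assert req.url == 'http://example.com/?test=foo'
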
diff --git a/requests/packages/urllib3/__init__.py b/requests/packages/urllib3/__init__.py
index 86bb71d..e43991a 100644
--- a/requests/packages/urllib3/__init__.py
+++ b/requests/packages/urllib3/__init__.py
@@ -2,10 +2,8 @@
urllib3 - Thread-safe connection pooling and re-using.
"""
-__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
-__license__ = 'MIT'
-__version__ = '1.12'
-
+from __future__ import absolute_import
+import warnings
from .connectionpool import (
HTTPConnectionPool,
@@ -32,8 +30,30 @@ except ImportError:
def emit(self, record):
pass
+__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
+__license__ = 'MIT'
+__version__ = '1.13.1'
+
+__all__ = (
+ 'HTTPConnectionPool',
+ 'HTTPSConnectionPool',
+ 'PoolManager',
+ 'ProxyManager',
+ 'HTTPResponse',
+ 'Retry',
+ 'Timeout',
+ 'add_stderr_logger',
+ 'connection_from_url',
+ 'disable_warnings',
+ 'encode_multipart_formdata',
+ 'get_host',
+ 'make_headers',
+ 'proxy_from_url',
+)
+
logging.getLogger(__name__).addHandler(NullHandler())
+
def add_stderr_logger(level=logging.DEBUG):
"""
Helper for quickly adding a StreamHandler to the logger. Useful for
@@ -55,7 +75,6 @@ def add_stderr_logger(level=logging.DEBUG):
del NullHandler
-import warnings
# SecurityWarning's always go off by default.
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# SubjectAltNameWarning's should go off once per host
@@ -63,6 +82,9 @@ warnings.simplefilter('default', exceptions.SubjectAltNameWarning)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
append=True)
+# SNIMissingWarnings should go off only once.
+warnings.simplefilter('default', exceptions.SNIMissingWarning)
+
def disable_warnings(category=exceptions.HTTPWarning):
"""
diff --git a/requests/packages/urllib3/_collections.py b/requests/packages/urllib3/_collections.py
index b68b9a5..67f3ce9 100644
--- a/requests/packages/urllib3/_collections.py
+++ b/requests/packages/urllib3/_collections.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
from collections import Mapping, MutableMapping
try:
from threading import RLock
@@ -167,7 +168,7 @@ class HTTPHeaderDict(MutableMapping):
def __ne__(self, other):
return not self.__eq__(other)
- if not PY3: # Python 2
+ if not PY3: # Python 2
iterkeys = MutableMapping.iterkeys
itervalues = MutableMapping.itervalues
@@ -234,7 +235,7 @@ class HTTPHeaderDict(MutableMapping):
"""
if len(args) > 1:
raise TypeError("extend() takes at most 1 positional "
- "arguments ({} given)".format(len(args)))
+ "arguments ({0} given)".format(len(args)))
other = args[0] if len(args) >= 1 else ()
if isinstance(other, HTTPHeaderDict):
@@ -304,7 +305,7 @@ class HTTPHeaderDict(MutableMapping):
return list(self.iteritems())
@classmethod
- def from_httplib(cls, message): # Python 2
+ def from_httplib(cls, message): # Python 2
"""Read headers from a Python 2 httplib message object."""
# python2.7 does not expose a proper API for exporting multiheaders
# efficiently. This function re-reads raw lines from the message
diff --git a/requests/packages/urllib3/connection.py b/requests/packages/urllib3/connection.py
index 3eab1e2..1e4cd41 100644
--- a/requests/packages/urllib3/connection.py
+++ b/requests/packages/urllib3/connection.py
@@ -1,4 +1,6 @@
+from __future__ import absolute_import
import datetime
+import os
import sys
import socket
from socket import error as SocketError, timeout as SocketTimeout
@@ -6,18 +8,13 @@ import warnings
from .packages import six
try: # Python 3
- from http.client import HTTPConnection as _HTTPConnection, HTTPException
+ from http.client import HTTPConnection as _HTTPConnection
+ from http.client import HTTPException # noqa: unused in this module
except ImportError:
- from httplib import HTTPConnection as _HTTPConnection, HTTPException
-
-
-class DummyConnection(object):
- "Used to detect a failed ConnectionCls import."
- pass
-
+ from httplib import HTTPConnection as _HTTPConnection
+ from httplib import HTTPException # noqa: unused in this module
try: # Compiled with SSL?
- HTTPSConnection = DummyConnection
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
@@ -61,6 +58,11 @@ port_by_scheme = {
RECENT_DATE = datetime.date(2014, 1, 1)
+class DummyConnection(object):
+ """Used to detect a failed ConnectionCls import."""
+ pass
+
+
class HTTPConnection(_HTTPConnection, object):
"""
Based on httplib.HTTPConnection but provides an extra constructor
@@ -205,10 +207,10 @@ class VerifiedHTTPSConnection(HTTPSConnection):
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
- self.ca_certs = ca_certs
- self.ca_cert_dir = ca_cert_dir
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
def connect(self):
# Add certificate verification
@@ -263,10 +265,19 @@ class VerifiedHTTPSConnection(HTTPSConnection):
'for details.)'.format(hostname)),
SubjectAltNameWarning
)
- match_hostname(cert, self.assert_hostname or hostname)
- self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED
- or self.assert_fingerprint is not None)
+ # In case the hostname is an IPv6 address, strip the square
+ # brackets from it before using it to validate. This is because
+ # a certificate with an IPv6 address in it won't have square
+ # brackets around that address. Sadly, match_hostname won't do this
+ # for us: it expects the plain host part without any extra work
+ # that might have been done to make it palatable to httplib.
+ asserted_hostname = self.assert_hostname or hostname
+ asserted_hostname = asserted_hostname.strip('[]')
+ match_hostname(cert, asserted_hostname)
+
+ self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
+ self.assert_fingerprint is not None)
if ssl:
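
[Editor's note] The bracket handling in one line (illustrative): certificates carry the bare IPv6 address, so the brackets that httplib needs are stripped only at hostname-matching time, and only from the ends of the string.

    asserted_hostname = '[2001:db8::1]'.strip('[]')
    assert asserted_hostname == '2001:db8::1'
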
diff --git a/requests/packages/urllib3/connectionpool.py b/requests/packages/urllib3/connectionpool.py
index b38ac68..995b416 100644
--- a/requests/packages/urllib3/connectionpool.py
+++ b/requests/packages/urllib3/connectionpool.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import errno
import logging
import sys
@@ -10,7 +11,8 @@ try: # Python 3
from queue import LifoQueue, Empty, Full
except ImportError:
from Queue import LifoQueue, Empty, Full
- import Queue as _ # Platform-specific: Windows
+ # Queue is imported for side effects on MS Windows
+ import Queue as _unused_module_Queue # noqa: unused
from .exceptions import (
@@ -22,7 +24,6 @@ from .exceptions import (
LocationValueError,
MaxRetryError,
ProxyError,
- ConnectTimeoutError,
ReadTimeoutError,
SSLError,
TimeoutError,
@@ -35,7 +36,7 @@ from .connection import (
port_by_scheme,
DummyConnection,
HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
- HTTPException, BaseSSLError, ConnectionError
+ HTTPException, BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse
@@ -54,7 +55,7 @@ log = logging.getLogger(__name__)
_Default = object()
-## Pool objects
+# Pool objects
class ConnectionPool(object):
"""
Base class for all connection pools, such as
@@ -68,8 +69,7 @@ class ConnectionPool(object):
if not host:
raise LocationValueError("No host specified.")
- # httplib doesn't like it when we include brackets in ipv6 addresses
- self.host = host.strip('[]')
+ self.host = host
self.port = port
def __str__(self):
@@ -645,22 +645,24 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
return response
log.info("Redirecting %s -> %s" % (url, redirect_location))
- return self.urlopen(method, redirect_location, body, headers,
- retries=retries, redirect=redirect,
- assert_same_host=assert_same_host,
- timeout=timeout, pool_timeout=pool_timeout,
- release_conn=release_conn, **response_kw)
+ return self.urlopen(
+ method, redirect_location, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
# Check if we should retry the HTTP response.
if retries.is_forced_retry(method, status_code=response.status):
retries = retries.increment(method, url, response=response, _pool=self)
retries.sleep()
log.info("Forced retry: %s" % url)
- return self.urlopen(method, url, body, headers,
- retries=retries, redirect=redirect,
- assert_same_host=assert_same_host,
- timeout=timeout, pool_timeout=pool_timeout,
- release_conn=release_conn, **response_kw)
+ return self.urlopen(
+ method, url, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
return response
diff --git a/requests/packages/urllib3/contrib/appengine.py b/requests/packages/urllib3/contrib/appengine.py
index ed9d8b8..884cdb2 100644
--- a/requests/packages/urllib3/contrib/appengine.py
+++ b/requests/packages/urllib3/contrib/appengine.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import logging
import os
import warnings
@@ -60,7 +61,7 @@ class AppEngineManager(RequestMethods):
raise AppEnginePlatformError(
"URLFetch is not available in this environment.")
- if is_prod_appengine_v2():
+ if is_prod_appengine_mvms():
raise AppEnginePlatformError(
"Use normal urllib3.PoolManager instead of AppEngineManager"
"on Managed VMs, as using URLFetch is not necessary in "
@@ -108,14 +109,14 @@ class AppEngineManager(RequestMethods):
raise TimeoutError(self, e)
except urlfetch.InvalidURLError as e:
- if 'too large' in e.message:
+ if 'too large' in str(e):
raise AppEnginePlatformError(
"URLFetch request too large, URLFetch only "
"supports requests up to 10mb in size.", e)
raise ProtocolError(e)
except urlfetch.DownloadError as e:
- if 'Too many redirects' in e.message:
+ if 'Too many redirects' in str(e):
raise MaxRetryError(self, url, reason=e)
raise ProtocolError(e)
@@ -155,7 +156,7 @@ class AppEngineManager(RequestMethods):
def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
- if is_prod_appengine_v1():
+ if is_prod_appengine():
# Production GAE handles deflate encoding automatically, but does
# not remove the encoding header.
content_encoding = urlfetch_resp.headers.get('content-encoding')
@@ -176,7 +177,7 @@ class AppEngineManager(RequestMethods):
if timeout is Timeout.DEFAULT_TIMEOUT:
return 5 # 5s is the default timeout for URLFetch.
if isinstance(timeout, Timeout):
- if not timeout.read is timeout.connect:
+ if timeout.read is not timeout.connect:
warnings.warn(
"URLFetch does not support granular timeout settings, "
"reverting to total timeout.", AppEnginePlatformWarning)
@@ -199,12 +200,12 @@ class AppEngineManager(RequestMethods):
def is_appengine():
return (is_local_appengine() or
- is_prod_appengine_v1() or
- is_prod_appengine_v2())
+ is_prod_appengine() or
+ is_prod_appengine_mvms())
def is_appengine_sandbox():
- return is_appengine() and not is_prod_appengine_v2()
+ return is_appengine() and not is_prod_appengine_mvms()
def is_local_appengine():
@@ -212,11 +213,11 @@ def is_local_appengine():
'Development/' in os.environ['SERVER_SOFTWARE'])
-def is_prod_appengine_v1():
+def is_prod_appengine():
return ('APPENGINE_RUNTIME' in os.environ and
'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
- not is_prod_appengine_v2())
+ not is_prod_appengine_mvms())
-def is_prod_appengine_v2():
+def is_prod_appengine_mvms():
return os.environ.get('GAE_VM', False) == 'true'
diff --git a/requests/packages/urllib3/contrib/ntlmpool.py b/requests/packages/urllib3/contrib/ntlmpool.py
index c6b266f..c136a23 100644
--- a/requests/packages/urllib3/contrib/ntlmpool.py
+++ b/requests/packages/urllib3/contrib/ntlmpool.py
@@ -3,6 +3,7 @@ NTLM authenticating pool, contributed by erikcederstran
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
+from __future__ import absolute_import
try:
from http.client import HTTPSConnection
diff --git a/requests/packages/urllib3/contrib/pyopenssl.py b/requests/packages/urllib3/contrib/pyopenssl.py
index c20ae46..5996153 100644
--- a/requests/packages/urllib3/contrib/pyopenssl.py
+++ b/requests/packages/urllib3/contrib/pyopenssl.py
@@ -43,6 +43,7 @@ Module Variables
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
'''
+from __future__ import absolute_import
try:
from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
@@ -53,7 +54,7 @@ except SyntaxError as e:
import OpenSSL.SSL
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.type import univ, constraint
-from socket import _fileobject, timeout
+from socket import _fileobject, timeout, error as SocketError
import ssl
import select
@@ -71,6 +72,12 @@ _openssl_versions = {
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}
+if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
+
+if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
+
try:
_openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
except AttributeError:
@@ -79,8 +86,8 @@ except AttributeError:
_openssl_verify = {
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
- ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
- + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
+ ssl.CERT_REQUIRED:
+ OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
@@ -88,12 +95,6 @@ DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384
-try:
- _ = memoryview
- has_memoryview = True
-except NameError:
- has_memoryview = False
-
orig_util_HAS_SNI = util.HAS_SNI
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
@@ -112,7 +113,7 @@ def extract_from_urllib3():
util.HAS_SNI = orig_util_HAS_SNI
-### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
+# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
class SubjectAltName(BaseSubjectAltName):
'''ASN.1 implementation for subjectAltNames support'''
@@ -123,7 +124,7 @@ class SubjectAltName(BaseSubjectAltName):
constraint.ValueSizeConstraint(1, 1024)
-### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
+# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
def get_subj_alt_name(peer_cert):
# Search through extensions
dns_name = []
@@ -181,7 +182,7 @@ class WrappedSocket(object):
if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
return b''
else:
- raise
+ raise SocketError(e)
except OpenSSL.SSL.ZeroReturnError as e:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return b''
@@ -212,12 +213,9 @@ class WrappedSocket(object):
continue
def sendall(self, data):
- if has_memoryview and not isinstance(data, memoryview):
- data = memoryview(data)
-
total_sent = 0
while total_sent < len(data):
- sent = self._send_until_done(data[total_sent:total_sent+SSL_WRITE_BLOCKSIZE])
+ sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
total_sent += sent
def shutdown(self):
@@ -226,7 +224,10 @@ class WrappedSocket(object):
def close(self):
if self._makefile_refs < 1:
- return self.connection.close()
+ try:
+ return self.connection.close()
+ except OpenSSL.SSL.Error:
+ return
else:
self._makefile_refs -= 1
diff --git a/requests/packages/urllib3/exceptions.py b/requests/packages/urllib3/exceptions.py
index 9607d65..8e07eb6 100644
--- a/requests/packages/urllib3/exceptions.py
+++ b/requests/packages/urllib3/exceptions.py
@@ -1,16 +1,17 @@
+from __future__ import absolute_import
+# Base Exceptions
-## Base Exceptions
class HTTPError(Exception):
"Base exception used by this module."
pass
+
class HTTPWarning(Warning):
"Base warning used by this module."
pass
-
class PoolError(HTTPError):
"Base exception for errors caused within a pool."
def __init__(self, pool, message):
@@ -57,7 +58,7 @@ class ProtocolError(HTTPError):
ConnectionError = ProtocolError
-## Leaf Exceptions
+# Leaf Exceptions
class MaxRetryError(RequestError):
"""Raised when the maximum number of retries is exceeded.
@@ -112,10 +113,12 @@ class ConnectTimeoutError(TimeoutError):
"Raised when a socket timeout occurs while connecting to a server"
pass
+
class NewConnectionError(ConnectTimeoutError, PoolError):
"Raised when we fail to establish a new connection. Usually ECONNREFUSED."
pass
+
class EmptyPoolError(PoolError):
"Raised when a pool runs out of connections and no more are allowed."
pass
@@ -172,6 +175,11 @@ class InsecurePlatformWarning(SecurityWarning):
pass
+class SNIMissingWarning(HTTPWarning):
+ "Warned when making a HTTPS request without SNI available."
+ pass
+
+
class ResponseNotChunked(ProtocolError, ValueError):
"Response needs to be chunked in order to read it as chunks."
pass
diff --git a/requests/packages/urllib3/fields.py b/requests/packages/urllib3/fields.py
index c853f8d..c7d4811 100644
--- a/requests/packages/urllib3/fields.py
+++ b/requests/packages/urllib3/fields.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import email.utils
import mimetypes
diff --git a/requests/packages/urllib3/filepost.py b/requests/packages/urllib3/filepost.py
index 0fbf488..97a2843 100644
--- a/requests/packages/urllib3/filepost.py
+++ b/requests/packages/urllib3/filepost.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import codecs
from uuid import uuid4
diff --git a/requests/packages/urllib3/packages/__init__.py b/requests/packages/urllib3/packages/__init__.py
index 37e8351..170e974 100644
--- a/requests/packages/urllib3/packages/__init__.py
+++ b/requests/packages/urllib3/packages/__init__.py
@@ -2,3 +2,4 @@ from __future__ import absolute_import
from . import ssl_match_hostname
+__all__ = ('ssl_match_hostname', )
diff --git a/requests/packages/urllib3/poolmanager.py b/requests/packages/urllib3/poolmanager.py
index 76b6a12..f13e673 100644
--- a/requests/packages/urllib3/poolmanager.py
+++ b/requests/packages/urllib3/poolmanager.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import logging
try: # Python 3
@@ -25,7 +26,7 @@ pool_classes_by_scheme = {
log = logging.getLogger(__name__)
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
- 'ssl_version')
+ 'ssl_version', 'ca_cert_dir')
class PoolManager(RequestMethods):
diff --git a/requests/packages/urllib3/request.py b/requests/packages/urllib3/request.py
index a1a12bc..d5aa62d 100644
--- a/requests/packages/urllib3/request.py
+++ b/requests/packages/urllib3/request.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
try:
from urllib.parse import urlencode
except ImportError:
@@ -133,7 +134,8 @@ class RequestMethods(object):
if fields:
if 'body' in urlopen_kw:
- raise TypeError('request got values for both \'fields\' and \'body\', can only specify one.')
+ raise TypeError(
+ "request got values for both 'fields' and 'body', can only specify one.")
if encode_multipart:
body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
diff --git a/requests/packages/urllib3/response.py b/requests/packages/urllib3/response.py
index 788eb6c..8f2a1b5 100644
--- a/requests/packages/urllib3/response.py
+++ b/requests/packages/urllib3/response.py
@@ -1,7 +1,9 @@
+from __future__ import absolute_import
from contextlib import contextmanager
import zlib
import io
from socket import timeout as SocketTimeout
+from socket import error as SocketError
from ._collections import HTTPHeaderDict
from .exceptions import (
@@ -130,8 +132,8 @@ class HTTPResponse(io.IOBase):
if "chunked" in encodings:
self.chunked = True
- # We certainly don't want to preload content when the response is chunked.
- if not self.chunked and preload_content and not self._body:
+ # If requested, preload the body.
+ if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
@@ -194,12 +196,22 @@ class HTTPResponse(io.IOBase):
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding, e)
- if flush_decoder and decode_content and self._decoder:
- buf = self._decoder.decompress(binary_type())
- data += buf + self._decoder.flush()
+ if flush_decoder and decode_content:
+ data += self._flush_decoder()
return data
+ def _flush_decoder(self):
+ """
+ Flushes the decoder. Should only be called if the decoder is actually
+ being used.
+ """
+ if self._decoder:
+ buf = self._decoder.decompress(b'')
+ return buf + self._decoder.flush()
+
+ return b''
+
@contextmanager
def _error_catcher(self):
"""
@@ -227,15 +239,22 @@ class HTTPResponse(io.IOBase):
raise ReadTimeoutError(self._pool, None, 'Read timed out.')
- except HTTPException as e:
+ except (HTTPException, SocketError) as e:
# This includes IncompleteRead.
raise ProtocolError('Connection broken: %r' % e, e)
+
except Exception:
# The response may not be closed but we're not going to use it anymore
# so close it now to ensure that the connection is released back to the pool.
if self._original_response and not self._original_response.isclosed():
self._original_response.close()
+ # Closing the response may not actually be sufficient to close
+ # everything, so if we have a hold of the connection close that
+ # too.
+ if self._connection is not None:
+ self._connection.close()
+
raise
finally:
if self._original_response and self._original_response.isclosed():
@@ -301,7 +320,6 @@ class HTTPResponse(io.IOBase):
return data
-
def stream(self, amt=2**16, decode_content=None):
"""
A generator wrapper for the read() method. A call will block until
@@ -340,9 +358,9 @@ class HTTPResponse(io.IOBase):
headers = r.msg
if not isinstance(headers, HTTPHeaderDict):
- if PY3: # Python 3
+ if PY3: # Python 3
headers = HTTPHeaderDict(headers.items())
- else: # Python 2
+ else: # Python 2
headers = HTTPHeaderDict.from_httplib(headers)
# HTTPResponse objects in Python 3 don't have a .strict attribute
@@ -454,7 +472,8 @@ class HTTPResponse(io.IOBase):
self._init_decoder()
# FIXME: Rewrite this method and make it a class with a better structured logic.
if not self.chunked:
- raise ResponseNotChunked("Response is not chunked. "
+ raise ResponseNotChunked(
+ "Response is not chunked. "
"Header 'transfer-encoding: chunked' is missing.")
# Don't bother reading the body of a HEAD request.
@@ -468,8 +487,18 @@ class HTTPResponse(io.IOBase):
if self.chunk_left == 0:
break
chunk = self._handle_chunk(amt)
- yield self._decode(chunk, decode_content=decode_content,
- flush_decoder=True)
+ decoded = self._decode(chunk, decode_content=decode_content,
+ flush_decoder=False)
+ if decoded:
+ yield decoded
+
+ if decode_content:
+ # On CPython and PyPy, we should never need to flush the
+ # decoder. However, on Jython we *might* need to, so
+ # lets defensively do it anyway.
+ decoded = self._flush_decoder()
+ if decoded: # Platform-specific: Jython.
+ yield decoded
# Chunk content ends with \r\n: discard it.
while True:
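
[Editor's note] From the caller's side, the reworked chunked reader simply means streaming behaves as expected with compressed bodies (a sketch; the URL is illustrative and needs network access):

    import requests

    r = requests.get('http://httpbin.org/stream/5', stream=True)
    for chunk in r.iter_content(chunk_size=1024):
        # Decoded chunks arrive as they are read; empty decoder output
        # is skipped, and the decoder is flushed at the end (Jython).
        print(len(chunk))
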
diff --git a/requests/packages/urllib3/util/__init__.py b/requests/packages/urllib3/util/__init__.py
index 8becc81..c6c6243 100644
--- a/requests/packages/urllib3/util/__init__.py
+++ b/requests/packages/urllib3/util/__init__.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
# For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped
from .request import make_headers
@@ -22,3 +23,22 @@ from .url import (
split_first,
Url,
)
+
+__all__ = (
+ 'HAS_SNI',
+ 'SSLContext',
+ 'Retry',
+ 'Timeout',
+ 'Url',
+ 'assert_fingerprint',
+ 'current_time',
+ 'is_connection_dropped',
+ 'is_fp_closed',
+ 'get_host',
+ 'parse_url',
+ 'make_headers',
+ 'resolve_cert_reqs',
+ 'resolve_ssl_version',
+ 'split_first',
+ 'ssl_wrap_socket',
+)
diff --git a/requests/packages/urllib3/util/connection.py b/requests/packages/urllib3/util/connection.py
index 4f2f0f1..01a4812 100644
--- a/requests/packages/urllib3/util/connection.py
+++ b/requests/packages/urllib3/util/connection.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import socket
try:
from select import poll, POLLIN
diff --git a/requests/packages/urllib3/util/request.py b/requests/packages/urllib3/util/request.py
index bc64f6b..7377931 100644
--- a/requests/packages/urllib3/util/request.py
+++ b/requests/packages/urllib3/util/request.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
from base64 import b64encode
from ..packages.six import b
diff --git a/requests/packages/urllib3/util/response.py b/requests/packages/urllib3/util/response.py
index 2c1de15..bc72327 100644
--- a/requests/packages/urllib3/util/response.py
+++ b/requests/packages/urllib3/util/response.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
from ..packages.six.moves import http_client as httplib
from ..exceptions import HeaderParsingError
@@ -44,7 +45,7 @@ def assert_header_parsing(headers):
# This will fail silently if we pass in the wrong kind of parameter.
# To make debugging easier add an explicit check.
if not isinstance(headers, httplib.HTTPMessage):
- raise TypeError('expected httplib.Message, got {}.'.format(
+ raise TypeError('expected httplib.Message, got {0}.'.format(
type(headers)))
defects = getattr(headers, 'defects', None)
diff --git a/requests/packages/urllib3/util/retry.py b/requests/packages/urllib3/util/retry.py
index 1fb1f23..03a0124 100644
--- a/requests/packages/urllib3/util/retry.py
+++ b/requests/packages/urllib3/util/retry.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import time
import logging
@@ -126,7 +127,7 @@ class Retry(object):
self.method_whitelist = method_whitelist
self.backoff_factor = backoff_factor
self.raise_on_redirect = raise_on_redirect
- self._observed_errors = _observed_errors # TODO: use .history instead?
+ self._observed_errors = _observed_errors # TODO: use .history instead?
def new(self, **kw):
params = dict(
@@ -206,7 +207,8 @@ class Retry(object):
return min(retry_counts) < 0
- def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None):
+ def increment(self, method=None, url=None, response=None, error=None,
+ _pool=None, _stacktrace=None):
""" Return a new Retry object with incremented retry counters.
:param response: A response object, or None, if the server did not
@@ -274,7 +276,6 @@ class Retry(object):
return new_retry
-
def __repr__(self):
return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
'read={self.read}, redirect={self.redirect})').format(
diff --git a/requests/packages/urllib3/util/ssl_.py b/requests/packages/urllib3/util/ssl_.py
index 47b817e..67f8344 100644
--- a/requests/packages/urllib3/util/ssl_.py
+++ b/requests/packages/urllib3/util/ssl_.py
@@ -1,7 +1,12 @@
+from __future__ import absolute_import
+import errno
+import warnings
+import hmac
+
from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256
-from ..exceptions import SSLError, InsecurePlatformWarning
+from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
SSLContext = None
@@ -15,8 +20,23 @@ HASHFUNC_MAP = {
64: sha256,
}
-import errno
-import warnings
+
+def _const_compare_digest_backport(a, b):
+ """
+ Compare two digests of equal length in constant time.
+
+ The digests must be of type str/bytes.
+ Returns True if the digests match, and False otherwise.
+ """
+ result = abs(len(a) - len(b))
+ for l, r in zip(bytearray(a), bytearray(b)):
+ result |= l ^ r
+ return result == 0
+
+
+_const_compare_digest = getattr(hmac, 'compare_digest',
+ _const_compare_digest_backport)
+
try: # Test for SSL features
import ssl
@@ -134,7 +154,7 @@ def assert_fingerprint(cert, fingerprint):
cert_digest = hashfunc(cert).digest()
- if cert_digest != fingerprint_bytes:
+ if not _const_compare_digest(cert_digest, fingerprint_bytes):
raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
.format(fingerprint, hexlify(cert_digest)))
@@ -283,4 +303,15 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
context.load_cert_chain(certfile, keyfile)
if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
return context.wrap_socket(sock, server_hostname=server_hostname)
+
+ warnings.warn(
+ 'An HTTPS request has been made, but the SNI (Subject Name '
+ 'Indication) extension to TLS is not available on this platform. '
+ 'This may cause the server to present an incorrect TLS '
+ 'certificate, which can cause validation failures. For more '
+ 'information, see '
+ 'https://urllib3.readthedocs.org/en/latest/security.html'
+ '#snimissingwarning.',
+ SNIMissingWarning
+ )
return context.wrap_socket(sock)
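
[Editor's note] The fingerprint change swaps a timing-sensitive ``!=`` for a constant-time comparison; the selection idiom, standalone (hmac.compare_digest exists on Python 2.7.7+ and 3.3+):

    import hmac

    def _const_compare_digest_backport(a, b):
        # Inspect every byte so the run time does not leak the
        # position of the first mismatch.
        result = abs(len(a) - len(b))
        for l, r in zip(bytearray(a), bytearray(b)):
            result |= l ^ r
        return result == 0

    _const_compare_digest = getattr(hmac, 'compare_digest',
                                    _const_compare_digest_backport)
    assert _const_compare_digest(b'\x01\x02', b'\x01\x02')
    assert not _const_compare_digest(b'\x01\x02', b'\x01\x03')
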
diff --git a/requests/packages/urllib3/util/timeout.py b/requests/packages/urllib3/util/timeout.py
index ea7027f..ff62f47 100644
--- a/requests/packages/urllib3/util/timeout.py
+++ b/requests/packages/urllib3/util/timeout.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
@@ -9,6 +10,7 @@ from ..exceptions import TimeoutStateError
# urllib3
_Default = object()
+
def current_time():
"""
Retrieve the current time. This function is mocked out in unit testing.
@@ -226,9 +228,9 @@ class Timeout(object):
has not yet been called on this object.
"""
if (self.total is not None and
- self.total is not self.DEFAULT_TIMEOUT and
- self._read is not None and
- self._read is not self.DEFAULT_TIMEOUT):
+ self.total is not self.DEFAULT_TIMEOUT and
+ self._read is not None and
+ self._read is not self.DEFAULT_TIMEOUT):
# In case the connect timeout has not yet been established.
if self._start_connect is None:
return self._read
diff --git a/requests/packages/urllib3/util/url.py b/requests/packages/urllib3/util/url.py
index e58050c..e996204 100644
--- a/requests/packages/urllib3/util/url.py
+++ b/requests/packages/urllib3/util/url.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
from collections import namedtuple
from ..exceptions import LocationParseError
@@ -85,6 +86,7 @@ class Url(namedtuple('Url', url_attrs)):
def __str__(self):
return self.url
+
def split_first(s, delims):
"""
Given a string and an iterable of delimiters, split on the first found
@@ -115,7 +117,7 @@ def split_first(s, delims):
if min_idx is None or min_idx < 0:
return s, '', None
- return s[:min_idx], s[min_idx+1:], min_delim
+ return s[:min_idx], s[min_idx + 1:], min_delim
def parse_url(url):
@@ -206,6 +208,7 @@ def parse_url(url):
return Url(scheme, auth, host, port, path, query, fragment)
+
def get_host(url):
"""
Deprecated. Use :func:`.parse_url` instead.
diff --git a/requests/sessions.py b/requests/sessions.py
index ad63902..9eaa36a 100644
--- a/requests/sessions.py
+++ b/requests/sessions.py
@@ -273,13 +273,13 @@ class Session(SessionRedirectMixin):
>>> import requests
>>> s = requests.Session()
>>> s.get('http://httpbin.org/get')
- 200
+ <Response [200]>
Or as a context manager::
>>> with requests.Session() as s:
>>> s.get('http://httpbin.org/get')
- 200
+ <Response [200]>
"""
__attrs__ = [
@@ -325,7 +325,7 @@ class Session(SessionRedirectMixin):
#: limit, a :class:`TooManyRedirects` exception is raised.
self.max_redirects = DEFAULT_REDIRECT_LIMIT
- #: Trust environement settings for proxy configuration, default
+ #: Trust environment settings for proxy configuration, default
#: authentication and similar.
self.trust_env = True
@@ -433,8 +433,8 @@ class Session(SessionRedirectMixin):
hostname to the URL of the proxy.
:param stream: (optional) whether to immediately download the response
content. Defaults to ``False``.
- :param verify: (optional) if ``True``, the SSL cert will be verified.
- A CA_BUNDLE path can also be provided.
+ :param verify: (optional) whether the SSL cert will be verified.
+ A CA_BUNDLE path can also be provided. Defaults to ``True``.
:param cert: (optional) if String, path to ssl client cert file (.pem).
If Tuple, ('cert', 'key') pair.
"""
@@ -634,7 +634,7 @@ class Session(SessionRedirectMixin):
'cert': cert}
def get_adapter(self, url):
- """Returns the appropriate connnection adapter for the given URL."""
+ """Returns the appropriate connection adapter for the given URL."""
for (prefix, adapter) in self.adapters.items():
if url.lower().startswith(prefix):
diff --git a/requests/status_codes.py b/requests/status_codes.py
index 1db7fc0..a852574 100644
--- a/requests/status_codes.py
+++ b/requests/status_codes.py
@@ -78,6 +78,7 @@ _codes = {
507: ('insufficient_storage',),
509: ('bandwidth_limit_exceeded', 'bandwidth'),
510: ('not_extended',),
+ 511: ('network_authentication_required', 'network_auth', 'network_authentication'),
}
codes = LookupDict(name='status_codes')
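
[Editor's note] All three new aliases resolve through the codes lookup (a quick check):

    import requests

    assert requests.codes.network_authentication_required == 511
    assert requests.codes.network_auth == 511
    assert requests.codes.network_authentication == 511
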
diff --git a/requests/utils.py b/requests/utils.py
index 4a8c6d7..c5c3fd0 100644
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -29,7 +29,7 @@ from .compat import (quote, urlparse, bytes, str, OrderedDict, unquote, is_py2,
basestring)
from .cookies import RequestsCookieJar, cookiejar_from_dict
from .structures import CaseInsensitiveDict
-from .exceptions import InvalidURL
+from .exceptions import InvalidURL, FileModeWarning
_hush_pyflakes = (RequestsCookieJar,)
@@ -48,23 +48,44 @@ def dict_to_sequence(d):
def super_len(o):
+ total_length = 0
+ current_position = 0
+
if hasattr(o, '__len__'):
- return len(o)
+ total_length = len(o)
+
+ elif hasattr(o, 'len'):
+ total_length = o.len
- if hasattr(o, 'len'):
- return o.len
+ elif hasattr(o, 'getvalue'):
+ # e.g. BytesIO, cStringIO.StringIO
+ total_length = len(o.getvalue())
- if hasattr(o, 'fileno'):
+ elif hasattr(o, 'fileno'):
try:
fileno = o.fileno()
except io.UnsupportedOperation:
pass
else:
- return os.fstat(fileno).st_size
+ total_length = os.fstat(fileno).st_size
- if hasattr(o, 'getvalue'):
- # e.g. BytesIO, cStringIO.StringIO
- return len(o.getvalue())
+ # Having used fstat to determine the file length, we need to
+ # confirm that this file was opened up in binary mode.
+ if 'b' not in o.mode:
+ warnings.warn((
+ "Requests has determined the content-length for this "
+ "request using the binary size of the file: however, the "
+ "file has been opened in text mode (i.e. without the 'b' "
+ "flag in the mode). This may lead to an incorrect "
+ "content-length. In Requests 3.0, support will be removed "
+ "for files in text mode."),
+ FileModeWarning
+ )
+
+ if hasattr(o, 'tell'):
+ current_position = o.tell()
+
+ return max(0, total_length - current_position)
def get_netrc_auth(url, raise_errors=False):
@@ -94,8 +115,12 @@ def get_netrc_auth(url, raise_errors=False):
ri = urlparse(url)
- # Strip port numbers from netloc
- host = ri.netloc.split(':')[0]
+ # Strip port numbers from netloc. This weird `if...decode` dance is
+ # used for Python 3.2, which doesn't support unicode literals.
+ splitstr = b':'
+ if isinstance(url, str):
+ splitstr = splitstr.decode('ascii')
+ host = ri.netloc.split(splitstr)[0]
try:
_netrc = netrc(netrc_path).authenticators(host)
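The split works because the separator's type mirrors the URL's: in Python 3, ``str.split`` needs a ``str`` separator and ``bytes.split`` a ``bytes`` one. A minimal illustration of the dance:

    from requests.compat import urlparse

    urlparse('http://example.com:8080/x').netloc.split(':')[0]
    # -> 'example.com'
    urlparse(b'http://example.com:8080/x').netloc.split(b':')[0]
    # -> b'example.com'
    # Mixing the two (str netloc, bytes separator) raises TypeError.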
diff --git a/requirements.txt b/requirements.txt
index 1f5297d..ad5da76 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,4 +1,6 @@
py==1.4.30
pytest==2.8.1
pytest-cov==2.1.0
+pytest-httpbin==0.0.7
+httpbin==0.4.0
wheel
diff --git a/setup.py b/setup.py
index f98f528..b7ed12b 100755
--- a/setup.py
+++ b/setup.py
@@ -62,13 +62,13 @@ setup(
'Natural Language :: English',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python',
- 'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
- 'Programming Language :: Python :: 3.4'
+ 'Programming Language :: Python :: 3.4',
+ 'Programming Language :: Python :: 3.5',
),
extras_require={
- 'security': ['pyOpenSSL', 'ndg-httpsclient', 'pyasn1'],
+ 'security': ['pyOpenSSL>=0.13', 'ndg-httpsclient', 'pyasn1'],
},
)
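Pinning ``pyOpenSSL>=0.13`` only tightens the optional extra: nothing changes for a plain install, while ``pip install 'requests[security]'`` keeps pulling in the pyOpenSSL/ndg-httpsclient/pyasn1 stack consumed by the bundled urllib3 pyopenssl contrib module.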
diff --git a/test_requests.py b/test_requests.py
index c5ac4f8..0795241 100755
--- a/test_requests.py
+++ b/test_requests.py
@@ -48,20 +48,33 @@ else:
return s.decode('unicode-escape')
-# Requests to this URL should always fail with a connection timeout (nothing
-# listening on that port)
-TARPIT = "http://10.255.255.1"
-HTTPBIN = os.environ.get('HTTPBIN_URL', 'http://httpbin.org/')
-# Issue #1483: Make sure the URL always has a trailing slash
-HTTPBIN = HTTPBIN.rstrip('/') + '/'
+@pytest.fixture
+def httpbin(httpbin):
+ # Issue #1483: Make sure the URL always has a trailing slash
+ httpbin_url = httpbin.url.rstrip('/') + '/'
+
+ def inner(*suffix):
+ return urljoin(httpbin_url, '/'.join(suffix))
+
+ return inner
+
+
+@pytest.fixture
+def httpsbin_url(httpbin_secure):
+ # Issue #1483: Make sure the URL always has a trailing slash
+ httpbin_url = httpbin_secure.url.rstrip('/') + '/'
+ def inner(*suffix):
+ return urljoin(httpbin_url, '/'.join(suffix))
-def httpbin(*suffix):
- """Returns url for HTTPBIN resource."""
- return urljoin(HTTPBIN, '/'.join(suffix))
+ return inner
-class RequestsTestCase(unittest.TestCase):
+# Requests to this URL should always fail with a connection timeout (nothing
+# listening on that port)
+TARPIT = "http://10.255.255.1"
+
+class TestRequests(object):
_multiprocess_can_split_ = True
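With the module converted to fixtures, each test that needs a live server declares ``httpbin`` (or ``httpsbin_url``) as a parameter and calls it to build URLs, as the signature changes below show. A hypothetical example (the host and port are whatever pytest-httpbin binds):

    def test_example(httpbin):
        url = httpbin('status', '200')  # e.g. 'http://127.0.0.1:<port>/status/200'
        assert requests.get(url).status_code == 200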
@@ -105,13 +118,13 @@ class RequestsTestCase(unittest.TestCase):
assert pr.url == req.url
assert pr.body == 'life=42'
- def test_no_content_length(self):
+ def test_no_content_length(self, httpbin):
get_req = requests.Request('GET', httpbin('get')).prepare()
assert 'Content-Length' not in get_req.headers
head_req = requests.Request('HEAD', httpbin('head')).prepare()
assert 'Content-Length' not in head_req.headers
- def test_override_content_length(self):
+ def test_override_content_length(self, httpbin):
headers = {
'Content-Length': 'not zero'
}
@@ -139,19 +152,28 @@ class RequestsTestCase(unittest.TestCase):
prep = session.prepare_request(request)
assert prep.url == 'http://example.com/?z=1&a=1&k=1&d=1'
- def test_mixed_case_scheme_acceptable(self):
+ def test_params_bytes_are_encoded(self):
+ request = requests.Request('GET', 'http://example.com',
+ params=b'test=foo').prepare()
+ assert request.url == 'http://example.com/?test=foo'
+
+ def test_binary_put(self):
+ request = requests.Request('PUT', 'http://example.com',
+ data=u"ööö".encode("utf-8")).prepare()
+ assert isinstance(request.body, bytes)
+
+ def test_mixed_case_scheme_acceptable(self, httpbin):
s = requests.Session()
s.proxies = getproxies()
parts = urlparse(httpbin('get'))
- schemes = ['http://', 'HTTP://', 'hTTp://', 'HttP://',
- 'https://', 'HTTPS://', 'hTTps://', 'HttPs://']
+ schemes = ['http://', 'HTTP://', 'hTTp://', 'HttP://']
for scheme in schemes:
url = scheme + parts.netloc + parts.path
r = requests.Request('GET', url)
r = s.send(r.prepare())
assert r.status_code == 200, 'failed for scheme {0}'.format(scheme)
- def test_HTTP_200_OK_GET_ALTERNATIVE(self):
+ def test_HTTP_200_OK_GET_ALTERNATIVE(self, httpbin):
r = requests.Request('GET', httpbin('get'))
s = requests.Session()
s.proxies = getproxies()
@@ -160,7 +182,7 @@ class RequestsTestCase(unittest.TestCase):
assert r.status_code == 200
- def test_HTTP_302_ALLOW_REDIRECT_GET(self):
+ def test_HTTP_302_ALLOW_REDIRECT_GET(self, httpbin):
r = requests.get(httpbin('redirect', '1'))
assert r.status_code == 200
assert r.history[0].status_code == 302
@@ -170,7 +192,7 @@ class RequestsTestCase(unittest.TestCase):
# r = requests.post(httpbin('status', '302'), data={'some': 'data'})
# self.assertEqual(r.status_code, 200)
- def test_HTTP_200_OK_GET_WITH_PARAMS(self):
+ def test_HTTP_200_OK_GET_WITH_PARAMS(self, httpbin):
heads = {'User-agent': 'Mozilla/5.0'}
r = requests.get(httpbin('user-agent'), headers=heads)
@@ -178,25 +200,25 @@ class RequestsTestCase(unittest.TestCase):
assert heads['User-agent'] in r.text
assert r.status_code == 200
- def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self):
+ def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self, httpbin):
heads = {'User-agent': 'Mozilla/5.0'}
r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)
assert r.status_code == 200
- def test_set_cookie_on_301(self):
+ def test_set_cookie_on_301(self, httpbin):
s = requests.session()
url = httpbin('cookies/set?foo=bar')
s.get(url)
assert s.cookies['foo'] == 'bar'
- def test_cookie_sent_on_redirect(self):
+ def test_cookie_sent_on_redirect(self, httpbin):
s = requests.session()
s.get(httpbin('cookies/set?foo=bar'))
r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')
assert 'Cookie' in r.json()['headers']
- def test_cookie_removed_on_expire(self):
+ def test_cookie_removed_on_expire(self, httpbin):
s = requests.session()
s.get(httpbin('cookies/set?foo=bar'))
assert s.cookies['foo'] == 'bar'
@@ -209,18 +231,18 @@ class RequestsTestCase(unittest.TestCase):
)
assert 'foo' not in s.cookies
- def test_cookie_quote_wrapped(self):
+ def test_cookie_quote_wrapped(self, httpbin):
s = requests.session()
s.get(httpbin('cookies/set?foo="bar:baz"'))
assert s.cookies['foo'] == '"bar:baz"'
- def test_cookie_persists_via_api(self):
+ def test_cookie_persists_via_api(self, httpbin):
s = requests.session()
r = s.get(httpbin('redirect/1'), cookies={'foo': 'bar'})
assert 'foo' in r.request.headers['Cookie']
assert 'foo' in r.history[0].request.headers['Cookie']
- def test_request_cookie_overrides_session_cookie(self):
+ def test_request_cookie_overrides_session_cookie(self, httpbin):
s = requests.session()
s.cookies['foo'] = 'bar'
r = s.get(httpbin('cookies'), cookies={'foo': 'baz'})
@@ -228,13 +250,13 @@ class RequestsTestCase(unittest.TestCase):
# Session cookie should not be modified
assert s.cookies['foo'] == 'bar'
- def test_request_cookies_not_persisted(self):
+ def test_request_cookies_not_persisted(self, httpbin):
s = requests.session()
s.get(httpbin('cookies'), cookies={'foo': 'baz'})
# Sending a request with cookies should not add cookies to the session
assert not s.cookies
- def test_generic_cookiejar_works(self):
+ def test_generic_cookiejar_works(self, httpbin):
cj = cookielib.CookieJar()
cookiejar_from_dict({'foo': 'bar'}, cj)
s = requests.session()
@@ -245,7 +267,7 @@ class RequestsTestCase(unittest.TestCase):
# Make sure the session cj is still the custom one
assert s.cookies is cj
- def test_param_cookiejar_works(self):
+ def test_param_cookiejar_works(self, httpbin):
cj = cookielib.CookieJar()
cookiejar_from_dict({'foo': 'bar'}, cj)
s = requests.session()
@@ -253,13 +275,13 @@ class RequestsTestCase(unittest.TestCase):
# Make sure the cookie was sent
assert r.json()['cookies']['foo'] == 'bar'
- def test_requests_in_history_are_not_overridden(self):
+ def test_requests_in_history_are_not_overridden(self, httpbin):
resp = requests.get(httpbin('redirect/3'))
urls = [r.url for r in resp.history]
req_urls = [r.request.url for r in resp.history]
assert urls == req_urls
- def test_history_is_always_a_list(self):
+ def test_history_is_always_a_list(self, httpbin):
"""
Show that even with redirects, Response.history is always a list.
"""
@@ -269,7 +291,7 @@ class RequestsTestCase(unittest.TestCase):
assert isinstance(resp.history, list)
assert not isinstance(resp.history, tuple)
- def test_headers_on_session_with_None_are_not_sent(self):
+ def test_headers_on_session_with_None_are_not_sent(self, httpbin):
"""Do not send headers in Session.headers with None values."""
ses = requests.Session()
ses.headers['Accept-Encoding'] = None
@@ -277,7 +299,7 @@ class RequestsTestCase(unittest.TestCase):
prep = ses.prepare_request(req)
assert 'Accept-Encoding' not in prep.headers
- def test_user_agent_transfers(self):
+ def test_user_agent_transfers(self, httpbin):
heads = {
'User-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)'
@@ -293,15 +315,15 @@ class RequestsTestCase(unittest.TestCase):
r = requests.get(httpbin('user-agent'), headers=heads)
assert heads['user-agent'] in r.text
- def test_HTTP_200_OK_HEAD(self):
+ def test_HTTP_200_OK_HEAD(self, httpbin):
r = requests.head(httpbin('get'))
assert r.status_code == 200
- def test_HTTP_200_OK_PUT(self):
+ def test_HTTP_200_OK_PUT(self, httpbin):
r = requests.put(httpbin('put'))
assert r.status_code == 200
- def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self):
+ def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self, httpbin):
auth = ('user', 'pass')
url = httpbin('basic-auth', 'user', 'pass')
@@ -324,42 +346,47 @@ class RequestsTestCase(unittest.TestCase):
def test_connection_error_invalid_port(self):
"""Connecting to an invalid port should raise a ConnectionError"""
with pytest.raises(ConnectionError):
- requests.get("http://httpbin.org:1", timeout=1)
+ requests.get("http://localhost:1", timeout=1)
def test_LocationParseError(self):
"""Inputing a URL that cannot be parsed should raise an InvalidURL error"""
with pytest.raises(InvalidURL):
requests.get("http://fe80::5054:ff:fe5a:fc0")
- def test_basicauth_with_netrc(self):
+ def test_basicauth_with_netrc(self, httpbin):
auth = ('user', 'pass')
wrong_auth = ('wronguser', 'wrongpass')
url = httpbin('basic-auth', 'user', 'pass')
- def get_netrc_auth_mock(url):
- return auth
- requests.sessions.get_netrc_auth = get_netrc_auth_mock
+ old_auth = requests.sessions.get_netrc_auth
- # Should use netrc and work.
- r = requests.get(url)
- assert r.status_code == 200
+ try:
+ def get_netrc_auth_mock(url):
+ return auth
+ requests.sessions.get_netrc_auth = get_netrc_auth_mock
- # Given auth should override and fail.
- r = requests.get(url, auth=wrong_auth)
- assert r.status_code == 401
+ # Should use netrc and work.
+ r = requests.get(url)
+ assert r.status_code == 200
- s = requests.session()
+ # Given auth should override and fail.
+ r = requests.get(url, auth=wrong_auth)
+ assert r.status_code == 401
- # Should use netrc and work.
- r = s.get(url)
- assert r.status_code == 200
+ s = requests.session()
- # Given auth should override and fail.
- s.auth = wrong_auth
- r = s.get(url)
- assert r.status_code == 401
+ # Should use netrc and work.
+ r = s.get(url)
+ assert r.status_code == 200
+
+ # Given auth should override and fail.
+ s.auth = wrong_auth
+ r = s.get(url)
+ assert r.status_code == 401
+ finally:
+ requests.sessions.get_netrc_auth = old_auth
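The try/finally guarantees the module-level ``get_netrc_auth`` is restored even when an assertion fails mid-test. Not what this suite does, but for comparison, the same guard written against pytest's ``monkeypatch`` fixture, which undoes the patch automatically at teardown:

    def test_basicauth_with_netrc(self, httpbin, monkeypatch):
        auth = ('user', 'pass')
        monkeypatch.setattr(requests.sessions, 'get_netrc_auth',
                            lambda url: auth)
        r = requests.get(httpbin('basic-auth', 'user', 'pass'))
        assert r.status_code == 200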
- def test_DIGEST_HTTP_200_OK_GET(self):
+ def test_DIGEST_HTTP_200_OK_GET(self, httpbin):
auth = HTTPDigestAuth('user', 'pass')
url = httpbin('digest-auth', 'auth', 'user', 'pass')
@@ -375,7 +402,7 @@ class RequestsTestCase(unittest.TestCase):
r = s.get(url)
assert r.status_code == 200
- def test_DIGEST_AUTH_RETURNS_COOKIE(self):
+ def test_DIGEST_AUTH_RETURNS_COOKIE(self, httpbin):
url = httpbin('digest-auth', 'auth', 'user', 'pass')
auth = HTTPDigestAuth('user', 'pass')
r = requests.get(url)
@@ -384,14 +411,14 @@ class RequestsTestCase(unittest.TestCase):
r = requests.get(url, auth=auth)
assert r.status_code == 200
- def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self):
+ def test_DIGEST_AUTH_SETS_SESSION_COOKIES(self, httpbin):
url = httpbin('digest-auth', 'auth', 'user', 'pass')
auth = HTTPDigestAuth('user', 'pass')
s = requests.Session()
s.get(url, auth=auth)
assert s.cookies['fake'] == 'fake_value'
- def test_DIGEST_STREAM(self):
+ def test_DIGEST_STREAM(self, httpbin):
auth = HTTPDigestAuth('user', 'pass')
url = httpbin('digest-auth', 'auth', 'user', 'pass')
@@ -402,7 +429,7 @@ class RequestsTestCase(unittest.TestCase):
r = requests.get(url, auth=auth, stream=False)
assert r.raw.read() == b''
- def test_DIGESTAUTH_WRONG_HTTP_401_GET(self):
+ def test_DIGESTAUTH_WRONG_HTTP_401_GET(self, httpbin):
auth = HTTPDigestAuth('user', 'wrongpass')
url = httpbin('digest-auth', 'auth', 'user', 'pass')
@@ -418,7 +445,7 @@ class RequestsTestCase(unittest.TestCase):
r = s.get(url)
assert r.status_code == 401
- def test_DIGESTAUTH_QUOTES_QOP_VALUE(self):
+ def test_DIGESTAUTH_QUOTES_QOP_VALUE(self, httpbin):
auth = HTTPDigestAuth('user', 'pass')
url = httpbin('digest-auth', 'auth', 'user', 'pass')
@@ -426,22 +453,7 @@ class RequestsTestCase(unittest.TestCase):
r = requests.get(url, auth=auth)
assert '"auth"' in r.request.headers['Authorization']
- def test_DIGESTAUTH_THREADED(self):
-
- auth = HTTPDigestAuth('user', 'pass')
- url = httpbin('digest-auth', 'auth', 'user', 'pass')
- session = requests.Session()
- session.auth=auth
-
- def do_request(i):
- r = session.get(url)
- assert '"auth"' in r.request.headers['Authorization']
- return 1
- if ThreadPool is not None:
- pool = ThreadPool(processes=50)
- pool.map(do_request, range(100))
-
- def test_POSTBIN_GET_POST_FILES(self):
+ def test_POSTBIN_GET_POST_FILES(self, httpbin):
url = httpbin('post')
post1 = requests.post(url).raise_for_status()
@@ -459,7 +471,7 @@ class RequestsTestCase(unittest.TestCase):
with pytest.raises(ValueError):
requests.post(url, files=['bad file data'])
- def test_POSTBIN_GET_POST_FILES_WITH_DATA(self):
+ def test_POSTBIN_GET_POST_FILES_WITH_DATA(self, httpbin):
url = httpbin('post')
post1 = requests.post(url).raise_for_status()
@@ -478,17 +490,17 @@ class RequestsTestCase(unittest.TestCase):
with pytest.raises(ValueError):
requests.post(url, files=['bad file data'])
- def test_conflicting_post_params(self):
+ def test_conflicting_post_params(self, httpbin):
url = httpbin('post')
with open('requirements.txt') as f:
pytest.raises(ValueError, "requests.post(url, data='[{\"some\": \"data\"}]', files={'some': f})")
pytest.raises(ValueError, "requests.post(url, data=u('[{\"some\": \"data\"}]'), files={'some': f})")
- def test_request_ok_set(self):
+ def test_request_ok_set(self, httpbin):
r = requests.get(httpbin('status', '404'))
assert not r.ok
- def test_status_raising(self):
+ def test_status_raising(self, httpbin):
r = requests.get(httpbin('status', '404'))
with pytest.raises(requests.exceptions.HTTPError):
r.raise_for_status()
@@ -496,11 +508,11 @@ class RequestsTestCase(unittest.TestCase):
r = requests.get(httpbin('status', '500'))
assert not r.ok
- def test_decompress_gzip(self):
+ def test_decompress_gzip(self, httpbin):
r = requests.get(httpbin('gzip'))
r.content.decode('ascii')
- def test_unicode_get(self):
+ def test_unicode_get(self, httpbin):
url = httpbin('/get')
requests.get(url, params={'foo': 'føø'})
requests.get(url, params={'føø': 'føø'})
@@ -508,29 +520,29 @@ class RequestsTestCase(unittest.TestCase):
requests.get(url, params={'foo': 'foo'})
requests.get(httpbin('ø'), params={'foo': 'foo'})
- def test_unicode_header_name(self):
+ def test_unicode_header_name(self, httpbin):
requests.put(
httpbin('put'),
headers={str('Content-Type'): 'application/octet-stream'},
data='\xff') # compat.str is unicode.
- def test_pyopenssl_redirect(self):
- requests.get('https://httpbin.org/status/301')
+ def test_pyopenssl_redirect(self, httpsbin_url, httpbin_ca_bundle):
+ requests.get(httpsbin_url('status', '301'), verify=httpbin_ca_bundle)
- def test_urlencoded_get_query_multivalued_param(self):
+ def test_urlencoded_get_query_multivalued_param(self, httpbin):
r = requests.get(httpbin('get'), params=dict(test=['foo', 'baz']))
assert r.status_code == 200
assert r.url == httpbin('get?test=foo&test=baz')
- def test_different_encodings_dont_break_post(self):
+ def test_different_encodings_dont_break_post(self, httpbin):
r = requests.post(httpbin('post'),
data={'stuff': json.dumps({'a': 123})},
params={'blah': 'asdf1234'},
files={'file': ('test_requests.py', open(__file__, 'rb'))})
assert r.status_code == 200
- def test_unicode_multipart_post(self):
+ def test_unicode_multipart_post(self, httpbin):
r = requests.post(httpbin('post'),
data={'stuff': u('ëlïxr')},
files={'file': ('test_requests.py', open(__file__, 'rb'))})
@@ -551,7 +563,7 @@ class RequestsTestCase(unittest.TestCase):
files={'file': ('test_requests.py', open(__file__, 'rb'))})
assert r.status_code == 200
- def test_unicode_multipart_post_fieldnames(self):
+ def test_unicode_multipart_post_fieldnames(self, httpbin):
filename = os.path.splitext(__file__)[0] + '.py'
r = requests.Request(method='POST',
url=httpbin('post'),
@@ -562,13 +574,13 @@ class RequestsTestCase(unittest.TestCase):
assert b'name="stuff"' in prep.body
assert b'name="b\'stuff\'"' not in prep.body
- def test_unicode_method_name(self):
+ def test_unicode_method_name(self, httpbin):
files = {'file': open('test_requests.py', 'rb')}
r = requests.request(
method=u('POST'), url=httpbin('post'), files=files)
assert r.status_code == 200
- def test_unicode_method_name_with_request_object(self):
+ def test_unicode_method_name_with_request_object(self, httpbin):
files = {'file': open('test_requests.py', 'rb')}
s = requests.Session()
req = requests.Request(u("POST"), httpbin('post'), files=files)
@@ -579,7 +591,7 @@ class RequestsTestCase(unittest.TestCase):
resp = s.send(prep)
assert resp.status_code == 200
- def test_custom_content_type(self):
+ def test_custom_content_type(self, httpbin):
r = requests.post(
httpbin('post'),
data={'stuff': json.dumps({'a': 123})},
@@ -589,38 +601,38 @@ class RequestsTestCase(unittest.TestCase):
assert r.status_code == 200
assert b"text/py-content-type" in r.request.body
- def test_hook_receives_request_arguments(self):
+ def test_hook_receives_request_arguments(self, httpbin):
def hook(resp, **kwargs):
assert resp is not None
assert kwargs != {}
- requests.Request('GET', HTTPBIN, hooks={'response': hook})
+ requests.Request('GET', httpbin(), hooks={'response': hook})
- def test_session_hooks_are_used_with_no_request_hooks(self):
+ def test_session_hooks_are_used_with_no_request_hooks(self, httpbin):
hook = lambda x, *args, **kwargs: x
s = requests.Session()
s.hooks['response'].append(hook)
- r = requests.Request('GET', HTTPBIN)
+ r = requests.Request('GET', httpbin())
prep = s.prepare_request(r)
assert prep.hooks['response'] != []
assert prep.hooks['response'] == [hook]
- def test_session_hooks_are_overriden_by_request_hooks(self):
+ def test_session_hooks_are_overridden_by_request_hooks(self, httpbin):
hook1 = lambda x, *args, **kwargs: x
hook2 = lambda x, *args, **kwargs: x
assert hook1 is not hook2
s = requests.Session()
s.hooks['response'].append(hook2)
- r = requests.Request('GET', HTTPBIN, hooks={'response': [hook1]})
+ r = requests.Request('GET', httpbin(), hooks={'response': [hook1]})
prep = s.prepare_request(r)
assert prep.hooks['response'] == [hook1]
- def test_prepared_request_hook(self):
+ def test_prepared_request_hook(self, httpbin):
def hook(resp, **kwargs):
resp.hook_working = True
return resp
- req = requests.Request('GET', HTTPBIN, hooks={'response': hook})
+ req = requests.Request('GET', httpbin(), hooks={'response': hook})
prep = req.prepare()
s = requests.Session()
@@ -629,7 +641,7 @@ class RequestsTestCase(unittest.TestCase):
assert hasattr(resp, 'hook_working')
- def test_prepared_from_session(self):
+ def test_prepared_from_session(self, httpbin):
class DummyAuth(requests.auth.AuthBase):
def __call__(self, r):
r.headers['Dummy-Auth-Test'] = 'dummy-auth-test-ok'
@@ -782,7 +794,7 @@ class RequestsTestCase(unittest.TestCase):
# make sure one can use items multiple times
assert list(items) == list(items)
- def test_time_elapsed_blank(self):
+ def test_time_elapsed_blank(self, httpbin):
r = requests.get(httpbin('get'))
td = r.elapsed
total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600)
@@ -821,7 +833,7 @@ class RequestsTestCase(unittest.TestCase):
chunks = r.iter_content(decode_unicode=True)
assert all(isinstance(chunk, str) for chunk in chunks)
- def test_request_and_response_are_pickleable(self):
+ def test_request_and_response_are_pickleable(self, httpbin):
r = requests.get(httpbin('get'))
# verify we can pickle the original request
@@ -853,8 +865,8 @@ class RequestsTestCase(unittest.TestCase):
url = 'http://user:pass%23pass@complex.url.com/path?query=yes'
assert ('user', 'pass#pass') == requests.utils.get_auth_from_url(url)
- def test_cannot_send_unprepared_requests(self):
- r = requests.Request(url=HTTPBIN)
+ def test_cannot_send_unprepared_requests(self, httpbin):
+ r = requests.Request(url=httpbin())
with pytest.raises(ValueError):
requests.Session().send(r)
@@ -868,7 +880,7 @@ class RequestsTestCase(unittest.TestCase):
assert str(error) == 'message'
assert error.response == response
- def test_session_pickling(self):
+ def test_session_pickling(self, httpbin):
r = requests.Request('GET', httpbin('get'))
s = requests.Session()
@@ -878,7 +890,7 @@ class RequestsTestCase(unittest.TestCase):
r = s.send(r.prepare())
assert r.status_code == 200
- def test_fixes_1329(self):
+ def test_fixes_1329(self, httpbin):
"""
Ensure that header updates are done case-insensitively.
"""
@@ -891,7 +903,7 @@ class RequestsTestCase(unittest.TestCase):
assert headers['Accept'] == 'application/json'
assert headers['ACCEPT'] == 'application/json'
- def test_uppercase_scheme_redirect(self):
+ def test_uppercase_scheme_redirect(self, httpbin):
parts = urlparse(httpbin('html'))
url = "HTTP://" + parts.netloc + parts.path
r = requests.get(httpbin('redirect-to'), params={'url': url})
@@ -936,14 +948,14 @@ class RequestsTestCase(unittest.TestCase):
assert 'http://' in s2.adapters
assert 'https://' in s2.adapters
- def test_header_remove_is_case_insensitive(self):
+ def test_header_remove_is_case_insensitive(self, httpbin):
# From issue #1321
s = requests.Session()
s.headers['foo'] = 'bar'
r = s.get(httpbin('get'), headers={'FOO': None})
assert 'foo' not in r.request.headers
- def test_params_are_merged_case_sensitive(self):
+ def test_params_are_merged_case_sensitive(self, httpbin):
s = requests.Session()
s.params['foo'] = 'bar'
r = s.get(httpbin('get'), params={'FOO': 'bar'})
@@ -958,7 +970,7 @@ class RequestsTestCase(unittest.TestCase):
r = requests.Request('GET', url).prepare()
assert r.url == url
- def test_header_keys_are_native(self):
+ def test_header_keys_are_native(self, httpbin):
headers = {u('unicode'): 'blah', 'byte'.encode('ascii'): 'blah'}
r = requests.Request('GET', httpbin('get'), headers=headers)
p = r.prepare()
@@ -968,7 +980,7 @@ class RequestsTestCase(unittest.TestCase):
assert 'unicode' in p.headers.keys()
assert 'byte' in p.headers.keys()
- def test_can_send_nonstring_objects_with_files(self):
+ def test_can_send_nonstring_objects_with_files(self, httpbin):
data = {'a': 0.0}
files = {'b': 'foo'}
r = requests.Request('POST', httpbin('post'), data=data, files=files)
@@ -976,7 +988,7 @@ class RequestsTestCase(unittest.TestCase):
assert 'multipart/form-data' in p.headers['Content-Type']
- def test_can_send_bytes_bytearray_objects_with_files(self):
+ def test_can_send_bytes_bytearray_objects_with_files(self, httpbin):
# Test bytes:
data = {'a': 'this is a string'}
files = {'b': b'foo'}
@@ -989,7 +1001,7 @@ class RequestsTestCase(unittest.TestCase):
p = r.prepare()
assert 'multipart/form-data' in p.headers['Content-Type']
- def test_can_send_file_object_with_non_string_filename(self):
+ def test_can_send_file_object_with_non_string_filename(self, httpbin):
f = io.BytesIO()
f.name = 2
r = requests.Request('POST', httpbin('post'), files={'f': f})
@@ -997,7 +1009,7 @@ class RequestsTestCase(unittest.TestCase):
assert 'multipart/form-data' in p.headers['Content-Type']
- def test_autoset_header_values_are_native(self):
+ def test_autoset_header_values_are_native(self, httpbin):
data = 'this is a string'
length = '16'
req = requests.Request('POST', httpbin('post'), data=data)
@@ -1016,7 +1028,7 @@ class RequestsTestCase(unittest.TestCase):
preq = req.prepare()
assert test_url == preq.url
- def test_auth_is_stripped_on_redirect_off_host(self):
+ def test_auth_is_stripped_on_redirect_off_host(self, httpbin):
r = requests.get(
httpbin('redirect-to'),
params={'url': 'http://www.google.co.uk'},
@@ -1025,14 +1037,14 @@ class RequestsTestCase(unittest.TestCase):
assert r.history[0].request.headers['Authorization']
assert not r.request.headers.get('Authorization', '')
- def test_auth_is_retained_for_redirect_on_host(self):
+ def test_auth_is_retained_for_redirect_on_host(self, httpbin):
r = requests.get(httpbin('redirect/1'), auth=('user', 'pass'))
h1 = r.history[0].request.headers['Authorization']
h2 = r.request.headers['Authorization']
assert h1 == h2
- def test_manual_redirect_with_partial_body_read(self):
+ def test_manual_redirect_with_partial_body_read(self, httpbin):
s = requests.Session()
r1 = s.get(httpbin('redirect/2'), allow_redirects=False, stream=True)
assert r1.is_redirect
@@ -1065,7 +1077,7 @@ class RequestsTestCase(unittest.TestCase):
adapter.build_response = build_response
- def test_redirect_with_wrong_gzipped_header(self):
+ def test_redirect_with_wrong_gzipped_header(self, httpbin):
s = requests.Session()
url = httpbin('redirect/1')
self._patch_adapter_gzipped_redirect(s, url)
@@ -1076,7 +1088,7 @@ class RequestsTestCase(unittest.TestCase):
assert isinstance(s, builtin_str)
assert s == "Basic dGVzdDp0ZXN0"
- def test_requests_history_is_saved(self):
+ def test_requests_history_is_saved(self, httpbin):
r = requests.get(httpbin('redirect/5'))
total = r.history[-1].history
i = 0
@@ -1084,7 +1096,7 @@ class RequestsTestCase(unittest.TestCase):
assert item.history == total[0:i]
i = i + 1
- def test_json_param_post_content_type_works(self):
+ def test_json_param_post_content_type_works(self, httpbin):
r = requests.post(
httpbin('post'),
json={'life': 42}
@@ -1093,14 +1105,14 @@ class RequestsTestCase(unittest.TestCase):
assert 'application/json' in r.request.headers['Content-Type']
assert {'life': 42} == r.json()['json']
- def test_json_param_post_should_not_override_data_param(self):
- r = requests.Request(method='POST', url='http://httpbin.org/post',
+ def test_json_param_post_should_not_override_data_param(self, httpbin):
+ r = requests.Request(method='POST', url=httpbin('post'),
data={'stuff': 'elixr'},
json={'music': 'flute'})
prep = r.prepare()
assert 'stuff=elixr' == prep.body
- def test_response_iter_lines(self):
+ def test_response_iter_lines(self, httpbin):
r = requests.get(httpbin('stream/4'), stream=True)
assert r.status_code == 200
@@ -1108,17 +1120,17 @@ class RequestsTestCase(unittest.TestCase):
next(it)
assert len(list(it)) == 3
- def test_unconsumed_session_response_closes_connection(self):
+ def test_unconsumed_session_response_closes_connection(self, httpbin):
s = requests.session()
with contextlib.closing(s.get(httpbin('stream/4'), stream=True)) as response:
pass
- self.assertFalse(response._content_consumed)
- self.assertTrue(response.raw.closed)
+ assert response._content_consumed is False
+ assert response.raw.closed
@pytest.mark.xfail
- def test_response_iter_lines_reentrant(self):
+ def test_response_iter_lines_reentrant(self, httpbin):
"""Response.iter_lines() is not reentrant safe"""
r = requests.get(httpbin('stream/4'), stream=True)
assert r.status_code == 200
@@ -1344,6 +1356,13 @@ class UtilsTestCase(unittest.TestCase):
assert super_len(
cStringIO.StringIO('but some how, some way...')) == 25
+ def test_super_len_correctly_calculates_len_of_partially_read_file(self):
+ """Ensure that we handle partially consumed file like objects."""
+ from requests.utils import super_len
+ s = StringIO.StringIO()
+ s.write('foobarbogus')
+ assert super_len(s) == 0
+
def test_get_environ_proxies_ip_ranges(self):
"""Ensures that IP addresses are correctly matches with ranges
in no_proxy variable."""
@@ -1512,13 +1531,13 @@ class TestMorselToCookieMaxAge(unittest.TestCase):
class TestTimeout:
- def test_stream_timeout(self):
+ def test_stream_timeout(self, httpbin):
try:
requests.get(httpbin('delay/10'), timeout=2.0)
except requests.exceptions.Timeout as e:
assert 'Read timed out' in e.args[0].args[0]
- def test_invalid_timeout(self):
+ def test_invalid_timeout(self, httpbin):
with pytest.raises(ValueError) as e:
requests.get(httpbin('get'), timeout=(3, 4, 5))
assert '(connect, read)' in str(e)
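For reference, ``timeout`` accepts either a single number applied to both phases or a ``(connect, read)`` pair, which is why the three-tuple above is rejected. Illustrative values only:

    requests.get(httpbin('get'), timeout=5)           # 5s connect and 5s read
    requests.get(httpbin('get'), timeout=(3.05, 27))  # 3.05s connect, 27s read
    requests.get(httpbin('get'), timeout=None)        # block indefinitely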
@@ -1527,7 +1546,7 @@ class TestTimeout:
requests.get(httpbin('get'), timeout="foo")
assert 'must be an int or float' in str(e)
- def test_none_timeout(self):
+ def test_none_timeout(self, httpbin):
""" Check that you can set None as a valid timeout value.
To actually test this behavior, we'd want to check that setting the
@@ -1539,7 +1558,7 @@ class TestTimeout:
r = requests.get(httpbin('get'), timeout=None)
assert r.status_code == 200
- def test_read_timeout(self):
+ def test_read_timeout(self, httpbin):
try:
requests.get(httpbin('delay/10'), timeout=(None, 0.1))
assert False, "The recv() request should time out."
@@ -1561,7 +1580,7 @@ class TestTimeout:
except ConnectTimeout:
pass
- def test_encoded_methods(self):
+ def test_encoded_methods(self, httpbin):
"""See: https://github.com/kennethreitz/requests/issues/2316"""
r = requests.request(b'GET', httpbin('get'))
assert r.ok
@@ -1612,7 +1631,7 @@ class TestRedirects:
'proxies': {},
}
- def test_requests_are_updated_each_time(self):
+ def test_requests_are_updated_each_time(self, httpbin):
session = RedirectSession([303, 307])
prep = requests.Request('POST', httpbin('post')).prepare()
r0 = session.send(prep)
@@ -1694,7 +1713,7 @@ def test_prepare_unicode_url():
assert_copy(p, p.copy())
-def test_urllib3_retries():
+def test_urllib3_retries(httpbin):
from requests.packages.urllib3.util import Retry
s = requests.Session()
s.mount('http://', HTTPAdapter(max_retries=Retry(
@@ -1705,14 +1724,15 @@ def test_urllib3_retries():
s.get(httpbin('status/500'))
-def test_urllib3_pool_connection_closed():
+def test_urllib3_pool_connection_closed(httpbin):
s = requests.Session()
s.mount('http://', HTTPAdapter(pool_connections=0, pool_maxsize=0))
try:
s.get(httpbin('status/200'))
except ConnectionError as e:
- assert u"HTTPConnectionPool(host='httpbin.org', port=80): Pool is closed." in str(e)
+ assert u"Pool is closed." in str(e)
+
def test_vendor_aliases():
from requests.packages import urllib3
@@ -1721,5 +1741,6 @@ def test_vendor_aliases():
with pytest.raises(ImportError):
from requests.packages import webbrowser
+
if __name__ == '__main__':
unittest.main()