-rw-r--r--   CHANGES.rst | 23
-rw-r--r--   CONTRIBUTORS.txt | 18
-rw-r--r--   Makefile | 3
-rw-r--r--   PKG-INFO | 25
-rw-r--r--   debian/.git-dpm | 6
-rw-r--r--   debian/patches/01_do-not-use-embedded-python-six.patch | 51
-rw-r--r--   debian/patches/02_require-cert-verification.patch | 8
-rw-r--r--   debian/patches/03_force_setuptools.patch | 4
-rw-r--r--   debian/patches/04_relax_nosetests_options.patch | 4
-rw-r--r--   debian/patches/05_avoid-embedded-ssl-match-hostname.patch | 13
-rw-r--r--   docs/index.rst | 2
-rw-r--r--   docs/managers.rst | 11
-rw-r--r--   docs/security.rst | 73
-rw-r--r--   dummyserver/certs/server.ipv6addr.crt | 16
-rw-r--r--   dummyserver/certs/server.ipv6addr.key | 27
-rw-r--r--   dummyserver/handlers.py | 11
-rwxr-xr-x   dummyserver/proxy.py | 9
-rwxr-xr-x   dummyserver/server.py | 6
-rw-r--r--   dummyserver/testcase.py | 19
-rw-r--r--   setup.cfg | 9
-rw-r--r--   setup.py | 11
-rw-r--r--   test/test_util.py | 30
-rw-r--r--   test/with_dummyserver/test_connectionpool.py | 44
-rw-r--r--   test/with_dummyserver/test_https.py | 32
-rw-r--r--   test/with_dummyserver/test_poolmanager.py | 12
-rw-r--r--   test/with_dummyserver/test_proxy_poolmanager.py | 8
-rw-r--r--   test/with_dummyserver/test_socketlevel.py | 71
-rw-r--r--   urllib3/__init__.py | 32
-rw-r--r--   urllib3/_collections.py | 7
-rw-r--r--   urllib3/connection.py | 39
-rw-r--r--   urllib3/connectionpool.py | 34
-rw-r--r--   urllib3/contrib/appengine.py | 23
-rw-r--r--   urllib3/contrib/ntlmpool.py | 1
-rw-r--r--   urllib3/contrib/pyopenssl.py | 35
-rw-r--r--   urllib3/exceptions.py | 14
-rw-r--r--   urllib3/fields.py | 1
-rw-r--r--   urllib3/filepost.py | 1
-rw-r--r--   urllib3/packages/__init__.py | 2
-rw-r--r--   urllib3/poolmanager.py | 3
-rw-r--r--   urllib3/request.py | 4
-rw-r--r--   urllib3/response.py | 53
-rw-r--r--   urllib3/util/__init__.py | 20
-rw-r--r--   urllib3/util/connection.py | 1
-rw-r--r--   urllib3/util/request.py | 1
-rw-r--r--   urllib3/util/response.py | 4
-rw-r--r--   urllib3/util/retry.py | 7
-rw-r--r--   urllib3/util/ssl_.py | 39
-rw-r--r--   urllib3/util/timeout.py | 8
-rw-r--r--   urllib3/util/url.py | 5
49 files changed, 668 insertions, 212 deletions
diff --git a/CHANGES.rst b/CHANGES.rst
index ea10bb7..674aa35 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -1,6 +1,29 @@
Changes
=======
+1.13.1 (2015-12-18)
++++++++++++++++++++
+
+* Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
+
+
+1.13 (2015-12-14)
++++++++++++++++++
+
+* Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
+
+* pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
+
+* pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
+
+* Close connections more defensively on exception. (Issue #734)
+
+* Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
+ repeatedly flushing the decoder, to function better on Jython. (Issue #743)
+
+* Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
+
+
1.12 (2015-09-03)
+++++++++++++++++
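The ``ca_cert_dir`` option added in 1.13 (Issue #758) is passed straight through ``PoolManager`` and fed into ``urllib3.util.ssl_wrap_socket``. A minimal sketch of the intended use, assuming a directory of CA certificates in OpenSSL's hashed (c_rehash) layout; the path below is a placeholder::

    import urllib3

    # Verify server certificates against a directory of CAs instead of a
    # single bundle file; ca_cert_dir is forwarded to ssl_wrap_socket().
    http = urllib3.PoolManager(
        cert_reqs='CERT_REQUIRED',
        ca_cert_dir='/etc/ssl/certs',  # placeholder path
    )
    r = http.request('GET', 'https://example.com/')
    print(r.status)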
diff --git a/CONTRIBUTORS.txt b/CONTRIBUTORS.txt
index b8a0f01..448a4ab 100644
--- a/CONTRIBUTORS.txt
+++ b/CONTRIBUTORS.txt
@@ -153,7 +153,7 @@ In chronological order:
* Jon Wayne Parrott <jonwayne@google.com>
* App Engine environment tests.
-
+
* John Krauss <https://github.com/talos>
* Clues to debugging problems with `cryptography` dependency in docs
@@ -169,6 +169,20 @@ In chronological order:
* Jordan Moldow <https://github.com/jmoldow>
* Fix low-level exceptions leaking from ``HTTPResponse.stream()``.
+* Predrag Gruevski <https://github.com/obi1kenobi>
+ * Made cert digest comparison use a constant-time algorithm.
+
+* Adam Talsma <https://github.com/a-tal>
+ * Bugfix to ca_cert file paths.
+
+* Evan Meagher <https://evanmeagher.net>
+ * Bugfix related to `memoryview` usage in PyOpenSSL adapter
+
+* John Vandenberg <jayvdb@gmail.com>
+ * Python 2.6 fixes; pyflakes and pep8 compliance
+
+* Andy Caldwell <andy.m.caldwell@googlemail.com>
+ * Bugfix related to reusing connections in indeterminate states.
+
* [Your name or handle] <[email or website]>
* [Brief summary of your changes]
-
diff --git a/Makefile b/Makefile
index 3b3314e..8236cc8 100644
--- a/Makefile
+++ b/Makefile
@@ -30,8 +30,7 @@ clean:
find . -name "*.py[oc]" -delete
find . -name "__pycache__" -delete
rm -f $(REQUIREMENTS_OUT)
- rm -rf docs/_build
- rm -rf build/
+ rm -rf docs/_build build/ dist/
test: requirements
nosetests
diff --git a/PKG-INFO b/PKG-INFO
index cdd3ed3..a375cfe 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: urllib3
-Version: 1.12
+Version: 1.13.1
Summary: HTTP library with thread-safe connection pooling, file post, and more.
Home-page: http://urllib3.readthedocs.org/
Author: Andrey Petrov
@@ -156,6 +156,29 @@ Description: =======
Changes
=======
+ 1.13.1 (2015-12-18)
+ +++++++++++++++++++
+
+ * Fixed regression in IPv6 + SSL for match_hostname. (Issue #761)
+
+
+ 1.13 (2015-12-14)
+ +++++++++++++++++
+
+ * Fixed ``pip install urllib3[secure]`` on modern pip. (Issue #706)
+
+ * pyopenssl: Fixed SSL3_WRITE_PENDING error. (Issue #717)
+
+ * pyopenssl: Support for TLSv1.1 and TLSv1.2. (Issue #696)
+
+ * Close connections more defensively on exception. (Issue #734)
+
+ * Adjusted ``read_chunked`` to handle gzipped, chunk-encoded bodies without
+ repeatedly flushing the decoder, to function better on Jython. (Issue #743)
+
+ * Accept ``ca_cert_dir`` for SSL-related PoolManager configuration. (Issue #758)
+
+
1.12 (2015-09-03)
+++++++++++++++++
diff --git a/debian/.git-dpm b/debian/.git-dpm
index 2f8c6d5..9f02acc 100644
--- a/debian/.git-dpm
+++ b/debian/.git-dpm
@@ -1,7 +1,7 @@
# see git-dpm(1) from git-dpm package
-842fd4e9f188f13313124e3750f1c768d319fc34
-842fd4e9f188f13313124e3750f1c768d319fc34
-2b3d330a120a16e97cecd5163b5d454dcfe38a2b
+24ae6ba32069ca15f4a163e386964a3accdccf63
+24ae6ba32069ca15f4a163e386964a3accdccf63
+dff63335f212d32d7c1a4bb5276f2d31f5995ea1
dff63335f212d32d7c1a4bb5276f2d31f5995ea1
python-urllib3_1.13.1.orig.tar.gz
1309e9536c74cdad6d5ab089c83235a687b6d7e6
diff --git a/debian/patches/01_do-not-use-embedded-python-six.patch b/debian/patches/01_do-not-use-embedded-python-six.patch
index 225195f..553086b 100644
--- a/debian/patches/01_do-not-use-embedded-python-six.patch
+++ b/debian/patches/01_do-not-use-embedded-python-six.patch
@@ -1,4 +1,4 @@
-From 4e097b776bf499a02b262c722487fcd55c2ee379 Mon Sep 17 00:00:00 2001
+From 39fe68dc64cc8b675bb94892935ef2c8b50f583c Mon Sep 17 00:00:00 2001
From: Daniele Tricoli <eriol@mornie.org>
Date: Thu, 8 Oct 2015 13:19:46 -0700
Subject: Do not use embedded copy of python-six.
@@ -21,15 +21,15 @@ Patch-Name: 01_do-not-use-embedded-python-six.patch
urllib3/filepost.py | 4 ++--
urllib3/response.py | 4 ++--
urllib3/util/request.py | 2 +-
- urllib3/util/response.py | 2 +-
+ urllib3/util/response.py | 3 ++-
urllib3/util/retry.py | 2 +-
- 16 files changed, 18 insertions(+), 18 deletions(-)
+ 16 files changed, 19 insertions(+), 18 deletions(-)
diff --git a/dummyserver/handlers.py b/dummyserver/handlers.py
-index 43398cd..a7828a3 100644
+index fb6f44f..c5ac9b4 100644
--- a/dummyserver/handlers.py
+++ b/dummyserver/handlers.py
-@@ -263,7 +263,7 @@ def _parse_header(line):
+@@ -264,7 +264,7 @@ def _parse_header(line):
"""
import tornado.httputil
import email.utils
@@ -115,10 +115,10 @@ index 421e508..8fcc287 100644
from urllib3.exceptions import (
ConnectTimeoutError,
diff --git a/urllib3/_collections.py b/urllib3/_collections.py
-index b68b9a5..af8074a 100644
+index 67f3ce9..b69ce20 100644
--- a/urllib3/_collections.py
+++ b/urllib3/_collections.py
-@@ -14,7 +14,7 @@ try: # Python 2.7+
+@@ -15,7 +15,7 @@ try: # Python 2.7+
from collections import OrderedDict
except ImportError:
from .packages.ordered_dict import OrderedDict
@@ -128,10 +128,10 @@ index b68b9a5..af8074a 100644
__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
diff --git a/urllib3/connection.py b/urllib3/connection.py
-index 3eab1e2..115eac9 100644
+index 1e4cd41..0075541 100644
--- a/urllib3/connection.py
+++ b/urllib3/connection.py
-@@ -3,7 +3,7 @@ import sys
+@@ -5,7 +5,7 @@ import sys
import socket
from socket import error as SocketError, timeout as SocketTimeout
import warnings
@@ -139,12 +139,12 @@ index 3eab1e2..115eac9 100644
+import six
try: # Python 3
- from http.client import HTTPConnection as _HTTPConnection, HTTPException
+ from http.client import HTTPConnection as _HTTPConnection
diff --git a/urllib3/connectionpool.py b/urllib3/connectionpool.py
-index b38ac68..563f108 100644
+index 995b416..2204b30 100644
--- a/urllib3/connectionpool.py
+++ b/urllib3/connectionpool.py
-@@ -30,7 +30,7 @@ from .exceptions import (
+@@ -31,7 +31,7 @@ from .exceptions import (
NewConnectionError,
)
from .packages.ssl_match_hostname import CertificateError
@@ -154,10 +154,10 @@ index b38ac68..563f108 100644
port_by_scheme,
DummyConnection,
diff --git a/urllib3/fields.py b/urllib3/fields.py
-index c853f8d..5fe3c24 100644
+index c7d4811..2152829 100644
--- a/urllib3/fields.py
+++ b/urllib3/fields.py
-@@ -1,7 +1,7 @@
+@@ -2,7 +2,7 @@ from __future__ import absolute_import
import email.utils
import mimetypes
@@ -167,10 +167,10 @@ index c853f8d..5fe3c24 100644
def guess_content_type(filename, default='application/octet-stream'):
diff --git a/urllib3/filepost.py b/urllib3/filepost.py
-index 0fbf488..97ab970 100644
+index 97a2843..2fea190 100644
--- a/urllib3/filepost.py
+++ b/urllib3/filepost.py
-@@ -3,8 +3,8 @@ import codecs
+@@ -4,8 +4,8 @@ import codecs
from uuid import uuid4
from io import BytesIO
@@ -182,10 +182,10 @@ index 0fbf488..97ab970 100644
writer = codecs.lookup('utf-8')[3]
diff --git a/urllib3/response.py b/urllib3/response.py
-index 788eb6c..c41bd2c 100644
+index 8f2a1b5..e034068 100644
--- a/urllib3/response.py
+++ b/urllib3/response.py
-@@ -7,8 +7,8 @@ from ._collections import HTTPHeaderDict
+@@ -9,8 +9,8 @@ from ._collections import HTTPHeaderDict
from .exceptions import (
ProtocolError, DecodeError, ReadTimeoutError, ResponseNotChunked
)
@@ -197,10 +197,11 @@ index 788eb6c..c41bd2c 100644
from .util.response import is_fp_closed, is_response_to_head
diff --git a/urllib3/util/request.py b/urllib3/util/request.py
-index bc64f6b..5f4ccfd 100644
+index 7377931..40bf0b4 100644
--- a/urllib3/util/request.py
+++ b/urllib3/util/request.py
-@@ -1,6 +1,6 @@
+@@ -1,7 +1,7 @@
+ from __future__ import absolute_import
from base64 import b64encode
-from ..packages.six import b
@@ -209,20 +210,22 @@ index bc64f6b..5f4ccfd 100644
ACCEPT_ENCODING = 'gzip,deflate'
diff --git a/urllib3/util/response.py b/urllib3/util/response.py
-index 2c1de15..6695809 100644
+index bc72327..efb9e04 100644
--- a/urllib3/util/response.py
+++ b/urllib3/util/response.py
-@@ -1,4 +1,4 @@
+@@ -1,5 +1,6 @@
+ from __future__ import absolute_import
-from ..packages.six.moves import http_client as httplib
++
+from six.moves import http_client as httplib
from ..exceptions import HeaderParsingError
diff --git a/urllib3/util/retry.py b/urllib3/util/retry.py
-index 1fb1f23..178b374 100644
+index 03a0124..fd1f5dd 100644
--- a/urllib3/util/retry.py
+++ b/urllib3/util/retry.py
-@@ -8,7 +8,7 @@ from ..exceptions import (
+@@ -9,7 +9,7 @@ from ..exceptions import (
ReadTimeoutError,
ResponseError,
)
diff --git a/debian/patches/02_require-cert-verification.patch b/debian/patches/02_require-cert-verification.patch
index 75d38b6..99f6483 100644
--- a/debian/patches/02_require-cert-verification.patch
+++ b/debian/patches/02_require-cert-verification.patch
@@ -1,4 +1,4 @@
-From 9e59bf6b30776302e6ccd960331a91d073f6a0c3 Mon Sep 17 00:00:00 2001
+From 27ce208c67f4041e8c7e9867236609e522cf06cb Mon Sep 17 00:00:00 2001
From: Jamie Strandboge <jamie@canonical.com>
Date: Thu, 8 Oct 2015 13:19:47 -0700
Subject: require SSL certificate validation by default by using
@@ -14,10 +14,10 @@ Patch-Name: 02_require-cert-verification.patch
1 file changed, 4 insertions(+), 2 deletions(-)
diff --git a/urllib3/connectionpool.py b/urllib3/connectionpool.py
-index 563f108..a99d4ec 100644
+index 2204b30..ef60000 100644
--- a/urllib3/connectionpool.py
+++ b/urllib3/connectionpool.py
-@@ -681,6 +681,8 @@ class HTTPSConnectionPool(HTTPConnectionPool):
+@@ -683,6 +683,8 @@ class HTTPSConnectionPool(HTTPConnectionPool):
``ca_cert_dir``, and ``ssl_version`` are only used if :mod:`ssl` is
available and are fed into :meth:`urllib3.util.ssl_wrap_socket` to upgrade
the connection socket into an SSL socket.
@@ -26,7 +26,7 @@ index 563f108..a99d4ec 100644
"""
scheme = 'https'
-@@ -690,8 +692,8 @@ class HTTPSConnectionPool(HTTPConnectionPool):
+@@ -692,8 +694,8 @@ class HTTPSConnectionPool(HTTPConnectionPool):
strict=False, timeout=Timeout.DEFAULT_TIMEOUT, maxsize=1,
block=False, headers=None, retries=None,
_proxy=None, _proxy_headers=None,
diff --git a/debian/patches/03_force_setuptools.patch b/debian/patches/03_force_setuptools.patch
index 37e1512..4182284 100644
--- a/debian/patches/03_force_setuptools.patch
+++ b/debian/patches/03_force_setuptools.patch
@@ -1,4 +1,4 @@
-From f3ff91a4a00160a0e1afcaa7cd79219cd851bccd Mon Sep 17 00:00:00 2001
+From 32579d35274ade2e68935bb21263b4dc63910ad6 Mon Sep 17 00:00:00 2001
From: Barry Warsaw <barry@debian.org>
Date: Thu, 8 Oct 2015 13:19:49 -0700
Subject: Use setuptools.setup() so that the bdist_wheel
@@ -12,7 +12,7 @@ Patch-Name: 03_force_setuptools.patch
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/setup.py b/setup.py
-index 6bdb7b9..1e1888d 100644
+index 0a2dac3..02d5ec6 100644
--- a/setup.py
+++ b/setup.py
@@ -1,6 +1,6 @@
diff --git a/debian/patches/04_relax_nosetests_options.patch b/debian/patches/04_relax_nosetests_options.patch
index b898d19..a52b940 100644
--- a/debian/patches/04_relax_nosetests_options.patch
+++ b/debian/patches/04_relax_nosetests_options.patch
@@ -1,4 +1,4 @@
-From 28953c4c9c5ad7893bd392658016019fbf78088a Mon Sep 17 00:00:00 2001
+From 11c93ac43225db29b9c7ead00e704ce97a11f89c Mon Sep 17 00:00:00 2001
From: Daniele Tricoli <eriol@mornie.org>
Date: Thu, 8 Oct 2015 13:19:50 -0700
Subject: Do not use logging-clear-handlers to see all logging output and
@@ -14,7 +14,7 @@ Patch-Name: 04_relax_nosetests_options.patch
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/setup.cfg b/setup.cfg
-index 6a64ffa..25a6815 100644
+index b5fe992..ca24a71 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -1,8 +1,8 @@
diff --git a/debian/patches/05_avoid-embedded-ssl-match-hostname.patch b/debian/patches/05_avoid-embedded-ssl-match-hostname.patch
index 04894d4..1da4d26 100644
--- a/debian/patches/05_avoid-embedded-ssl-match-hostname.patch
+++ b/debian/patches/05_avoid-embedded-ssl-match-hostname.patch
@@ -1,4 +1,4 @@
-From 842fd4e9f188f13313124e3750f1c768d319fc34 Mon Sep 17 00:00:00 2001
+From 24ae6ba32069ca15f4a163e386964a3accdccf63 Mon Sep 17 00:00:00 2001
From: Stefano Rivera <stefanor@debian.org>
Date: Thu, 8 Oct 2015 13:19:51 -0700
Subject: Do not use embedded copy of ssl.match_hostname, when possible
@@ -11,18 +11,17 @@ Last-Update: 2014-11-18
Patch-Name: 05_avoid-embedded-ssl-match-hostname.patch
---
- urllib3/packages/__init__.py | 9 +++++++--
- 1 file changed, 7 insertions(+), 2 deletions(-)
+ urllib3/packages/__init__.py | 8 +++++++-
+ 1 file changed, 7 insertions(+), 1 deletion(-)
diff --git a/urllib3/packages/__init__.py b/urllib3/packages/__init__.py
-index 37e8351..10a3aa8 100644
+index 170e974..35555ed 100644
--- a/urllib3/packages/__init__.py
+++ b/urllib3/packages/__init__.py
-@@ -1,4 +1,9 @@
+@@ -1,5 +1,11 @@
from __future__ import absolute_import
-from . import ssl_match_hostname
--
+try:
+ # cPython >= 2.7.9 has ssl features backported from Python3
+ from ssl import CertificateError
@@ -30,3 +29,5 @@ index 37e8351..10a3aa8 100644
+ import ssl as ssl_match_hostname
+except ImportError:
+ from . import ssl_match_hostname
+
+ __all__ = ('ssl_match_hostname', )
diff --git a/docs/index.rst b/docs/index.rst
index 78d3601..29e7ad7 100644
--- a/docs/index.rst
+++ b/docs/index.rst
@@ -288,7 +288,7 @@ You may also stream your response and get data as they come (e.g. when using
::
- >>> from urllib3 import PoolManager
+ >>> import urllib3
>>> http = urllib3.PoolManager()
>>> r = http.request("GET", "http://httpbin.org/stream/3")
diff --git a/docs/managers.rst b/docs/managers.rst
index 6c841b7..825e2f4 100644
--- a/docs/managers.rst
+++ b/docs/managers.rst
@@ -70,6 +70,17 @@ connections and individual per-server:port
:class:`~urllib3.connectionpool.HTTPSConnectionPool` instances for tunnelled
HTTPS connections.
+Example using proxy authentication:
+
+::
+
+ >>> headers = urllib3.make_headers(proxy_basic_auth='myusername:mypassword')
+ >>> proxy = urllib3.ProxyManager('http://localhost:3128', proxy_headers=headers)
+ >>> r = proxy.request('GET', 'http://example.com/')
+ >>> r.status
+ 200
+
+
API
---
.. autoclass:: ProxyManager
diff --git a/docs/security.rst b/docs/security.rst
index 0f5aa1c..48de053 100644
--- a/docs/security.rst
+++ b/docs/security.rst
@@ -185,21 +185,8 @@ Unverified HTTPS requests will trigger a warning via Python's ``warnings`` modul
This would be a great time to enable HTTPS verification:
:ref:`certifi-with-urllib3`.
-If you know what you're doing and would like to disable this and other warnings,
-you can use :func:`~urllib3.disable_warnings`::
-
- import urllib3
- urllib3.disable_warnings()
+For info about disabling warnings, see `Disabling Warnings`_.
-Making unverified HTTPS requests is strongly discouraged. ˙ ͜ʟ˙
-
-Alternatively, if you are using Python's ``logging`` module, you can capture the
-warnings to your own log::
-
- logging.captureWarnings(True)
-
-Capturing the warnings to your own log is much preferred over simply disabling
-the warnings.
InsecurePlatformWarning
-----------------------
@@ -216,6 +203,58 @@ If you encounter this warning, it is strongly recommended you upgrade to a
newer Python version, or that you use pyOpenSSL as described in the
:ref:`pyopenssl` section.
-If you know what you are doing and would like to disable this and other
-warnings, please consult the :ref:`insecurerequestwarning` section for
-instructions on how to handle the warnings.
+For info about disabling warnings, see `Disabling Warnings`_.
+
+
+SNIMissingWarning
+-----------------
+
+.. versionadded:: 1.13
+
+Certain Python distributions (specifically, versions of Python earlier than
+2.7.9) and older OpenSSLs have restrictions that prevent them from using the
+SNI (Server Name Indication) extension. This can cause unexpected behaviour
+when making some HTTPS requests, usually causing the server to present a
+TLS certificate that is not valid for the website you're trying to access.
+
+If you encounter this warning, it is strongly recommended that you upgrade
+to a newer Python version, or that you use pyOpenSSL as described in the
+:ref:`pyopenssl` section.
+
+For info about disabling warnings, see `Disabling Warnings`_.
+
+
+Disabling Warnings
+------------------
+
+Making unverified HTTPS requests is strongly discouraged. ˙ ͜ʟ˙
+
+But if you understand the ramifications and still want to do it...
+
+Within the code
++++++++++++++++
+
+If you know what you're doing and would like to disable all ``urllib3`` warnings,
+you can use :func:`~urllib3.disable_warnings`::
+
+ import urllib3
+ urllib3.disable_warnings()
+
+Alternatively, if you are using Python's ``logging`` module, you can capture the
+warnings to your own log::
+
+ logging.captureWarnings(True)
+
+Capturing the warnings to your own log is much preferred over simply disabling
+the warnings.
+
+Without modifying code
+++++++++++++++++++++++
+
+If you are using a program that uses ``urllib3`` and don't want to change the
+code, you can suppress warnings by setting the ``PYTHONWARNINGS`` environment
+variable in Python 2.7+ or by using the ``-W`` flag with the Python
+interpreter (see `docs
+<https://docs.python.org/2/using/cmdline.html#cmdoption-W>`_), such as::
+
+ PYTHONWARNINGS="ignore:Unverified HTTPS request" ./do-insecure-request.py
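Beyond the blanket ``disable_warnings()`` call shown above, a single category can be silenced while the others keep firing. A short sketch, assuming only ``InsecureRequestWarning`` should be suppressed::

    import urllib3

    # Suppress only InsecureRequestWarning; InsecurePlatformWarning and
    # SNIMissingWarning remain enabled.
    urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)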
diff --git a/dummyserver/certs/server.ipv6addr.crt b/dummyserver/certs/server.ipv6addr.crt
new file mode 100644
index 0000000..c32a0ce
--- /dev/null
+++ b/dummyserver/certs/server.ipv6addr.crt
@@ -0,0 +1,16 @@
+-----BEGIN CERTIFICATE-----
+MIIClTCCAX2gAwIBAgIBATANBgkqhkiG9w0BAQsFADAOMQwwCgYDVQQDDAM6OjEw
+HhcNMTUxMjE1MTY0NjQxWhcNMjEwODAyMDAwMDAwWjAOMQwwCgYDVQQDDAM6OjEw
+ggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvFke8g6Pco24cdWuOD8Wb
+blIUH6iieNpJqcdNTzKUgtWvlpDeiIOweTuawUWX7bz35fn2KBMty68tmz+64iWJ
+AKe6tJtbXQHty+Y09CPDkkC8f0cmXjqwnNbMT7kVPTaPQZkW7hnGS4XgpnzswpqP
+dMLpUzYwoucaScC/flawLafP3jq6hfae2F5wafwPIVvlURL7ZR7FZPuLW2L4T1Wu
+BHc6gOPQfohjQtiiTNtcEIhsmA3zY4DWuMUJePrEtXhPqcXtogoEiwzLKBeKOYJu
+LIQ3++nWLel+HPvhg52wT4Dhb45PQy55ziyelXiHSro5PQmXTiQebuPMLy/8CiSn
+AgMBAAEwDQYJKoZIhvcNAQELBQADggEBAILPtFVSOrXiXQ3b8Gslh4TOxxTPSdnO
+AkOooYyg5oLJy+CAnDL+u+mFCDFC2maDPm3qyeAn31h5aDyazCzyDvVFVw2uWSuG
+a67YNbCLrVBLVIgqyJfMayY3rcjz6rV4n7hpHn42zuwaI8H1z2T1bjvNU6nsizNK
+qo80nvJ6Kge2kbAa0aMOIHsGX4KGiUwUb4+LpRAP5ZDC8EnDNNfURt1w94nnAH70
+V1RgztaAlVNcPqrSMBLXryNXz+X+Hyd79Nd5runemYUG4fQ50jabC5WHeXqH0uOC
+sDtBgqWHxcxnAQhhJ8jF7wodcUjv5AE204ECmZgyQ475kuZpSh6+IvQ=
+-----END CERTIFICATE-----
diff --git a/dummyserver/certs/server.ipv6addr.key b/dummyserver/certs/server.ipv6addr.key
new file mode 100644
index 0000000..0891327
--- /dev/null
+++ b/dummyserver/certs/server.ipv6addr.key
@@ -0,0 +1,27 @@
+-----BEGIN RSA PRIVATE KEY-----
+MIIEpAIBAAKCAQEArxZHvIOj3KNuHHVrjg/Fm25SFB+oonjaSanHTU8ylILVr5aQ
+3oiDsHk7msFFl+289+X59igTLcuvLZs/uuIliQCnurSbW10B7cvmNPQjw5JAvH9H
+Jl46sJzWzE+5FT02j0GZFu4ZxkuF4KZ87MKaj3TC6VM2MKLnGknAv35WsC2nz946
+uoX2nthecGn8DyFb5VES+2UexWT7i1ti+E9VrgR3OoDj0H6IY0LYokzbXBCIbJgN
+82OA1rjFCXj6xLV4T6nF7aIKBIsMyygXijmCbiyEN/vp1i3pfhz74YOdsE+A4W+O
+T0Muec4snpV4h0q6OT0Jl04kHm7jzC8v/AokpwIDAQABAoIBAB5Av0x3gozRQGly
+cI8vcnmU6mHHxp+sOiuYRnO5R4gZw0p/vbB17itEB2SKGLhTv98lwbci3Y2AMFi1
+BqIICWxOzKyaIG38+CRaowBrXvKgBseLRoP+rC1wLD1JWTWuSgsezpEDuzhkPFHA
+8r0GMyauii8+zZJB06TbMY7lCZ2PPKFGtojhbRTe//Nnk925KzSQz7Rk/ylouHra
+4Zi9lDodGvZMhZ8zoNDL2/yvCiDIWffpsdFKn4iKNeme1L7JE8AHBeCFo4eIKeAU
+uPlZDFgo/WdDVQQO7LnBv7tRVUB89ARBc9Egt0JoUpSq9pDaMkiBjcJcNiHkbvNj
+csFN5GECgYEA44VVoxouGs08TqnJmlJvVu4hA5+te50POQbAlcOR+rLpwl0bPuti
+tTaarO4hYwtB87s1owdOOIIqfFSoUTZKy8Ip2OE7pU5CYNZU6b/Z3bWn/+p0mOhF
+aoB/FuifPcepY5Fspx2SFFOlHxrkIEkkk1FBWEX9uDPzvJoLsg6jAMUCgYEAxQDC
+eFj/Mgjb4oIAtBZJGkRpkNK0Ngw2+Mi2ApEVrlylN4vAtNEBa3HRkZprhPrBIqhw
+k129jJ81nBWOPDFqhrqmlfolA2K8YxD6wyE6h6qPyO55BbUfAY1uS8ObNLvWZC4o
+hO5+AHzMnRc8Qi7CVvPVNbSPE5x5gaCWMiHWDnsCgYEAyfdSTbavtpr5LdsKWzNS
+IpjANbdJCLIjETwtOMSikSjA2QhNZ00MElCmfVvlx0X3BaTpuA6EISVrEXMU9wJ6
+4uU4wI0vpU4THmMkfVsQyv62YzZ8yj9lX2Uxa+STdwQGGZy+FprpUaHuse3tE7vZ
+++vlVbbLwvhbJNCaETVs/QECgYApYV139gm0stOtIm2PZlb4o4IhH4EnES3C2DYT
+F/Kb623w2vQhhv1rWj8Q/IS1LA7BfuwWqEwWa6LRJF36Gs8rea1gN2jq6yRzEq/5
+qNMoiBUnuZ/GKSNYKiMO2wmQ7Bu+c0ujkIz7ATvhi23m4PeFWDZiz4h2MBn9toOW
+CDF0XQKBgQCurY35+4jdMOtMuAHquAEtzpf5zbO8p9Bj5qet8N+QXuGTXwaGXlkT
+S6i2iXjboNIWfPhqna+BMAmw+NP4TYiNgWoiRd27SDY8LcPXJee8c0+iWKsJkdl8
+90guxzVXSZg478by9ob83Zod2xBhzUSXYGuQrOrItiuiSnMMhHgtrw==
+-----END RSA PRIVATE KEY-----
diff --git a/dummyserver/handlers.py b/dummyserver/handlers.py
index a7828a3..c5ac9b4 100644
--- a/dummyserver/handlers.py
+++ b/dummyserver/handlers.py
@@ -28,8 +28,8 @@ class Response(object):
def __call__(self, request_handler):
status, reason = self.status.split(' ', 1)
request_handler.set_status(int(status), reason)
- for header,value in self.headers:
- request_handler.add_header(header,value)
+ for header, value in self.headers:
+ request_handler.add_header(header, value)
# chunked
if isinstance(self.body, list):
@@ -48,6 +48,7 @@ class Response(object):
RETRY_TEST_NAMES = collections.defaultdict(int)
+
class TestingApp(RequestHandler):
"""
Simple app that performs various operations, useful for testing an HTTP
@@ -136,8 +137,8 @@ class TestingApp(RequestHandler):
files_ = request.files.get(param)
if len(files_) != 1:
- return Response("Expected 1 file for '%s', not %d" %(param, len(files_)),
- status='400 Bad Request')
+ return Response("Expected 1 file for '%s', not %d" % (param, len(files_)),
+ status='400 Bad Request')
file_ = files_[0]
data = file_['body']
@@ -277,7 +278,7 @@ def _parse_header(line):
value = p[i + 1:].strip()
params.append((name, value))
params = email.utils.decode_params(params)
- params.pop(0) # get rid of the dummy again
+ params.pop(0) # get rid of the dummy again
pdict = {}
for name, value in params:
value = email.utils.collapse_rfc2231_value(value)
diff --git a/dummyserver/proxy.py b/dummyserver/proxy.py
index aca92a7..5b9984d 100755
--- a/dummyserver/proxy.py
+++ b/dummyserver/proxy.py
@@ -45,14 +45,14 @@ class ProxyHandler(tornado.web.RequestHandler):
def handle_response(response):
if response.error and not isinstance(response.error,
- tornado.httpclient.HTTPError):
+ tornado.httpclient.HTTPError):
self.set_status(500)
self.write('Internal server error:\n' + str(response.error))
self.finish()
else:
self.set_status(response.code)
for header in ('Date', 'Cache-Control', 'Server',
- 'Content-Type', 'Location'):
+ 'Content-Type', 'Location'):
v = response.headers.get(header)
if v:
self.set_header(header, v)
@@ -60,7 +60,8 @@ class ProxyHandler(tornado.web.RequestHandler):
self.write(response.body)
self.finish()
- req = tornado.httpclient.HTTPRequest(url=self.request.uri,
+ req = tornado.httpclient.HTTPRequest(
+ url=self.request.uri,
method=self.request.method, body=self.request.body,
headers=self.request.headers, follow_redirects=False,
allow_nonstandard_methods=True)
@@ -133,5 +134,5 @@ if __name__ == '__main__':
if len(sys.argv) > 1:
port = int(sys.argv[1])
- print ("Starting HTTP proxy on port %d" % port)
+ print("Starting HTTP proxy on port %d" % port)
run_proxy(port)
diff --git a/dummyserver/server.py b/dummyserver/server.py
index e0b6345..ef053a7 100755
--- a/dummyserver/server.py
+++ b/dummyserver/server.py
@@ -34,10 +34,16 @@ NO_SAN_CERTS = {
'certfile': os.path.join(CERTS_PATH, 'server.no_san.crt'),
'keyfile': DEFAULT_CERTS['keyfile']
}
+IPV6_ADDR_CERTS = {
+ 'certfile': os.path.join(CERTS_PATH, 'server.ipv6addr.crt'),
+ 'keyfile': os.path.join(CERTS_PATH, 'server.ipv6addr.key'),
+}
DEFAULT_CA = os.path.join(CERTS_PATH, 'cacert.pem')
DEFAULT_CA_BAD = os.path.join(CERTS_PATH, 'client_bad.pem')
NO_SAN_CA = os.path.join(CERTS_PATH, 'cacert.no_san.pem')
DEFAULT_CA_DIR = os.path.join(CERTS_PATH, 'ca_path_test')
+IPV6_ADDR_CA = os.path.join(CERTS_PATH, 'server.ipv6addr.crt')
+
def _has_ipv6(host):
""" Returns True if the system can bind an IPv6 address. """
diff --git a/dummyserver/testcase.py b/dummyserver/testcase.py
index e5ae51b..f5588a0 100644
--- a/dummyserver/testcase.py
+++ b/dummyserver/testcase.py
@@ -42,10 +42,10 @@ class SocketDummyServerTestCase(unittest.TestCase):
@classmethod
def start_response_handler(cls, response, num=1, block_send=None):
ready_event = threading.Event()
+
def socket_handler(listener):
for _ in range(num):
ready_event.set()
- ready_event.clear()
sock = listener.accept()[0]
consume_socket(sock)
@@ -61,9 +61,9 @@ class SocketDummyServerTestCase(unittest.TestCase):
@classmethod
def start_basic_handler(cls, **kw):
return cls.start_response_handler(
- b'HTTP/1.1 200 OK\r\n'
- b'Content-Length: 0\r\n'
- b'\r\n', **kw)
+ b'HTTP/1.1 200 OK\r\n'
+ b'Content-Length: 0\r\n'
+ b'\r\n', **kw)
@classmethod
def tearDownClass(cls):
@@ -113,6 +113,17 @@ class HTTPSDummyServerTestCase(HTTPDummyServerTestCase):
certs = DEFAULT_CERTS
+class IPV6HTTPSDummyServerTestCase(HTTPSDummyServerTestCase):
+ host = '::1'
+
+ @classmethod
+ def setUpClass(cls):
+ if not socket.has_ipv6:
+ raise SkipTest('IPv6 not available')
+ else:
+ super(IPV6HTTPSDummyServerTestCase, cls).setUpClass()
+
+
class HTTPDummyProxyTestCase(unittest.TestCase):
http_host = 'localhost'
diff --git a/setup.cfg b/setup.cfg
index 25a6815..ca24a71 100644
--- a/setup.cfg
+++ b/setup.cfg
@@ -6,11 +6,20 @@ cover-package = urllib3
cover-erase = true
[flake8]
+exclude = ./docs/conf.py,./test/*,./urllib3/packages/*
max-line-length = 99
[wheel]
universal = 1
+[metadata]
+provides-extra = secure
+requires-dist =
+ pyOpenSSL>=0.13; python_version<="2.7" and extra == 'secure'
+ ndg-httpsclient; python_version<="2.7" and extra == 'secure'
+ pyasn1; python_version<="2.7" and extra == 'secure'
+ certifi; extra == 'secure'
+
[egg_info]
tag_build =
tag_date = 0
diff --git a/setup.py b/setup.py
index 1e1888d..02d5ec6 100644
--- a/setup.py
+++ b/setup.py
@@ -6,9 +6,9 @@ import os
import re
try:
- import setuptools
+ import setuptools # noqa: unused
except ImportError:
- pass # No 'develop' command, oh well.
+ pass # No 'develop' command, oh well.
base_path = os.path.dirname(__file__)
@@ -55,14 +55,11 @@ setup(name='urllib3',
],
test_suite='test',
extras_require={
- 'secure;python_version<="2.7"': [
- 'pyOpenSSL',
+ 'secure': [
+ 'pyOpenSSL>=0.13',
'ndg-httpsclient',
'pyasn1',
'certifi',
],
- 'secure;python_version>"2.7"': [
- 'certifi',
- ],
},
)
diff --git a/test/test_util.py b/test/test_util.py
index fa59ada..ef4caab 100644
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -1,3 +1,4 @@
+import hashlib
import warnings
import logging
import unittest
@@ -18,12 +19,14 @@ from urllib3.util.url import (
from urllib3.util.ssl_ import (
resolve_cert_reqs,
ssl_wrap_socket,
+ _const_compare_digest_backport,
)
from urllib3.exceptions import (
LocationParseError,
TimeoutStateError,
InsecureRequestWarning,
SSLError,
+ SNIMissingWarning,
)
from urllib3.util import is_fp_closed, ssl_
@@ -412,3 +415,30 @@ class TestUtil(unittest.TestCase):
ssl_wrap_socket(ssl_context=mock_context, sock=socket)
mock_context.wrap_socket.assert_called_once_with(socket)
ssl_.HAS_SNI = HAS_SNI
+
+ def test_ssl_wrap_socket_with_no_sni_warns(self):
+ socket = object()
+ mock_context = Mock()
+ # Ugly preservation of original value
+ HAS_SNI = ssl_.HAS_SNI
+ ssl_.HAS_SNI = False
+ with patch('warnings.warn') as warn:
+ ssl_wrap_socket(ssl_context=mock_context, sock=socket)
+ mock_context.wrap_socket.assert_called_once_with(socket)
+ ssl_.HAS_SNI = HAS_SNI
+ self.assertTrue(warn.call_count >= 1)
+ warnings = [call[0][1] for call in warn.call_args_list]
+ self.assertTrue(SNIMissingWarning in warnings)
+
+ def test_const_compare_digest_fallback(self):
+ target = hashlib.sha256(b'abcdef').digest()
+ self.assertTrue(_const_compare_digest_backport(target, target))
+
+ prefix = target[:-1]
+ self.assertFalse(_const_compare_digest_backport(target, prefix))
+
+ suffix = target + b'0'
+ self.assertFalse(_const_compare_digest_backport(target, suffix))
+
+ incorrect = hashlib.sha256(b'xyz').digest()
+ self.assertFalse(_const_compare_digest_backport(target, incorrect))
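``_const_compare_digest_backport``, exercised by the new test above, is a fallback for interpreters that lack ``hmac.compare_digest``. A sketch of how such a constant-time comparison typically works (not necessarily urllib3's exact code)::

    def const_compare_digest(a, b):
        # Accumulate differences with bitwise OR so the loop never
        # short-circuits; unequal lengths can never compare equal.
        result = abs(len(a) - len(b))
        for left, right in zip(bytearray(a), bytearray(b)):
            result |= left ^ right
        return result == 0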
diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
index 9294adf..0f31fa0 100644
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -49,6 +49,11 @@ SHORT_TIMEOUT = 0.001
LONG_TIMEOUT = 0.01
+def wait_for_socket(ready_event):
+ ready_event.wait()
+ ready_event.clear()
+
+
class TestConnectionPoolTimeouts(SocketDummyServerTestCase):
def test_timeout_float(self):
@@ -57,11 +62,12 @@ class TestConnectionPoolTimeouts(SocketDummyServerTestCase):
# Pool-global timeout
pool = HTTPConnectionPool(self.host, self.port, timeout=SHORT_TIMEOUT, retries=False)
+ wait_for_socket(ready_event)
self.assertRaises(ReadTimeoutError, pool.request, 'GET', '/')
block_event.set() # Release block
# Shouldn't raise this time
- ready_event.wait()
+ wait_for_socket(ready_event)
block_event.set() # Pre-release block
pool.request('GET', '/')
@@ -92,12 +98,13 @@ class TestConnectionPoolTimeouts(SocketDummyServerTestCase):
timeout = Timeout(read=SHORT_TIMEOUT)
pool = HTTPConnectionPool(self.host, self.port, timeout=timeout, retries=False)
+ wait_for_socket(ready_event)
conn = pool._get_conn()
self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET', '/')
pool._put_conn(conn)
block_event.set() # Release request
- ready_event.wait()
+ wait_for_socket(ready_event)
block_event.clear()
self.assertRaises(ReadTimeoutError, pool.request, 'GET', '/')
block_event.set() # Release request
@@ -106,7 +113,7 @@ class TestConnectionPoolTimeouts(SocketDummyServerTestCase):
pool = HTTPConnectionPool(self.host, self.port, timeout=LONG_TIMEOUT, retries=False)
conn = pool._get_conn()
- ready_event.wait()
+ wait_for_socket(ready_event)
now = time.time()
self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET', '/', timeout=timeout)
delta = time.time() - now
@@ -115,7 +122,7 @@ class TestConnectionPoolTimeouts(SocketDummyServerTestCase):
self.assertTrue(delta < LONG_TIMEOUT, "timeout was pool-level LONG_TIMEOUT rather than request-level SHORT_TIMEOUT")
pool._put_conn(conn)
- ready_event.wait()
+ wait_for_socket(ready_event)
now = time.time()
self.assertRaises(ReadTimeoutError, pool.request, 'GET', '/', timeout=timeout)
delta = time.time() - now
@@ -125,24 +132,19 @@ class TestConnectionPoolTimeouts(SocketDummyServerTestCase):
# Timeout int/float passed directly to request and _make_request should
# raise a request timeout
- ready_event.wait()
+ wait_for_socket(ready_event)
self.assertRaises(ReadTimeoutError, pool.request, 'GET', '/', timeout=SHORT_TIMEOUT)
block_event.set() # Release request
- ready_event.wait()
+ wait_for_socket(ready_event)
conn = pool._new_conn()
# FIXME: This assert flakes sometimes. Not sure why.
self.assertRaises(ReadTimeoutError, pool._make_request, conn, 'GET', '/', timeout=SHORT_TIMEOUT)
block_event.set() # Release request
def test_connect_timeout(self):
- def noop_handler(listener):
- return
-
- self._start_server(noop_handler)
-
url = '/'
- host, port = self.host, self.port
+ host, port = TARPIT_HOST, 80
timeout = Timeout(connect=SHORT_TIMEOUT)
# Pool-global timeout
@@ -164,18 +166,15 @@ class TestConnectionPoolTimeouts(SocketDummyServerTestCase):
self.assertRaises(ConnectTimeoutError, pool.request, 'GET', url, timeout=timeout)
def test_total_applies_connect(self):
- def noop_handler(listener):
- return
-
- self._start_server(noop_handler)
+ host, port = TARPIT_HOST, 80
timeout = Timeout(total=None, connect=SHORT_TIMEOUT)
- pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
+ pool = HTTPConnectionPool(host, port, timeout=timeout)
conn = pool._get_conn()
self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET', '/')
timeout = Timeout(connect=3, read=5, total=SHORT_TIMEOUT)
- pool = HTTPConnectionPool(self.host, self.port, timeout=timeout)
+ pool = HTTPConnectionPool(host, port, timeout=timeout)
conn = pool._get_conn()
self.assertRaises(ConnectTimeoutError, pool._make_request, conn, 'GET', '/')
@@ -183,13 +182,14 @@ class TestConnectionPoolTimeouts(SocketDummyServerTestCase):
block_event = Event()
ready_event = self.start_basic_handler(block_send=block_event, num=2)
+ wait_for_socket(ready_event)
# This will get the socket to raise an EAGAIN on the read
timeout = Timeout(connect=3, read=SHORT_TIMEOUT)
pool = HTTPConnectionPool(self.host, self.port, timeout=timeout, retries=False)
self.assertRaises(ReadTimeoutError, pool.request, 'GET', '/')
block_event.set()
- ready_event.wait()
+ wait_for_socket(ready_event)
block_event.clear()
# The connect should succeed and this should hit the read timeout
@@ -666,15 +666,15 @@ class TestConnectionPool(HTTPDummyServerTestCase):
def test_cleanup_on_connection_error(self):
'''
- Test that connections are recycled to the pool on
+ Test that connections are recycled to the pool on
connection errors where no http response is received.
'''
poolsize = 3
with HTTPConnectionPool(self.host, self.port, maxsize=poolsize, block=True) as http:
self.assertEqual(http.pool.qsize(), poolsize)
- # force a connection error by supplying a non-existent
- # url. We won't get a response for this and so the
+ # force a connection error by supplying a non-existent
+ # url. We won't get a response for this and so the
# conn won't be implicitly returned to the pool.
self.assertRaises(MaxRetryError,
http.request, 'GET', '/redirect', fields={'target': '/'}, release_conn=False, retries=0)
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
index 862ebd9..7319d7e 100644
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -8,9 +8,12 @@ import warnings
import mock
from nose.plugins.skip import SkipTest
-from dummyserver.testcase import HTTPSDummyServerTestCase
+from dummyserver.testcase import (
+ HTTPSDummyServerTestCase, IPV6HTTPSDummyServerTestCase
+)
from dummyserver.server import (DEFAULT_CA, DEFAULT_CA_BAD, DEFAULT_CERTS,
- NO_SAN_CERTS, NO_SAN_CA, DEFAULT_CA_DIR)
+ NO_SAN_CERTS, NO_SAN_CA, DEFAULT_CA_DIR,
+ IPV6_ADDR_CERTS, IPV6_ADDR_CA, HAS_IPV6)
from test import (
onlyPy26OrOlder,
@@ -35,6 +38,7 @@ from urllib3.exceptions import (
)
from urllib3.packages import six
from urllib3.util.timeout import Timeout
+from urllib3.util.ssl_ import HAS_SNI
ResourceWarning = getattr(
@@ -77,7 +81,10 @@ class TestHTTPS(HTTPSDummyServerTestCase):
self.assertFalse(warn.called, warn.call_args_list)
else:
self.assertTrue(warn.called)
- call, = warn.call_args_list
+ if HAS_SNI:
+ call = warn.call_args_list[0]
+ else:
+ call = warn.call_args_list[1]
error = call[0][1]
self.assertEqual(error, InsecurePlatformWarning)
@@ -176,8 +183,10 @@ class TestHTTPS(HTTPSDummyServerTestCase):
calls = warn.call_args_list
if sys.version_info >= (2, 7, 9):
category = calls[0][0][1]
- else:
+ elif HAS_SNI:
category = calls[1][0][1]
+ else:
+ category = calls[2][0][1]
self.assertEqual(category, InsecureRequestWarning)
@requires_network
@@ -460,5 +469,20 @@ class TestHTTPS_NoSAN(HTTPSDummyServerTestCase):
self.assertTrue(warn.called)
+
+class TestHTTPS_IPv6Addr(IPV6HTTPSDummyServerTestCase):
+ certs = IPV6_ADDR_CERTS
+
+ def test_strip_square_brackets_before_validating(self):
+ """Test that the fix for #760 works."""
+ if not HAS_IPV6:
+ raise SkipTest("Only runs on IPv6 systems")
+ https_pool = HTTPSConnectionPool('[::1]', self.port,
+ cert_reqs='CERT_REQUIRED',
+ ca_certs=IPV6_ADDR_CA)
+ r = https_pool.request('GET', '/')
+ self.assertEqual(r.status, 200)
+
+
if __name__ == '__main__':
unittest.main()
diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py
index 099ac52..4065ff8 100644
--- a/test/with_dummyserver/test_poolmanager.py
+++ b/test/with_dummyserver/test_poolmanager.py
@@ -69,7 +69,7 @@ class TestPoolManager(HTTPDummyServerTestCase):
try:
http.request('GET', '%s/redirect' % self.base_url,
fields={'target': cross_host_location},
- timeout=0.01, retries=0)
+ timeout=1, retries=0)
self.fail("Request succeeded instead of raising an exception like it should.")
except MaxRetryError:
@@ -77,7 +77,7 @@ class TestPoolManager(HTTPDummyServerTestCase):
r = http.request('GET', '%s/redirect' % self.base_url,
fields={'target': '%s/echo?a=b' % self.base_url_alt},
- timeout=0.01, retries=1)
+ timeout=1, retries=1)
self.assertEqual(r._pool.host, self.host_alt)
@@ -137,7 +137,7 @@ class TestPoolManager(HTTPDummyServerTestCase):
r = http.request('POST', '%s/headers' % self.base_url)
returned_headers = json.loads(r.data.decode())
self.assertEqual(returned_headers.get('Foo'), 'bar')
-
+
r = http.request_encode_url('GET', '%s/headers' % self.base_url)
returned_headers = json.loads(r.data.decode())
self.assertEqual(returned_headers.get('Foo'), 'bar')
@@ -162,6 +162,12 @@ class TestPoolManager(HTTPDummyServerTestCase):
r = http.request('GET', 'http://%s:%s/' % (self.host, self.port))
self.assertEqual(r.status, 200)
+ def test_http_with_ca_cert_dir(self):
+ http = PoolManager(ca_certs='REQUIRED', ca_cert_dir='/nosuchdir')
+
+ r = http.request('GET', 'http://%s:%s/' % (self.host, self.port))
+ self.assertEqual(r.status, 200)
+
class TestIPv6PoolManager(IPv6HTTPDummyServerTestCase):
if not HAS_IPV6:
diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
index b2894a8..b37d8bb 100644
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -124,7 +124,7 @@ class TestHTTPProxyManager(HTTPDummyProxyTestCase):
try:
http.request('GET', '%s/redirect' % self.http_url,
fields={'target': cross_host_location},
- timeout=0.1, retries=0)
+ timeout=1, retries=0)
self.fail("We don't want to follow redirects here.")
except MaxRetryError:
@@ -132,7 +132,7 @@ class TestHTTPProxyManager(HTTPDummyProxyTestCase):
r = http.request('GET', '%s/redirect' % self.http_url,
fields={'target': '%s/echo?a=b' % self.http_url_alt},
- timeout=0.1, retries=1)
+ timeout=1, retries=1)
self.assertNotEqual(r._pool.host, self.http_host_alt)
def test_cross_protocol_redirect(self):
@@ -142,7 +142,7 @@ class TestHTTPProxyManager(HTTPDummyProxyTestCase):
try:
http.request('GET', '%s/redirect' % self.http_url,
fields={'target': cross_protocol_location},
- timeout=0.1, retries=0)
+ timeout=1, retries=0)
self.fail("We don't want to follow redirects here.")
except MaxRetryError:
@@ -150,7 +150,7 @@ class TestHTTPProxyManager(HTTPDummyProxyTestCase):
r = http.request('GET', '%s/redirect' % self.http_url,
fields={'target': '%s/echo?a=b' % self.https_url},
- timeout=0.1, retries=1)
+ timeout=1, retries=1)
self.assertEqual(r._pool.host, self.https_host)
def test_headers(self):
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
index d09002b..1e6113f 100644
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -30,6 +30,7 @@ except ImportError:
class MimeToolMessage(object):
pass
from threading import Event
+import select
import socket
import ssl
@@ -366,6 +367,72 @@ class TestSocketClosing(SocketDummyServerTestCase):
self.assertRaises(ProtocolError, response.read)
self.assertEqual(poolsize, pool.pool.qsize())
+ def test_connection_closed_on_read_timeout_preload_false(self):
+ timed_out = Event()
+
+ def socket_handler(listener):
+ sock = listener.accept()[0]
+
+ # Consume request
+ buf = b''
+ while not buf.endswith(b'\r\n\r\n'):
+ buf = sock.recv(65535)
+
+ # Send partial chunked response and then hang.
+ sock.send((
+ 'HTTP/1.1 200 OK\r\n'
+ 'Content-Type: text/plain\r\n'
+ 'Transfer-Encoding: chunked\r\n'
+ '\r\n'
+ '8\r\n'
+ '12345678\r\n').encode('utf-8')
+ )
+ timed_out.wait(5)
+
+ # Expect a new request, but keep hold of the old socket to avoid
+ # leaking it. Because we don't want to hang this thread, we
+ # actually use select.select to confirm that a new request is
+ # coming in: this lets us time the thread out.
+ rlist, _, _ = select.select([listener], [], [], 1)
+ assert rlist
+ new_sock = listener.accept()[0]
+
+ # Consume request
+ buf = b''
+ while not buf.endswith(b'\r\n\r\n'):
+ buf = new_sock.recv(65535)
+
+ # Send complete chunked response.
+ new_sock.send((
+ 'HTTP/1.1 200 OK\r\n'
+ 'Content-Type: text/plain\r\n'
+ 'Transfer-Encoding: chunked\r\n'
+ '\r\n'
+ '8\r\n'
+ '12345678\r\n'
+ '0\r\n\r\n').encode('utf-8')
+ )
+
+ new_sock.close()
+ sock.close()
+
+ self._start_server(socket_handler)
+ with HTTPConnectionPool(self.host, self.port) as pool:
+ # First request should fail.
+ response = pool.urlopen('GET', '/', retries=0,
+ preload_content=False,
+ timeout=Timeout(connect=1, read=0.001))
+ try:
+ self.assertRaises(ReadTimeoutError, response.read)
+ finally:
+ timed_out.set()
+
+ # Second should succeed.
+ response = pool.urlopen('GET', '/', retries=0,
+ preload_content=False,
+ timeout=Timeout(connect=1, read=0.1))
+ self.assertEqual(len(response.read()), 8)
+
class TestProxyManager(SocketDummyServerTestCase):
@@ -437,6 +504,8 @@ class TestProxyManager(SocketDummyServerTestCase):
self.assertTrue(b'For The Proxy: YEAH!\r\n' in r.data)
def test_retries(self):
+ close_event = Event()
+
def echo_socket_handler(listener):
sock = listener.accept()[0]
# First request, which should fail
@@ -455,6 +524,7 @@ class TestProxyManager(SocketDummyServerTestCase):
'\r\n'
'%s' % (len(buf), buf.decode('utf-8'))).encode('utf-8'))
sock.close()
+ close_event.set()
self._start_server(echo_socket_handler)
base_url = 'http://%s:%d' % (self.host, self.port)
@@ -466,6 +536,7 @@ class TestProxyManager(SocketDummyServerTestCase):
assert_same_host=False, retries=1)
self.assertEqual(r.status, 200)
+ close_event.wait(timeout=1)
self.assertRaises(ProxyError, conn.urlopen, 'GET',
'http://www.google.com',
assert_same_host=False, retries=False)
diff --git a/urllib3/__init__.py b/urllib3/__init__.py
index 86bb71d..e43991a 100644
--- a/urllib3/__init__.py
+++ b/urllib3/__init__.py
@@ -2,10 +2,8 @@
urllib3 - Thread-safe connection pooling and re-using.
"""
-__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
-__license__ = 'MIT'
-__version__ = '1.12'
-
+from __future__ import absolute_import
+import warnings
from .connectionpool import (
HTTPConnectionPool,
@@ -32,8 +30,30 @@ except ImportError:
def emit(self, record):
pass
+__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
+__license__ = 'MIT'
+__version__ = '1.13.1'
+
+__all__ = (
+ 'HTTPConnectionPool',
+ 'HTTPSConnectionPool',
+ 'PoolManager',
+ 'ProxyManager',
+ 'HTTPResponse',
+ 'Retry',
+ 'Timeout',
+ 'add_stderr_logger',
+ 'connection_from_url',
+ 'disable_warnings',
+ 'encode_multipart_formdata',
+ 'get_host',
+ 'make_headers',
+ 'proxy_from_url',
+)
+
logging.getLogger(__name__).addHandler(NullHandler())
+
def add_stderr_logger(level=logging.DEBUG):
"""
Helper for quickly adding a StreamHandler to the logger. Useful for
@@ -55,7 +75,6 @@ def add_stderr_logger(level=logging.DEBUG):
del NullHandler
-import warnings
# SecurityWarning's always go off by default.
warnings.simplefilter('always', exceptions.SecurityWarning, append=True)
# SubjectAltNameWarning's should go off once per host
@@ -63,6 +82,9 @@ warnings.simplefilter('default', exceptions.SubjectAltNameWarning)
# InsecurePlatformWarning's don't vary between requests, so we keep it default.
warnings.simplefilter('default', exceptions.InsecurePlatformWarning,
append=True)
+# SNIMissingWarnings should go off only once.
+warnings.simplefilter('default', exceptions.SNIMissingWarning)
+
def disable_warnings(category=exceptions.HTTPWarning):
"""
diff --git a/urllib3/_collections.py b/urllib3/_collections.py
index af8074a..b69ce20 100644
--- a/urllib3/_collections.py
+++ b/urllib3/_collections.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
from collections import Mapping, MutableMapping
try:
from threading import RLock
@@ -167,7 +168,7 @@ class HTTPHeaderDict(MutableMapping):
def __ne__(self, other):
return not self.__eq__(other)
- if not PY3: # Python 2
+ if not PY3: # Python 2
iterkeys = MutableMapping.iterkeys
itervalues = MutableMapping.itervalues
@@ -234,7 +235,7 @@ class HTTPHeaderDict(MutableMapping):
"""
if len(args) > 1:
raise TypeError("extend() takes at most 1 positional "
- "arguments ({} given)".format(len(args)))
+ "arguments ({0} given)".format(len(args)))
other = args[0] if len(args) >= 1 else ()
if isinstance(other, HTTPHeaderDict):
@@ -304,7 +305,7 @@ class HTTPHeaderDict(MutableMapping):
return list(self.iteritems())
@classmethod
- def from_httplib(cls, message): # Python 2
+ def from_httplib(cls, message): # Python 2
"""Read headers from a Python 2 httplib message object."""
# python2.7 does not expose a proper API for exporting multiheaders
# efficiently. This function re-reads raw lines from the message
diff --git a/urllib3/connection.py b/urllib3/connection.py
index 115eac9..0075541 100644
--- a/urllib3/connection.py
+++ b/urllib3/connection.py
@@ -1,4 +1,6 @@
+from __future__ import absolute_import
import datetime
+import os
import sys
import socket
from socket import error as SocketError, timeout as SocketTimeout
@@ -6,18 +8,13 @@ import warnings
import six
try: # Python 3
- from http.client import HTTPConnection as _HTTPConnection, HTTPException
+ from http.client import HTTPConnection as _HTTPConnection
+ from http.client import HTTPException # noqa: unused in this module
except ImportError:
- from httplib import HTTPConnection as _HTTPConnection, HTTPException
-
-
-class DummyConnection(object):
- "Used to detect a failed ConnectionCls import."
- pass
-
+ from httplib import HTTPConnection as _HTTPConnection
+ from httplib import HTTPException # noqa: unused in this module
try: # Compiled with SSL?
- HTTPSConnection = DummyConnection
import ssl
BaseSSLError = ssl.SSLError
except (ImportError, AttributeError): # Platform-specific: No SSL.
@@ -61,6 +58,11 @@ port_by_scheme = {
RECENT_DATE = datetime.date(2014, 1, 1)
+class DummyConnection(object):
+ """Used to detect a failed ConnectionCls import."""
+ pass
+
+
class HTTPConnection(_HTTPConnection, object):
"""
Based on httplib.HTTPConnection but provides an extra constructor
@@ -205,10 +207,10 @@ class VerifiedHTTPSConnection(HTTPSConnection):
self.key_file = key_file
self.cert_file = cert_file
self.cert_reqs = cert_reqs
- self.ca_certs = ca_certs
- self.ca_cert_dir = ca_cert_dir
self.assert_hostname = assert_hostname
self.assert_fingerprint = assert_fingerprint
+ self.ca_certs = ca_certs and os.path.expanduser(ca_certs)
+ self.ca_cert_dir = ca_cert_dir and os.path.expanduser(ca_cert_dir)
def connect(self):
# Add certificate verification
@@ -263,10 +265,19 @@ class VerifiedHTTPSConnection(HTTPSConnection):
'for details.)'.format(hostname)),
SubjectAltNameWarning
)
- match_hostname(cert, self.assert_hostname or hostname)
- self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED
- or self.assert_fingerprint is not None)
+ # In case the hostname is an IPv6 address, strip the square
+ # brackets from it before using it to validate. This is because
+ # a certificate with an IPv6 address in it won't have square
+ # brackets around that address. Sadly, match_hostname won't do this
+ # for us: it expects the plain host part without any extra work
+ # that might have been done to make it palatable to httplib.
+ asserted_hostname = self.assert_hostname or hostname
+ asserted_hostname = asserted_hostname.strip('[]')
+ match_hostname(cert, asserted_hostname)
+
+ self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED or
+ self.assert_fingerprint is not None)
if ssl:
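The bracket stripping above is the heart of the IPv6 + SSL fix (Issue #761): certificates carry the bare address, while the host string handed to httplib keeps its square brackets. Roughly, assuming a server certificate issued for ``::1``::

    >>> hostname = '[::1]'      # host string as httplib sees it
    >>> hostname.strip('[]')    # form that match_hostname expects
    '::1'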
diff --git a/urllib3/connectionpool.py b/urllib3/connectionpool.py
index a99d4ec..ef60000 100644
--- a/urllib3/connectionpool.py
+++ b/urllib3/connectionpool.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import errno
import logging
import sys
@@ -10,7 +11,8 @@ try: # Python 3
from queue import LifoQueue, Empty, Full
except ImportError:
from Queue import LifoQueue, Empty, Full
- import Queue as _ # Platform-specific: Windows
+ # Queue is imported for side effects on MS Windows
+ import Queue as _unused_module_Queue # noqa: unused
from .exceptions import (
@@ -22,7 +24,6 @@ from .exceptions import (
LocationValueError,
MaxRetryError,
ProxyError,
- ConnectTimeoutError,
ReadTimeoutError,
SSLError,
TimeoutError,
@@ -35,7 +36,7 @@ from .connection import (
port_by_scheme,
DummyConnection,
HTTPConnection, HTTPSConnection, VerifiedHTTPSConnection,
- HTTPException, BaseSSLError, ConnectionError
+ HTTPException, BaseSSLError,
)
from .request import RequestMethods
from .response import HTTPResponse
@@ -54,7 +55,7 @@ log = logging.getLogger(__name__)
_Default = object()
-## Pool objects
+# Pool objects
class ConnectionPool(object):
"""
Base class for all connection pools, such as
@@ -68,8 +69,7 @@ class ConnectionPool(object):
if not host:
raise LocationValueError("No host specified.")
- # httplib doesn't like it when we include brackets in ipv6 addresses
- self.host = host.strip('[]')
+ self.host = host
self.port = port
def __str__(self):
@@ -645,22 +645,24 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
return response
log.info("Redirecting %s -> %s" % (url, redirect_location))
- return self.urlopen(method, redirect_location, body, headers,
- retries=retries, redirect=redirect,
- assert_same_host=assert_same_host,
- timeout=timeout, pool_timeout=pool_timeout,
- release_conn=release_conn, **response_kw)
+ return self.urlopen(
+ method, redirect_location, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
# Check if we should retry the HTTP response.
if retries.is_forced_retry(method, status_code=response.status):
retries = retries.increment(method, url, response=response, _pool=self)
retries.sleep()
log.info("Forced retry: %s" % url)
- return self.urlopen(method, url, body, headers,
- retries=retries, redirect=redirect,
- assert_same_host=assert_same_host,
- timeout=timeout, pool_timeout=pool_timeout,
- release_conn=release_conn, **response_kw)
+ return self.urlopen(
+ method, url, body, headers,
+ retries=retries, redirect=redirect,
+ assert_same_host=assert_same_host,
+ timeout=timeout, pool_timeout=pool_timeout,
+ release_conn=release_conn, **response_kw)
return response
diff --git a/urllib3/contrib/appengine.py b/urllib3/contrib/appengine.py
index ed9d8b8..884cdb2 100644
--- a/urllib3/contrib/appengine.py
+++ b/urllib3/contrib/appengine.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import logging
import os
import warnings
@@ -60,7 +61,7 @@ class AppEngineManager(RequestMethods):
raise AppEnginePlatformError(
"URLFetch is not available in this environment.")
- if is_prod_appengine_v2():
+ if is_prod_appengine_mvms():
raise AppEnginePlatformError(
"Use normal urllib3.PoolManager instead of AppEngineManager"
"on Managed VMs, as using URLFetch is not necessary in "
@@ -108,14 +109,14 @@ class AppEngineManager(RequestMethods):
raise TimeoutError(self, e)
except urlfetch.InvalidURLError as e:
- if 'too large' in e.message:
+ if 'too large' in str(e):
raise AppEnginePlatformError(
"URLFetch request too large, URLFetch only "
"supports requests up to 10mb in size.", e)
raise ProtocolError(e)
except urlfetch.DownloadError as e:
- if 'Too many redirects' in e.message:
+ if 'Too many redirects' in str(e):
raise MaxRetryError(self, url, reason=e)
raise ProtocolError(e)
@@ -155,7 +156,7 @@ class AppEngineManager(RequestMethods):
def _urlfetch_response_to_http_response(self, urlfetch_resp, **response_kw):
- if is_prod_appengine_v1():
+ if is_prod_appengine():
# Production GAE handles deflate encoding automatically, but does
# not remove the encoding header.
content_encoding = urlfetch_resp.headers.get('content-encoding')
@@ -176,7 +177,7 @@ class AppEngineManager(RequestMethods):
if timeout is Timeout.DEFAULT_TIMEOUT:
return 5 # 5s is the default timeout for URLFetch.
if isinstance(timeout, Timeout):
- if not timeout.read is timeout.connect:
+ if timeout.read is not timeout.connect:
warnings.warn(
"URLFetch does not support granular timeout settings, "
"reverting to total timeout.", AppEnginePlatformWarning)
@@ -199,12 +200,12 @@ class AppEngineManager(RequestMethods):
def is_appengine():
return (is_local_appengine() or
- is_prod_appengine_v1() or
- is_prod_appengine_v2())
+ is_prod_appengine() or
+ is_prod_appengine_mvms())
def is_appengine_sandbox():
- return is_appengine() and not is_prod_appengine_v2()
+ return is_appengine() and not is_prod_appengine_mvms()
def is_local_appengine():
@@ -212,11 +213,11 @@ def is_local_appengine():
'Development/' in os.environ['SERVER_SOFTWARE'])
-def is_prod_appengine_v1():
+def is_prod_appengine():
return ('APPENGINE_RUNTIME' in os.environ and
'Google App Engine/' in os.environ['SERVER_SOFTWARE'] and
- not is_prod_appengine_v2())
+ not is_prod_appengine_mvms())
-def is_prod_appengine_v2():
+def is_prod_appengine_mvms():
return os.environ.get('GAE_VM', False) == 'true'
diff --git a/urllib3/contrib/ntlmpool.py b/urllib3/contrib/ntlmpool.py
index c6b266f..c136a23 100644
--- a/urllib3/contrib/ntlmpool.py
+++ b/urllib3/contrib/ntlmpool.py
@@ -3,6 +3,7 @@ NTLM authenticating pool, contributed by erikcederstran
Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
"""
+from __future__ import absolute_import
try:
from http.client import HTTPSConnection
diff --git a/urllib3/contrib/pyopenssl.py b/urllib3/contrib/pyopenssl.py
index c20ae46..5996153 100644
--- a/urllib3/contrib/pyopenssl.py
+++ b/urllib3/contrib/pyopenssl.py
@@ -43,6 +43,7 @@ Module Variables
.. _crime attack: https://en.wikipedia.org/wiki/CRIME_(security_exploit)
'''
+from __future__ import absolute_import
try:
from ndg.httpsclient.ssl_peer_verification import SUBJ_ALT_NAME_SUPPORT
@@ -53,7 +54,7 @@ except SyntaxError as e:
import OpenSSL.SSL
from pyasn1.codec.der import decoder as der_decoder
from pyasn1.type import univ, constraint
-from socket import _fileobject, timeout
+from socket import _fileobject, timeout, error as SocketError
import ssl
import select
@@ -71,6 +72,12 @@ _openssl_versions = {
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}
+if hasattr(ssl, 'PROTOCOL_TLSv1_1') and hasattr(OpenSSL.SSL, 'TLSv1_1_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_1] = OpenSSL.SSL.TLSv1_1_METHOD
+
+if hasattr(ssl, 'PROTOCOL_TLSv1_2') and hasattr(OpenSSL.SSL, 'TLSv1_2_METHOD'):
+ _openssl_versions[ssl.PROTOCOL_TLSv1_2] = OpenSSL.SSL.TLSv1_2_METHOD
+
try:
_openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
except AttributeError:
@@ -79,8 +86,8 @@ except AttributeError:
_openssl_verify = {
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
- ssl.CERT_REQUIRED: OpenSSL.SSL.VERIFY_PEER
- + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
+ ssl.CERT_REQUIRED:
+ OpenSSL.SSL.VERIFY_PEER + OpenSSL.SSL.VERIFY_FAIL_IF_NO_PEER_CERT,
}
DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
@@ -88,12 +95,6 @@ DEFAULT_SSL_CIPHER_LIST = util.ssl_.DEFAULT_CIPHERS
# OpenSSL will only write 16K at a time
SSL_WRITE_BLOCKSIZE = 16384
-try:
- _ = memoryview
- has_memoryview = True
-except NameError:
- has_memoryview = False
-
orig_util_HAS_SNI = util.HAS_SNI
orig_connection_ssl_wrap_socket = connection.ssl_wrap_socket
@@ -112,7 +113,7 @@ def extract_from_urllib3():
util.HAS_SNI = orig_util_HAS_SNI
-### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
+# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
class SubjectAltName(BaseSubjectAltName):
'''ASN.1 implementation for subjectAltNames support'''
@@ -123,7 +124,7 @@ class SubjectAltName(BaseSubjectAltName):
constraint.ValueSizeConstraint(1, 1024)
-### Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
+# Note: This is a slightly bug-fixed version of same from ndg-httpsclient.
def get_subj_alt_name(peer_cert):
# Search through extensions
dns_name = []
@@ -181,7 +182,7 @@ class WrappedSocket(object):
if self.suppress_ragged_eofs and e.args == (-1, 'Unexpected EOF'):
return b''
else:
- raise
+ raise SocketError(e)
except OpenSSL.SSL.ZeroReturnError as e:
if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
return b''
@@ -212,12 +213,9 @@ class WrappedSocket(object):
continue
def sendall(self, data):
- if has_memoryview and not isinstance(data, memoryview):
- data = memoryview(data)
-
total_sent = 0
while total_sent < len(data):
- sent = self._send_until_done(data[total_sent:total_sent+SSL_WRITE_BLOCKSIZE])
+ sent = self._send_until_done(data[total_sent:total_sent + SSL_WRITE_BLOCKSIZE])
total_sent += sent
def shutdown(self):
@@ -226,7 +224,10 @@ class WrappedSocket(object):
def close(self):
if self._makefile_refs < 1:
- return self.connection.close()
+ try:
+ return self.connection.close()
+ except OpenSSL.SSL.Error:
+ return
else:
self._makefile_refs -= 1
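For context, the WrappedSocket and protocol-map changes above only take effect once the PyOpenSSL backend is injected; a minimal sketch of enabling it (illustrative, not part of the patch):

    import urllib3
    import urllib3.contrib.pyopenssl

    # Monkey-patch urllib3 to route TLS through the WrappedSocket defined above.
    # With a recent pyOpenSSL and ssl module, PROTOCOL_TLSv1_1/TLSv1_2 now map
    # to the corresponding OpenSSL methods.
    urllib3.contrib.pyopenssl.inject_into_urllib3()

    http = urllib3.PoolManager()
    r = http.request('GET', 'https://www.example.com/')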
diff --git a/urllib3/exceptions.py b/urllib3/exceptions.py
index 9607d65..8e07eb6 100644
--- a/urllib3/exceptions.py
+++ b/urllib3/exceptions.py
@@ -1,16 +1,17 @@
+from __future__ import absolute_import
+# Base Exceptions
-## Base Exceptions
class HTTPError(Exception):
"Base exception used by this module."
pass
+
class HTTPWarning(Warning):
"Base warning used by this module."
pass
-
class PoolError(HTTPError):
"Base exception for errors caused within a pool."
def __init__(self, pool, message):
@@ -57,7 +58,7 @@ class ProtocolError(HTTPError):
ConnectionError = ProtocolError
-## Leaf Exceptions
+# Leaf Exceptions
class MaxRetryError(RequestError):
"""Raised when the maximum number of retries is exceeded.
@@ -112,10 +113,12 @@ class ConnectTimeoutError(TimeoutError):
"Raised when a socket timeout occurs while connecting to a server"
pass
+
class NewConnectionError(ConnectTimeoutError, PoolError):
"Raised when we fail to establish a new connection. Usually ECONNREFUSED."
pass
+
class EmptyPoolError(PoolError):
"Raised when a pool runs out of connections and no more are allowed."
pass
@@ -172,6 +175,11 @@ class InsecurePlatformWarning(SecurityWarning):
pass
+class SNIMissingWarning(HTTPWarning):
+ "Warned when making a HTTPS request without SNI available."
+ pass
+
+
class ResponseNotChunked(ProtocolError, ValueError):
"Response needs to be chunked in order to read it as chunks."
pass
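The new SNIMissingWarning slots into the existing warning hierarchy, so standard warnings filters apply to it; a small sketch (illustrative only):

    import warnings
    from urllib3.exceptions import HTTPWarning, SNIMissingWarning

    # SNIMissingWarning derives from HTTPWarning, the base warning above.
    assert issubclass(SNIMissingWarning, HTTPWarning)

    # Silence it on platforms known to lack SNI, if that trade-off is acceptable.
    warnings.simplefilter('ignore', SNIMissingWarning)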
diff --git a/urllib3/fields.py b/urllib3/fields.py
index 5fe3c24..2152829 100644
--- a/urllib3/fields.py
+++ b/urllib3/fields.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import email.utils
import mimetypes
diff --git a/urllib3/filepost.py b/urllib3/filepost.py
index 97ab970..2fea190 100644
--- a/urllib3/filepost.py
+++ b/urllib3/filepost.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import codecs
from uuid import uuid4
diff --git a/urllib3/packages/__init__.py b/urllib3/packages/__init__.py
index 10a3aa8..35555ed 100644
--- a/urllib3/packages/__init__.py
+++ b/urllib3/packages/__init__.py
@@ -7,3 +7,5 @@ try:
import ssl as ssl_match_hostname
except ImportError:
from . import ssl_match_hostname
+
+__all__ = ('ssl_match_hostname', )
diff --git a/urllib3/poolmanager.py b/urllib3/poolmanager.py
index 76b6a12..f13e673 100644
--- a/urllib3/poolmanager.py
+++ b/urllib3/poolmanager.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import logging
try: # Python 3
@@ -25,7 +26,7 @@ pool_classes_by_scheme = {
log = logging.getLogger(__name__)
SSL_KEYWORDS = ('key_file', 'cert_file', 'cert_reqs', 'ca_certs',
- 'ssl_version')
+ 'ssl_version', 'ca_cert_dir')
class PoolManager(RequestMethods):
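With 'ca_cert_dir' added to SSL_KEYWORDS, the PoolManager now forwards it to HTTPS connection pools alongside ca_certs; a minimal sketch (the directory path below is a hypothetical example):

    import urllib3

    http = urllib3.PoolManager(
        cert_reqs='CERT_REQUIRED',
        ca_cert_dir='/etc/ssl/certs',  # hypothetical directory of hashed CA certs
    )
    r = http.request('GET', 'https://www.example.com/')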
diff --git a/urllib3/request.py b/urllib3/request.py
index a1a12bc..d5aa62d 100644
--- a/urllib3/request.py
+++ b/urllib3/request.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
try:
from urllib.parse import urlencode
except ImportError:
@@ -133,7 +134,8 @@ class RequestMethods(object):
if fields:
if 'body' in urlopen_kw:
- raise TypeError('request got values for both \'fields\' and \'body\', can only specify one.')
+ raise TypeError(
+ "request got values for both 'fields' and 'body', can only specify one.")
if encode_multipart:
body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
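The reworded TypeError above still fires when both 'fields' and 'body' are supplied; a quick sketch of that error path (the exception is raised before any network traffic):

    import urllib3

    http = urllib3.PoolManager()
    try:
        http.request('POST', 'https://www.example.com/',
                     fields={'key': 'value'}, body=b'raw bytes')
    except TypeError as exc:
        print(exc)  # "request got values for both 'fields' and 'body', ..."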
diff --git a/urllib3/response.py b/urllib3/response.py
index c41bd2c..e034068 100644
--- a/urllib3/response.py
+++ b/urllib3/response.py
@@ -1,7 +1,9 @@
+from __future__ import absolute_import
from contextlib import contextmanager
import zlib
import io
from socket import timeout as SocketTimeout
+from socket import error as SocketError
from ._collections import HTTPHeaderDict
from .exceptions import (
@@ -130,8 +132,8 @@ class HTTPResponse(io.IOBase):
if "chunked" in encodings:
self.chunked = True
- # We certainly don't want to preload content when the response is chunked.
- if not self.chunked and preload_content and not self._body:
+ # If requested, preload the body.
+ if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
def get_redirect_location(self):
@@ -194,12 +196,22 @@ class HTTPResponse(io.IOBase):
"Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding, e)
- if flush_decoder and decode_content and self._decoder:
- buf = self._decoder.decompress(binary_type())
- data += buf + self._decoder.flush()
+ if flush_decoder and decode_content:
+ data += self._flush_decoder()
return data
+ def _flush_decoder(self):
+ """
+ Flushes the decoder. Should only be called if the decoder is actually
+ being used.
+ """
+ if self._decoder:
+ buf = self._decoder.decompress(b'')
+ return buf + self._decoder.flush()
+
+ return b''
+
@contextmanager
def _error_catcher(self):
"""
@@ -227,15 +239,22 @@ class HTTPResponse(io.IOBase):
raise ReadTimeoutError(self._pool, None, 'Read timed out.')
- except HTTPException as e:
+ except (HTTPException, SocketError) as e:
# This includes IncompleteRead.
raise ProtocolError('Connection broken: %r' % e, e)
+
except Exception:
# The response may not be closed but we're not going to use it anymore
# so close it now to ensure that the connection is released back to the pool.
if self._original_response and not self._original_response.isclosed():
self._original_response.close()
+ # Closing the response may not actually be sufficient to close
+ # everything, so if we have a hold of the connection close that
+ # too.
+ if self._connection is not None:
+ self._connection.close()
+
raise
finally:
if self._original_response and self._original_response.isclosed():
@@ -301,7 +320,6 @@ class HTTPResponse(io.IOBase):
return data
-
def stream(self, amt=2**16, decode_content=None):
"""
A generator wrapper for the read() method. A call will block until
@@ -340,9 +358,9 @@ class HTTPResponse(io.IOBase):
headers = r.msg
if not isinstance(headers, HTTPHeaderDict):
- if PY3: # Python 3
+ if PY3: # Python 3
headers = HTTPHeaderDict(headers.items())
- else: # Python 2
+ else: # Python 2
headers = HTTPHeaderDict.from_httplib(headers)
# HTTPResponse objects in Python 3 don't have a .strict attribute
@@ -454,7 +472,8 @@ class HTTPResponse(io.IOBase):
self._init_decoder()
# FIXME: Rewrite this method and make it a class with a better structured logic.
if not self.chunked:
- raise ResponseNotChunked("Response is not chunked. "
+ raise ResponseNotChunked(
+ "Response is not chunked. "
"Header 'transfer-encoding: chunked' is missing.")
# Don't bother reading the body of a HEAD request.
@@ -468,8 +487,18 @@ class HTTPResponse(io.IOBase):
if self.chunk_left == 0:
break
chunk = self._handle_chunk(amt)
- yield self._decode(chunk, decode_content=decode_content,
- flush_decoder=True)
+ decoded = self._decode(chunk, decode_content=decode_content,
+ flush_decoder=False)
+ if decoded:
+ yield decoded
+
+ if decode_content:
+ # On CPython and PyPy, we should never need to flush the
+ # decoder. However, on Jython we *might* need to, so
+ # let's defensively do it anyway.
+ decoded = self._flush_decoder()
+ if decoded: # Platform-specific: Jython.
+ yield decoded
# Chunk content ends with \r\n: discard it.
while True:
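A hedged sketch of the streaming path that the read_chunked() change affects: a chunk-encoded, compressed body is now decoded chunk by chunk without flushing the decoder each time, with a single defensive flush at the end (the URL below is a placeholder):

    import urllib3

    http = urllib3.PoolManager()
    r = http.request('GET', 'https://www.example.com/stream',  # placeholder URL
                     preload_content=False)
    for chunk in r.stream(decode_content=True):
        print(len(chunk))  # each decoded chunk as it arrives
    r.release_conn()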
diff --git a/urllib3/util/__init__.py b/urllib3/util/__init__.py
index 8becc81..c6c6243 100644
--- a/urllib3/util/__init__.py
+++ b/urllib3/util/__init__.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
# For backwards compatibility, provide imports that used to be here.
from .connection import is_connection_dropped
from .request import make_headers
@@ -22,3 +23,22 @@ from .url import (
split_first,
Url,
)
+
+__all__ = (
+ 'HAS_SNI',
+ 'SSLContext',
+ 'Retry',
+ 'Timeout',
+ 'Url',
+ 'assert_fingerprint',
+ 'current_time',
+ 'is_connection_dropped',
+ 'is_fp_closed',
+ 'get_host',
+ 'parse_url',
+ 'make_headers',
+ 'resolve_cert_reqs',
+ 'resolve_ssl_version',
+ 'split_first',
+ 'ssl_wrap_socket',
+)
diff --git a/urllib3/util/connection.py b/urllib3/util/connection.py
index 4f2f0f1..01a4812 100644
--- a/urllib3/util/connection.py
+++ b/urllib3/util/connection.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import socket
try:
from select import poll, POLLIN
diff --git a/urllib3/util/request.py b/urllib3/util/request.py
index 5f4ccfd..40bf0b4 100644
--- a/urllib3/util/request.py
+++ b/urllib3/util/request.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
from base64 import b64encode
from six import b
diff --git a/urllib3/util/response.py b/urllib3/util/response.py
index 6695809..efb9e04 100644
--- a/urllib3/util/response.py
+++ b/urllib3/util/response.py
@@ -1,3 +1,5 @@
+from __future__ import absolute_import
+
from six.moves import http_client as httplib
from ..exceptions import HeaderParsingError
@@ -44,7 +46,7 @@ def assert_header_parsing(headers):
# This will fail silently if we pass in the wrong kind of parameter.
# To make debugging easier add an explicit check.
if not isinstance(headers, httplib.HTTPMessage):
- raise TypeError('expected httplib.Message, got {}.'.format(
+ raise TypeError('expected httplib.Message, got {0}.'.format(
type(headers)))
defects = getattr(headers, 'defects', None)
diff --git a/urllib3/util/retry.py b/urllib3/util/retry.py
index 178b374..fd1f5dd 100644
--- a/urllib3/util/retry.py
+++ b/urllib3/util/retry.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
import time
import logging
@@ -126,7 +127,7 @@ class Retry(object):
self.method_whitelist = method_whitelist
self.backoff_factor = backoff_factor
self.raise_on_redirect = raise_on_redirect
- self._observed_errors = _observed_errors # TODO: use .history instead?
+ self._observed_errors = _observed_errors # TODO: use .history instead?
def new(self, **kw):
params = dict(
@@ -206,7 +207,8 @@ class Retry(object):
return min(retry_counts) < 0
- def increment(self, method=None, url=None, response=None, error=None, _pool=None, _stacktrace=None):
+ def increment(self, method=None, url=None, response=None, error=None,
+ _pool=None, _stacktrace=None):
""" Return a new Retry object with incremented retry counters.
:param response: A response object, or None, if the server did not
@@ -274,7 +276,6 @@ class Retry(object):
return new_retry
-
def __repr__(self):
return ('{cls.__name__}(total={self.total}, connect={self.connect}, '
'read={self.read}, redirect={self.redirect})').format(
diff --git a/urllib3/util/ssl_.py b/urllib3/util/ssl_.py
index 47b817e..67f8344 100644
--- a/urllib3/util/ssl_.py
+++ b/urllib3/util/ssl_.py
@@ -1,7 +1,12 @@
+from __future__ import absolute_import
+import errno
+import warnings
+import hmac
+
from binascii import hexlify, unhexlify
from hashlib import md5, sha1, sha256
-from ..exceptions import SSLError, InsecurePlatformWarning
+from ..exceptions import SSLError, InsecurePlatformWarning, SNIMissingWarning
SSLContext = None
@@ -15,8 +20,23 @@ HASHFUNC_MAP = {
64: sha256,
}
-import errno
-import warnings
+
+def _const_compare_digest_backport(a, b):
+ """
+ Compare two digests of equal length in constant time.
+
+ The digests must be of type str/bytes.
+ Returns True if the digests match, and False otherwise.
+ """
+ result = abs(len(a) - len(b))
+ for l, r in zip(bytearray(a), bytearray(b)):
+ result |= l ^ r
+ return result == 0
+
+
+_const_compare_digest = getattr(hmac, 'compare_digest',
+ _const_compare_digest_backport)
+
try: # Test for SSL features
import ssl
@@ -134,7 +154,7 @@ def assert_fingerprint(cert, fingerprint):
cert_digest = hashfunc(cert).digest()
- if cert_digest != fingerprint_bytes:
+ if not _const_compare_digest(cert_digest, fingerprint_bytes):
raise SSLError('Fingerprints did not match. Expected "{0}", got "{1}".'
.format(fingerprint, hexlify(cert_digest)))
@@ -283,4 +303,15 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
context.load_cert_chain(certfile, keyfile)
if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
return context.wrap_socket(sock, server_hostname=server_hostname)
+
+ warnings.warn(
+ 'An HTTPS request has been made, but the SNI (Subject Name '
+ 'Indication) extension to TLS is not available on this platform. '
+ 'This may cause the server to present an incorrect TLS '
+ 'certificate, which can cause validation failures. For more '
+ 'information, see '
+ 'https://urllib3.readthedocs.org/en/latest/security.html'
+ '#snimissingwarning.',
+ SNIMissingWarning
+ )
return context.wrap_socket(sock)
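The backported comparison above mirrors hmac.compare_digest, which is preferred when the running Python provides it; a small sketch of the behaviour using the module-level name introduced in this hunk:

    from urllib3.util.ssl_ import _const_compare_digest

    # Equal-length digests compare in constant time; any mismatch returns False.
    assert _const_compare_digest(b'\x01\x02\x03', b'\x01\x02\x03')
    assert not _const_compare_digest(b'\x01\x02\x03', b'\x01\x02\x04')
    assert not _const_compare_digest(b'\x01\x02', b'\x01\x02\x03')  # length mismatch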
diff --git a/urllib3/util/timeout.py b/urllib3/util/timeout.py
index ea7027f..ff62f47 100644
--- a/urllib3/util/timeout.py
+++ b/urllib3/util/timeout.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
# The default socket timeout, used by httplib to indicate that no timeout was
# specified by the user
from socket import _GLOBAL_DEFAULT_TIMEOUT
@@ -9,6 +10,7 @@ from ..exceptions import TimeoutStateError
# urllib3
_Default = object()
+
def current_time():
"""
Retrieve the current time. This function is mocked out in unit testing.
@@ -226,9 +228,9 @@ class Timeout(object):
has not yet been called on this object.
"""
if (self.total is not None and
- self.total is not self.DEFAULT_TIMEOUT and
- self._read is not None and
- self._read is not self.DEFAULT_TIMEOUT):
+ self.total is not self.DEFAULT_TIMEOUT and
+ self._read is not None and
+ self._read is not self.DEFAULT_TIMEOUT):
# In case the connect timeout has not yet been established.
if self._start_connect is None:
return self._read
diff --git a/urllib3/util/url.py b/urllib3/util/url.py
index e58050c..e996204 100644
--- a/urllib3/util/url.py
+++ b/urllib3/util/url.py
@@ -1,3 +1,4 @@
+from __future__ import absolute_import
from collections import namedtuple
from ..exceptions import LocationParseError
@@ -85,6 +86,7 @@ class Url(namedtuple('Url', url_attrs)):
def __str__(self):
return self.url
+
def split_first(s, delims):
"""
Given a string and an iterable of delimiters, split on the first found
@@ -115,7 +117,7 @@ def split_first(s, delims):
if min_idx is None or min_idx < 0:
return s, '', None
- return s[:min_idx], s[min_idx+1:], min_delim
+ return s[:min_idx], s[min_idx + 1:], min_delim
def parse_url(url):
@@ -206,6 +208,7 @@ def parse_url(url):
return Url(scheme, auth, host, port, path, query, fragment)
+
def get_host(url):
"""
Deprecated. Use :func:`.parse_url` instead.
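For reference, the split_first() helper whose slicing is re-spaced above behaves as follows (examples taken from its docstring):

    from urllib3.util.url import split_first

    print(split_first('foo/bar?baz', '?/='))   # ('foo', 'bar?baz', '/')
    print(split_first('foo/bar?baz', '123'))   # ('foo/bar?baz', '', None)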