author     SVN-Git Migration <python-modules-team@lists.alioth.debian.org>  2015-10-08 13:19:41 -0700
committer  SVN-Git Migration <python-modules-team@lists.alioth.debian.org>  2015-10-08 13:19:41 -0700
commit     b6ab7bae87b22c6fae783e8850533219d3bf8a29 (patch)
tree       472a760e2e976ea3e9545e09584392accee9cd6d /urllib3
parent     54bdd56778a37ea9d56d451d4ae49b99cbbfceaa (diff)
Imported Upstream version 1.10
Diffstat (limited to 'urllib3')
-rw-r--r--  urllib3/__init__.py             6
-rw-r--r--  urllib3/_collections.py         7
-rw-r--r--  urllib3/connection.py          12
-rw-r--r--  urllib3/connectionpool.py     103
-rw-r--r--  urllib3/contrib/pyopenssl.py   28
-rw-r--r--  urllib3/exceptions.py          13
-rw-r--r--  urllib3/request.py             28
-rw-r--r--  urllib3/util/retry.py          26
-rw-r--r--  urllib3/util/ssl_.py          208
-rw-r--r--  urllib3/util/url.py            45
10 files changed, 351 insertions(+), 125 deletions(-)
diff --git a/urllib3/__init__.py b/urllib3/__init__.py
index 3546d13..4f9d4a7 100644
--- a/urllib3/__init__.py
+++ b/urllib3/__init__.py
@@ -4,7 +4,7 @@ urllib3 - Thread-safe connection pooling and re-using.
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
-__version__ = '1.9.1'
+__version__ = '1.10'
from .connectionpool import (
@@ -55,9 +55,9 @@ def add_stderr_logger(level=logging.DEBUG):
del NullHandler
-# Set security warning to only go off once by default.
+# Set security warning to always go off by default.
import warnings
-warnings.simplefilter('module', exceptions.SecurityWarning)
+warnings.simplefilter('always', exceptions.SecurityWarning)
def disable_warnings(category=exceptions.HTTPWarning):
"""
diff --git a/urllib3/_collections.py b/urllib3/_collections.py
index d77ebb8..784342a 100644
--- a/urllib3/_collections.py
+++ b/urllib3/_collections.py
@@ -14,7 +14,7 @@ try: # Python 2.7+
from collections import OrderedDict
except ImportError:
from .packages.ordered_dict import OrderedDict
-from .packages.six import itervalues
+from .packages.six import iterkeys, itervalues
__all__ = ['RecentlyUsedContainer', 'HTTPHeaderDict']
@@ -85,8 +85,7 @@ class RecentlyUsedContainer(MutableMapping):
def clear(self):
with self.lock:
# Copy pointers to all values, then wipe the mapping
- # under Python 2, this copies the list of values twice :-|
- values = list(self._container.values())
+ values = list(itervalues(self._container))
self._container.clear()
if self.dispose_func:
@@ -95,7 +94,7 @@ class RecentlyUsedContainer(MutableMapping):
def keys(self):
with self.lock:
- return self._container.keys()
+ return list(iterkeys(self._container))
class HTTPHeaderDict(MutableMapping):
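
A short sketch of the behaviour these _collections hunks pin down (urllib3 1.10 assumed; the container below is purely illustrative): clear() copies the values once via itervalues(), and keys() now returns a snapshot list taken under the lock, so Python 3 callers no longer get a live view that breaks if the container mutates mid-iteration.

    from urllib3._collections import RecentlyUsedContainer

    disposed = []
    cache = RecentlyUsedContainer(maxsize=2, dispose_func=disposed.append)
    cache['a'] = 1
    cache['b'] = 2
    cache['c'] = 3                  # evicts 'a'; dispose_func(1) is called

    for key in cache.keys():        # snapshot list taken under the lock
        _ = cache[key]              # reads are safe while other threads mutate

    cache.clear()                   # dispose_func runs for the remaining values
    assert sorted(disposed) == [1, 2, 3]
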
diff --git a/urllib3/connection.py b/urllib3/connection.py
index cebdd86..e5de769 100644
--- a/urllib3/connection.py
+++ b/urllib3/connection.py
@@ -38,6 +38,7 @@ except NameError: # Python 2:
from .exceptions import (
ConnectTimeoutError,
SystemTimeWarning,
+ SecurityWarning,
)
from .packages.ssl_match_hostname import match_hostname
@@ -241,8 +242,15 @@ class VerifiedHTTPSConnection(HTTPSConnection):
self.assert_fingerprint)
elif resolved_cert_reqs != ssl.CERT_NONE \
and self.assert_hostname is not False:
- match_hostname(self.sock.getpeercert(),
- self.assert_hostname or hostname)
+ cert = self.sock.getpeercert()
+ if not cert.get('subjectAltName', ()):
+ warnings.warn((
+ 'Certificate has no `subjectAltName`, falling back to check for a `commonName` for now. '
+ 'This feature is being removed by major browsers and deprecated by RFC 2818. '
+ '(See https://github.com/shazow/urllib3/issues/497 for details.)'),
+ SecurityWarning
+ )
+ match_hostname(cert, self.assert_hostname or hostname)
self.is_verified = (resolved_cert_reqs == ssl.CERT_REQUIRED
or self.assert_fingerprint is not None)
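
For readers unfamiliar with the structure being inspected, the dict returned by SSLSocket.getpeercert() looks roughly like the sketch below (values are illustrative only); the new branch warns whenever 'subjectAltName' is absent and then still delegates to match_hostname(), which falls back to the commonName.

    # Illustrative shapes only; real dicts come from self.sock.getpeercert().
    cert_with_san = {
        'subject': ((('commonName', 'example.com'),),),
        'subjectAltName': (('DNS', 'example.com'), ('DNS', 'www.example.com')),
    }
    cert_without_san = {
        'subject': ((('commonName', 'legacy.example.com'),),),
    }

    for cert in (cert_with_san, cert_without_san):
        if not cert.get('subjectAltName', ()):
            # urllib3 1.10 emits a SecurityWarning here before calling
            # match_hostname(cert, hostname), which matches on commonName.
            print('no subjectAltName; commonName fallback will be used')
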
diff --git a/urllib3/connectionpool.py b/urllib3/connectionpool.py
index ac6e0ca..8bdf228 100644
--- a/urllib3/connectionpool.py
+++ b/urllib3/connectionpool.py
@@ -266,6 +266,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
"""
pass
+ def _prepare_proxy(self, conn):
+ # Nothing to do for HTTP connections.
+ pass
+
def _get_timeout(self, timeout):
""" Helper that always returns a :class:`urllib3.util.Timeout` """
if timeout is _Default:
@@ -278,6 +282,23 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# can be removed later
return Timeout.from_float(timeout)
+ def _raise_timeout(self, err, url, timeout_value):
+ """Is the error actually a timeout? Will raise a ReadTimeout or pass"""
+
+ if isinstance(err, SocketTimeout):
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ # See the above comment about EAGAIN in Python 3. In Python 2 we have
+ # to specifically catch it and throw the timeout error
+ if hasattr(err, 'errno') and err.errno in _blocking_errnos:
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
+ # Catch possible read timeouts thrown as SSL errors. If not the
+ # case, rethrow the original. We need to do this because of:
+ # http://bugs.python.org/issue10272
+ if 'timed out' in str(err) or 'did not complete (read)' in str(err): # Python 2.6
+ raise ReadTimeoutError(self, url, "Read timed out. (read timeout=%s)" % timeout_value)
+
def _make_request(self, conn, method, url, timeout=_Default,
**httplib_request_kw):
"""
@@ -301,7 +322,12 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn.timeout = timeout_obj.connect_timeout
# Trigger any extra validation we need to do.
- self._validate_conn(conn)
+ try:
+ self._validate_conn(conn)
+ except (SocketTimeout, BaseSSLError) as e:
+ # Py2 raises this as a BaseSSLError, Py3 raises it as socket timeout.
+ self._raise_timeout(err=e, url=url, timeout_value=conn.timeout)
+ raise
# conn.request() calls httplib.*.request, not the method in
# urllib3.request. It also calls makefile (recv) on the socket.
@@ -331,28 +357,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
httplib_response = conn.getresponse(buffering=True)
except TypeError: # Python 2.6 and older
httplib_response = conn.getresponse()
- except SocketTimeout:
- raise ReadTimeoutError(
- self, url, "Read timed out. (read timeout=%s)" % read_timeout)
-
- except BaseSSLError as e:
- # Catch possible read timeouts thrown as SSL errors. If not the
- # case, rethrow the original. We need to do this because of:
- # http://bugs.python.org/issue10272
- if 'timed out' in str(e) or \
- 'did not complete (read)' in str(e): # Python 2.6
- raise ReadTimeoutError(
- self, url, "Read timed out. (read timeout=%s)" % read_timeout)
-
- raise
-
- except SocketError as e: # Platform-specific: Python 2
- # See the above comment about EAGAIN in Python 3. In Python 2 we
- # have to specifically catch it and throw the timeout error
- if e.errno in _blocking_errnos:
- raise ReadTimeoutError(
- self, url, "Read timed out. (read timeout=%s)" % read_timeout)
-
+ except (SocketTimeout, BaseSSLError, SocketError) as e:
+ self._raise_timeout(err=e, url=url, timeout_value=read_timeout)
raise
# AppEngine doesn't have a version attr.
@@ -508,11 +514,18 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
try:
# Request a connection from the queue.
+ timeout_obj = self._get_timeout(timeout)
conn = self._get_conn(timeout=pool_timeout)
+ conn.timeout = timeout_obj.connect_timeout
+
+ is_new_proxy_conn = self.proxy is not None and not getattr(conn, 'sock', None)
+ if is_new_proxy_conn:
+ self._prepare_proxy(conn)
+
# Make the request on the httplib connection object.
httplib_response = self._make_request(conn, method, url,
- timeout=timeout,
+ timeout=timeout_obj,
body=body, headers=headers)
# If we're going to release the connection in ``finally:``, then
@@ -537,9 +550,12 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
raise EmptyPoolError(self, "No pool connections are available.")
except (BaseSSLError, CertificateError) as e:
- # Release connection unconditionally because there is no way to
- # close it externally in case of exception.
- release_conn = True
+ # Close the connection. If a connection is reused on which there
+ # was a Certificate error, the next request will certainly raise
+ # another Certificate error.
+ if conn:
+ conn.close()
+ conn = None
raise SSLError(e)
except (TimeoutError, HTTPException, SocketError, ConnectionError) as e:
@@ -668,23 +684,25 @@ class HTTPSConnectionPool(HTTPConnectionPool):
assert_fingerprint=self.assert_fingerprint)
conn.ssl_version = self.ssl_version
- if self.proxy is not None:
- # Python 2.7+
- try:
- set_tunnel = conn.set_tunnel
- except AttributeError: # Platform-specific: Python 2.6
- set_tunnel = conn._set_tunnel
+ return conn
- if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
- set_tunnel(self.host, self.port)
- else:
- set_tunnel(self.host, self.port, self.proxy_headers)
+ def _prepare_proxy(self, conn):
+ """
+ Establish tunnel connection early, because otherwise httplib
+ would improperly set Host: header to proxy's IP:port.
+ """
+ # Python 2.7+
+ try:
+ set_tunnel = conn.set_tunnel
+ except AttributeError: # Platform-specific: Python 2.6
+ set_tunnel = conn._set_tunnel
- # Establish tunnel connection early, because otherwise httplib
- # would improperly set Host: header to proxy's IP:port.
- conn.connect()
+ if sys.version_info <= (2, 6, 4) and not self.proxy_headers: # Python 2.6.4 and older
+ set_tunnel(self.host, self.port)
+ else:
+ set_tunnel(self.host, self.port, self.proxy_headers)
- return conn
+ conn.connect()
def _new_conn(self):
"""
@@ -725,8 +743,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
warnings.warn((
'Unverified HTTPS request is being made. '
'Adding certificate verification is strongly advised. See: '
- 'https://urllib3.readthedocs.org/en/latest/security.html '
- '(This warning will only appear once by default.)'),
+ 'https://urllib3.readthedocs.org/en/latest/security.html'),
InsecureRequestWarning)
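
A usage sketch tying the connectionpool changes together (urllib3 1.10 assumed; hosts and paths are illustrative and the network calls are left commented out): urlopen() now resolves the timeout into a single Timeout object before _make_request(), and proxied pools establish their CONNECT tunnel lazily through _prepare_proxy() the first time a fresh connection is checked out.

    from urllib3 import HTTPConnectionPool, ProxyManager
    from urllib3.util.timeout import Timeout

    pool = HTTPConnectionPool('httpbin.org', port=80, maxsize=1,
                              timeout=Timeout(connect=2.0, read=5.0))
    # r = pool.request('GET', '/get')                              # pool default
    # r = pool.request('GET', '/get', timeout=Timeout(read=0.5))   # per-request override

    # HTTPS-through-proxy pools now run set_tunnel()/connect() in
    # _prepare_proxy(), only for brand-new connections, instead of
    # tunnelling inside _new_conn().
    proxied = ProxyManager('http://proxy.example.invalid:3128/')
    # r = proxied.request('GET', 'https://example.com/')
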
diff --git a/urllib3/contrib/pyopenssl.py b/urllib3/contrib/pyopenssl.py
index 8475eeb..ee657fb 100644
--- a/urllib3/contrib/pyopenssl.py
+++ b/urllib3/contrib/pyopenssl.py
@@ -70,9 +70,14 @@ HAS_SNI = SUBJ_ALT_NAME_SUPPORT
# Map from urllib3 to PyOpenSSL compatible parameter-values.
_openssl_versions = {
ssl.PROTOCOL_SSLv23: OpenSSL.SSL.SSLv23_METHOD,
- ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD,
ssl.PROTOCOL_TLSv1: OpenSSL.SSL.TLSv1_METHOD,
}
+
+try:
+ _openssl_versions.update({ssl.PROTOCOL_SSLv3: OpenSSL.SSL.SSLv3_METHOD})
+except AttributeError:
+ pass
+
_openssl_verify = {
ssl.CERT_NONE: OpenSSL.SSL.VERIFY_NONE,
ssl.CERT_OPTIONAL: OpenSSL.SSL.VERIFY_PEER,
@@ -186,6 +191,11 @@ class WrappedSocket(object):
return b''
else:
raise
+ except OpenSSL.SSL.ZeroReturnError as e:
+ if self.connection.get_shutdown() == OpenSSL.SSL.RECEIVED_SHUTDOWN:
+ return b''
+ else:
+ raise
except OpenSSL.SSL.WantReadError:
rd, wd, ed = select.select(
[self.socket], [], [], self.socket.gettimeout())
@@ -199,8 +209,21 @@ class WrappedSocket(object):
def settimeout(self, timeout):
return self.socket.settimeout(timeout)
+ def _send_until_done(self, data):
+ while True:
+ try:
+ return self.connection.send(data)
+ except OpenSSL.SSL.WantWriteError:
+ _, wlist, _ = select.select([], [self.socket], [],
+ self.socket.gettimeout())
+ if not wlist:
+ raise timeout()
+ continue
+
def sendall(self, data):
- return self.connection.sendall(data)
+ while len(data):
+ sent = self._send_until_done(data)
+ data = data[sent:]
def close(self):
if self._makefile_refs < 1:
@@ -248,6 +271,7 @@ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ssl_version=None):
ctx = OpenSSL.SSL.Context(_openssl_versions[ssl_version])
if certfile:
+ keyfile = keyfile or certfile # Match behaviour of the normal python ssl library
ctx.use_certificate_file(certfile)
if keyfile:
ctx.use_privatekey_file(keyfile)
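
A hedged usage sketch for the contrib module (requires the pyOpenSSL, ndg-httpsclient and pyasn1 extras; hostnames are illustrative): the entry points below are the module's existing inject/extract helpers, and the hunks above make the patched socket tolerate OpenSSL builds without SSLv3, retry short writes in sendall(), and treat a clean shutdown as EOF.

    import urllib3
    import urllib3.contrib.pyopenssl

    # Replace urllib3's ssl_wrap_socket with the PyOpenSSL-backed version
    # (gives SNI and modern TLS on older Python 2 interpreters).
    urllib3.contrib.pyopenssl.inject_into_urllib3()

    http = urllib3.PoolManager()
    # r = http.request('GET', 'https://example.com/')

    # Undo the monkey-patching when finished, e.g. in tests.
    urllib3.contrib.pyopenssl.extract_from_urllib3()
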
diff --git a/urllib3/exceptions.py b/urllib3/exceptions.py
index 7519ba9..0c6fd3c 100644
--- a/urllib3/exceptions.py
+++ b/urllib3/exceptions.py
@@ -72,11 +72,8 @@ class MaxRetryError(RequestError):
def __init__(self, pool, url, reason=None):
self.reason = reason
- message = "Max retries exceeded with url: %s" % url
- if reason:
- message += " (Caused by %r)" % reason
- else:
- message += " (Caused by redirect)"
+ message = "Max retries exceeded with url: %s (Caused by %r)" % (
+ url, reason)
RequestError.__init__(self, pool, url, message)
@@ -141,6 +138,12 @@ class LocationParseError(LocationValueError):
self.location = location
+class ResponseError(HTTPError):
+ "Used as a container for an error reason supplied in a MaxRetryError."
+ GENERIC_ERROR = 'too many error responses'
+ SPECIFIC_ERROR = 'too many {status_code} error responses'
+
+
class SecurityWarning(HTTPWarning):
"Warned when perfoming security reducing actions"
pass
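
A small sketch of how the reworked MaxRetryError message and the new ResponseError container fit together (urllib3 1.10 assumed; the error is constructed by hand purely for illustration):

    from urllib3.exceptions import MaxRetryError, ResponseError

    reason = ResponseError(ResponseError.SPECIFIC_ERROR.format(status_code=503))
    try:
        raise MaxRetryError(pool=None, url='/status/503', reason=reason)
    except MaxRetryError as err:
        # The reason is now always embedded via %r, even when retries were
        # exhausted by response codes rather than a transport failure.
        assert 'too many 503 error responses' in str(err)
        assert err.reason is reason
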
diff --git a/urllib3/request.py b/urllib3/request.py
index 51fe238..b08d6c9 100644
--- a/urllib3/request.py
+++ b/urllib3/request.py
@@ -118,18 +118,24 @@ class RequestMethods(object):
which is used to compose the body of the request. The random boundary
string can be explicitly set with the ``multipart_boundary`` parameter.
"""
- if encode_multipart:
- body, content_type = encode_multipart_formdata(
- fields or {}, boundary=multipart_boundary)
- else:
- body, content_type = (urlencode(fields or {}),
- 'application/x-www-form-urlencoded')
-
if headers is None:
headers = self.headers
- headers_ = {'Content-Type': content_type}
- headers_.update(headers)
+ extra_kw = {'headers': {}}
+
+ if fields:
+ if 'body' in urlopen_kw:
+ raise TypeError('request got values for both \'fields\' and \'body\', can only specify one.')
+
+ if encode_multipart:
+ body, content_type = encode_multipart_formdata(fields, boundary=multipart_boundary)
+ else:
+ body, content_type = urlencode(fields), 'application/x-www-form-urlencoded'
+
+ extra_kw['body'] = body
+ extra_kw['headers'] = {'Content-Type': content_type}
+
+ extra_kw['headers'].update(headers)
+ extra_kw.update(urlopen_kw)
- return self.urlopen(method, url, body=body, headers=headers_,
- **urlopen_kw)
+ return self.urlopen(method, url, **extra_kw)
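
A brief sketch of the new request_encode_body() contract (urllib3 1.10 assumed; URLs are illustrative and the network calls are commented out): fields and a pre-built body are now mutually exclusive, and Content-Type is only injected when urllib3 itself builds the body.

    import urllib3

    http = urllib3.PoolManager()

    # Let urllib3 build the body: multipart by default, urlencoded on request.
    # http.request('POST', 'http://httpbin.org/post',
    #              fields={'k': 'v'}, encode_multipart=False)

    # Or pass a ready-made body and your own Content-Type; combining this
    # with `fields` now raises TypeError instead of silently dropping one.
    # http.request('POST', 'http://httpbin.org/post',
    #              body='{"k": "v"}',
    #              headers={'Content-Type': 'application/json'})
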
diff --git a/urllib3/util/retry.py b/urllib3/util/retry.py
index eb560df..7e0959d 100644
--- a/urllib3/util/retry.py
+++ b/urllib3/util/retry.py
@@ -2,10 +2,11 @@ import time
import logging
from ..exceptions import (
- ProtocolError,
ConnectTimeoutError,
- ReadTimeoutError,
MaxRetryError,
+ ProtocolError,
+ ReadTimeoutError,
+ ResponseError,
)
from ..packages import six
@@ -36,7 +37,6 @@ class Retry(object):
Errors will be wrapped in :class:`~urllib3.exceptions.MaxRetryError` unless
retries are disabled, in which case the causing exception will be raised.
-
:param int total:
Total number of retries to allow. Takes precedence over other counts.
@@ -184,13 +184,13 @@ class Retry(object):
return isinstance(err, ConnectTimeoutError)
def _is_read_error(self, err):
- """ Errors that occur after the request has been started, so we can't
- assume that the server did not process any of it.
+ """ Errors that occur after the request has been started, so we should
+ assume that the server began processing it.
"""
return isinstance(err, (ReadTimeoutError, ProtocolError))
def is_forced_retry(self, method, status_code):
- """ Is this method/response retryable? (Based on method/codes whitelists)
+ """ Is this method/status code retryable? (Based on method/codes whitelists)
"""
if self.method_whitelist and method.upper() not in self.method_whitelist:
return False
@@ -198,8 +198,7 @@ class Retry(object):
return self.status_forcelist and status_code in self.status_forcelist
def is_exhausted(self):
- """ Are we out of retries?
- """
+ """ Are we out of retries? """
retry_counts = (self.total, self.connect, self.read, self.redirect)
retry_counts = list(filter(None, retry_counts))
if not retry_counts:
@@ -230,6 +229,7 @@ class Retry(object):
connect = self.connect
read = self.read
redirect = self.redirect
+ cause = 'unknown'
if error and self._is_connection_error(error):
# Connect retry?
@@ -251,10 +251,16 @@ class Retry(object):
# Redirect retry?
if redirect is not None:
redirect -= 1
+ cause = 'too many redirects'
else:
- # FIXME: Nothing changed, scenario doesn't make sense.
+ # Incrementing because of a server error like a 500 in
+ # status_forcelist and the given method is in the whitelist
_observed_errors += 1
+ cause = ResponseError.GENERIC_ERROR
+ if response and response.status:
+ cause = ResponseError.SPECIFIC_ERROR.format(
+ status_code=response.status)
new_retry = self.new(
total=total,
@@ -262,7 +268,7 @@ class Retry(object):
_observed_errors=_observed_errors)
if new_retry.is_exhausted():
- raise MaxRetryError(_pool, url, error)
+ raise MaxRetryError(_pool, url, error or ResponseError(cause))
log.debug("Incremented Retry for (url='%s'): %r" % (url, new_retry))
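
A usage sketch of the retry changes (urllib3 1.10 assumed; the URL is illustrative and the request is commented out to avoid network access): when retries are exhausted by status codes from status_forcelist rather than by a transport error, the resulting MaxRetryError now carries a ResponseError reason such as 'too many 503 error responses'.

    from urllib3 import PoolManager
    from urllib3.exceptions import MaxRetryError, ResponseError
    from urllib3.util.retry import Retry

    retries = Retry(total=2, backoff_factor=0.1,
                    status_forcelist=[500, 503])
    http = PoolManager(retries=retries)

    # try:
    #     http.request('GET', 'http://httpbin.org/status/503')
    # except MaxRetryError as err:
    #     assert isinstance(err.reason, ResponseError)
    #     print(err.reason)   # "too many 503 error responses"
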
diff --git a/urllib3/util/ssl_.py b/urllib3/util/ssl_.py
index 9cfe2d2..a788b1b 100644
--- a/urllib3/util/ssl_.py
+++ b/urllib3/util/ssl_.py
@@ -4,18 +4,84 @@ from hashlib import md5, sha1
from ..exceptions import SSLError
-try: # Test for SSL features
- SSLContext = None
- HAS_SNI = False
+SSLContext = None
+HAS_SNI = False
+create_default_context = None
+
+import errno
+import ssl
- import ssl
+try: # Test for SSL features
from ssl import wrap_socket, CERT_NONE, PROTOCOL_SSLv23
- from ssl import SSLContext # Modern SSL?
from ssl import HAS_SNI # Has SNI?
except ImportError:
pass
+try:
+ from ssl import OP_NO_SSLv2, OP_NO_SSLv3, OP_NO_COMPRESSION
+except ImportError:
+ OP_NO_SSLv2, OP_NO_SSLv3 = 0x1000000, 0x2000000
+ OP_NO_COMPRESSION = 0x20000
+
+try:
+ from ssl import _DEFAULT_CIPHERS
+except ImportError:
+ _DEFAULT_CIPHERS = (
+ 'ECDH+AESGCM:DH+AESGCM:ECDH+AES256:DH+AES256:ECDH+AES128:DH+AES:ECDH+HIGH:'
+ 'DH+HIGH:ECDH+3DES:DH+3DES:RSA+AESGCM:RSA+AES:RSA+HIGH:RSA+3DES:ECDH+RC4:'
+ 'DH+RC4:RSA+RC4:!aNULL:!eNULL:!MD5'
+ )
+
+try:
+ from ssl import SSLContext # Modern SSL?
+except ImportError:
+ import sys
+
+ class SSLContext(object): # Platform-specific: Python 2 & 3.1
+ supports_set_ciphers = sys.version_info >= (2, 7)
+
+ def __init__(self, protocol_version):
+ self.protocol = protocol_version
+ # Use default values from a real SSLContext
+ self.check_hostname = False
+ self.verify_mode = ssl.CERT_NONE
+ self.ca_certs = None
+ self.options = 0
+ self.certfile = None
+ self.keyfile = None
+ self.ciphers = None
+
+ def load_cert_chain(self, certfile, keyfile):
+ self.certfile = certfile
+ self.keyfile = keyfile
+
+ def load_verify_locations(self, location):
+ self.ca_certs = location
+
+ def set_ciphers(self, cipher_suite):
+ if not self.supports_set_ciphers:
+ raise TypeError(
+ 'Your version of Python does not support setting '
+ 'a custom cipher suite. Please upgrade to Python '
+ '2.7, 3.2, or later if you need this functionality.'
+ )
+ self.ciphers = cipher_suite
+
+ def wrap_socket(self, socket, server_hostname=None):
+ kwargs = {
+ 'keyfile': self.keyfile,
+ 'certfile': self.certfile,
+ 'ca_certs': self.ca_certs,
+ 'cert_reqs': self.verify_mode,
+ 'ssl_version': self.protocol,
+ }
+ if self.supports_set_ciphers: # Platform-specific: Python 2.7+
+ return wrap_socket(socket, ciphers=self.ciphers, **kwargs)
+ else: # Platform-specific: Python 2.6
+ return wrap_socket(socket, **kwargs)
+
+
def assert_fingerprint(cert, fingerprint):
"""
Checks if given fingerprint matches the supplied certificate.
@@ -91,42 +157,98 @@ def resolve_ssl_version(candidate):
return candidate
-if SSLContext is not None: # Python 3.2+
- def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
- ca_certs=None, server_hostname=None,
- ssl_version=None):
- """
- All arguments except `server_hostname` have the same meaning as for
- :func:`ssl.wrap_socket`
-
- :param server_hostname:
- Hostname of the expected certificate
- """
- context = SSLContext(ssl_version)
- context.verify_mode = cert_reqs
-
- # Disable TLS compression to migitate CRIME attack (issue #309)
- OP_NO_COMPRESSION = 0x20000
- context.options |= OP_NO_COMPRESSION
-
- if ca_certs:
- try:
- context.load_verify_locations(ca_certs)
- # Py32 raises IOError
- # Py33 raises FileNotFoundError
- except Exception as e: # Reraise as SSLError
+def create_urllib3_context(ssl_version=None, cert_reqs=ssl.CERT_REQUIRED,
+ options=None, ciphers=None):
+ """All arguments have the same meaning as ``ssl_wrap_socket``.
+
+ By default, this function does a lot of the same work that
+ ``ssl.create_default_context`` does on Python 3.4+. It:
+
+ - Disables SSLv2, SSLv3, and compression
+ - Sets a restricted set of server ciphers
+
+ If you wish to enable SSLv3, you can do::
+
+ from urllib3.util import ssl_
+ context = ssl_.create_urllib3_context()
+ context.options &= ~ssl_.OP_NO_SSLv3
+
+ You can do the same to enable compression (substituting ``COMPRESSION``
+ for ``SSLv3`` in the last line above).
+
+ :param ssl_version:
+ The desired protocol version to use. This will default to
+ PROTOCOL_SSLv23 which will negotiate the highest protocol that both
+ the server and your installation of OpenSSL support.
+ :param cert_reqs:
+ Whether to require the certificate verification. This defaults to
+ ``ssl.CERT_REQUIRED``.
+ :param options:
+ Specific OpenSSL options. These default to ``ssl.OP_NO_SSLv2``,
+ ``ssl.OP_NO_SSLv3``, ``ssl.OP_NO_COMPRESSION``.
+ :param ciphers:
+ Which cipher suites to allow the server to select.
+ :returns:
+ Constructed SSLContext object with specified options
+ :rtype: SSLContext
+ """
+ context = SSLContext(ssl_version or ssl.PROTOCOL_SSLv23)
+
+ if options is None:
+ options = 0
+ # SSLv2 is easily broken and is considered harmful and dangerous
+ options |= OP_NO_SSLv2
+ # SSLv3 has several problems and is now dangerous
+ options |= OP_NO_SSLv3
+ # Disable compression to prevent CRIME attacks for OpenSSL 1.0+
+ # (issue #309)
+ options |= OP_NO_COMPRESSION
+
+ context.options |= options
+
+ if getattr(context, 'supports_set_ciphers', True): # Platform-specific: Python 2.6
+ context.set_ciphers(ciphers or _DEFAULT_CIPHERS)
+
+ context.verify_mode = cert_reqs
+ if getattr(context, 'check_hostname', None) is not None: # Platform-specific: Python 3.2
+ context.check_hostname = (context.verify_mode == ssl.CERT_REQUIRED)
+ return context
+
+
+def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
+ ca_certs=None, server_hostname=None,
+ ssl_version=None, ciphers=None, ssl_context=None):
+ """
+ All arguments except for server_hostname and ssl_context have the same
+ meaning as they do when using :func:`ssl.wrap_socket`.
+
+ :param server_hostname:
+ When SNI is supported, the expected hostname of the certificate
+ :param ssl_context:
+ A pre-made :class:`SSLContext` object. If none is provided, one will
+ be created using :func:`create_urllib3_context`.
+ :param ciphers:
+ A string of ciphers we wish the client to support. This is not
+ supported on Python 2.6 as the ssl module does not support it.
+ """
+ context = ssl_context
+ if context is None:
+ context = create_urllib3_context(ssl_version, cert_reqs,
+ ciphers=ciphers)
+
+ if ca_certs:
+ try:
+ context.load_verify_locations(ca_certs)
+ except IOError as e: # Platform-specific: Python 2.6, 2.7, 3.2
+ raise SSLError(e)
+ # Py33 raises FileNotFoundError which subclasses OSError
+ # These are not equivalent unless we check the errno attribute
+ except OSError as e: # Platform-specific: Python 3.3 and beyond
+ if e.errno == errno.ENOENT:
raise SSLError(e)
- if certfile:
- # FIXME: This block needs a test.
- context.load_cert_chain(certfile, keyfile)
- if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
- return context.wrap_socket(sock, server_hostname=server_hostname)
- return context.wrap_socket(sock)
-
-else: # Python 3.1 and earlier
- def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
- ca_certs=None, server_hostname=None,
- ssl_version=None):
- return wrap_socket(sock, keyfile=keyfile, certfile=certfile,
- ca_certs=ca_certs, cert_reqs=cert_reqs,
- ssl_version=ssl_version)
+ raise
+ if certfile:
+ context.load_cert_chain(certfile, keyfile)
+ if HAS_SNI: # Platform-specific: OpenSSL with enabled SNI
+ return context.wrap_socket(sock, server_hostname=server_hostname)
+ return context.wrap_socket(sock)
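
A minimal sketch of the new helpers (urllib3 1.10 assumed; the hostname and CA path are illustrative, and the connection lines are commented out): create_urllib3_context() builds the hardened context, and ssl_wrap_socket() accepts it via the new ssl_context parameter, building one itself otherwise.

    import ssl
    from urllib3.util.ssl_ import create_urllib3_context, ssl_wrap_socket

    # SSLv2/SSLv3 and TLS compression disabled, restricted ciphers applied.
    ctx = create_urllib3_context(cert_reqs=ssl.CERT_REQUIRED)
    # ctx.load_verify_locations('/etc/ssl/certs/ca-certificates.crt')  # CA bundle path varies

    # import socket
    # sock = socket.create_connection(('example.com', 443))
    # tls = ssl_wrap_socket(sock, server_hostname='example.com', ssl_context=ctx)
    # print(tls.getpeercert())
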
diff --git a/urllib3/util/url.py b/urllib3/util/url.py
index 487d456..b2ec834 100644
--- a/urllib3/util/url.py
+++ b/urllib3/util/url.py
@@ -40,6 +40,48 @@ class Url(namedtuple('Url', url_attrs)):
return '%s:%d' % (self.host, self.port)
return self.host
+ @property
+ def url(self):
+ """
+ Convert self into a url
+
+ This function should more or less round-trip with :func:`.parse_url`. The
+ returned url may not be exactly the same as the url inputted to
+ :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls
+ with a blank port will have : removed).
+
+ Example: ::
+
+ >>> U = parse_url('http://google.com/mail/')
+ >>> U.url
+ 'http://google.com/mail/'
+ >>> Url('http', 'username:password', 'host.com', 80,
+ ... '/path', 'query', 'fragment').url
+ 'http://username:password@host.com:80/path?query#fragment'
+ """
+ scheme, auth, host, port, path, query, fragment = self
+ url = ''
+
+ # We use "is not None" because we want things to happen with empty strings (or 0 port)
+ if scheme is not None:
+ url += scheme + '://'
+ if auth is not None:
+ url += auth + '@'
+ if host is not None:
+ url += host
+ if port is not None:
+ url += ':' + str(port)
+ if path is not None:
+ url += path
+ if query is not None:
+ url += '?' + query
+ if fragment is not None:
+ url += '#' + fragment
+
+ return url
+
+ def __str__(self):
+ return self.url
def split_first(s, delims):
"""
@@ -84,7 +126,7 @@ def parse_url(url):
Example::
>>> parse_url('http://google.com/mail/')
- Url(scheme='http', host='google.com', port=None, path='/', ...)
+ Url(scheme='http', host='google.com', port=None, path='/mail/', ...)
>>> parse_url('google.com:80')
Url(scheme=None, host='google.com', port=80, path=None, ...)
>>> parse_url('/foo?bar')
@@ -162,7 +204,6 @@ def parse_url(url):
return Url(scheme, auth, host, port, path, query, fragment)
-
def get_host(url):
"""
Deprecated. Use :func:`.parse_url` instead.
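
Finally, a quick round-trip check of the new Url.url property and __str__ (urllib3 1.10 assumed):

    from urllib3.util.url import parse_url

    u = parse_url('http://user:pass@host.com:80/path?query#fragment')
    assert u.url == 'http://user:pass@host.com:80/path?query#fragment'
    assert str(u) == u.url            # __str__ now delegates to .url

    # Components that were absent stay absent; a blank port never becomes ':'.
    assert parse_url('http://google.com/mail/').url == 'http://google.com/mail/'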