Diffstat (limited to 'urllib3')
-rw-r--r--  urllib3/__init__.py                                       48
-rw-r--r--  urllib3/_collections.py                                  131
-rw-r--r--  urllib3/connectionpool.py                                629
-rw-r--r--  urllib3/contrib/__init__.py                                0
-rw-r--r--  urllib3/contrib/ntlmpool.py                              120
-rw-r--r--  urllib3/exceptions.py                                     67
-rw-r--r--  urllib3/filepost.py                                       74
-rw-r--r--  urllib3/packages/__init__.py                               4
-rw-r--r--  urllib3/packages/mimetools_choose_boundary/__init__.py   47
-rw-r--r--  urllib3/packages/six.py                                  372
-rw-r--r--  urllib3/packages/ssl_match_hostname/__init__.py           61
-rw-r--r--  urllib3/poolmanager.py                                   138
-rw-r--r--  urllib3/request.py                                       147
-rw-r--r--  urllib3/response.py                                      191
14 files changed, 2029 insertions, 0 deletions
diff --git a/urllib3/__init__.py b/urllib3/__init__.py
new file mode 100644
index 0000000..2e9c663
--- /dev/null
+++ b/urllib3/__init__.py
@@ -0,0 +1,48 @@
+# urllib3/__init__.py
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+urllib3 - Thread-safe connection pooling and re-using.
+"""
+
+__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
+__license__ = 'MIT'
+__version__ = '1.2.2'
+
+
+from .connectionpool import (
+ HTTPConnectionPool,
+ HTTPSConnectionPool,
+ connection_from_url,
+ get_host,
+ make_headers)
+
+
+from .exceptions import (
+ HTTPError,
+ MaxRetryError,
+ SSLError,
+ TimeoutError)
+
+from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+from .response import HTTPResponse
+from .filepost import encode_multipart_formdata
+
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+try:
+ from logging import NullHandler
+except ImportError:
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+# ... Clean up.
+del logging
+del NullHandler
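
A minimal usage sketch of the public API collected in this module (the URL is a
placeholder):

    import urllib3

    # One PoolManager transparently keeps a connection pool per host.
    http = urllib3.PoolManager(maxsize=2)
    r = http.request('GET', 'http://example.com/')

    print(r.status)                       # e.g. 200
    print(r.headers.get('content-type'))  # header keys are lower-cased
    payload = r.data                      # response body, decoded per content-encoding
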
diff --git a/urllib3/_collections.py b/urllib3/_collections.py
new file mode 100644
index 0000000..3cef081
--- /dev/null
+++ b/urllib3/_collections.py
@@ -0,0 +1,131 @@
+# urllib3/_collections.py
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from collections import deque
+
+from threading import RLock
+
+__all__ = ['RecentlyUsedContainer']
+
+
+class AccessEntry(object):
+ __slots__ = ('key', 'is_valid')
+
+ def __init__(self, key, is_valid=True):
+ self.key = key
+ self.is_valid = is_valid
+
+
+class RecentlyUsedContainer(dict):
+ """
+ Provides a dict-like container that maintains up to ``maxsize`` keys while
+ throwing away the least-recently-used keys beyond ``maxsize``.
+ """
+
+ # If len(self.access_log) exceeds self._maxsize * CLEANUP_FACTOR, then we
+ # will attempt to cleanup the invalidated entries in the access_log
+ # datastructure during the next 'get' operation.
+ CLEANUP_FACTOR = 10
+
+ def __init__(self, maxsize=10):
+ self._maxsize = maxsize
+
+ self._container = {}
+
+ # We use a deque to store our keys ordered by the last access.
+ self.access_log = deque()
+ self.access_log_lock = RLock()
+
+ # We look up the access log entry by the key to invalidate it so we can
+ # insert a new authoritative entry at the head without having to dig and
+ # find the old entry for removal immediately.
+ self.access_lookup = {}
+
+ # Trigger a heap cleanup when we get past this size
+ self.access_log_limit = maxsize * self.CLEANUP_FACTOR
+
+ def _invalidate_entry(self, key):
+ "Invalidate the old entry for ``key`` if it exists, and return it."
+ old_entry = self.access_lookup.get(key)
+ if old_entry:
+ old_entry.is_valid = False
+
+ return old_entry
+
+ def _push_entry(self, key):
+ "Push entry onto our access log, invalidating the old entry if it exists."
+ self._invalidate_entry(key)
+
+ new_entry = AccessEntry(key)
+ self.access_lookup[key] = new_entry
+
+ self.access_log_lock.acquire()
+ self.access_log.appendleft(new_entry)
+ self.access_log_lock.release()
+
+ def _prune_entries(self, num):
+ "Pop entries from our access log until we have popped ``num`` valid ones."
+ while num > 0:
+ self.access_log_lock.acquire()
+ p = self.access_log.pop()
+ self.access_log_lock.release()
+
+ if not p.is_valid:
+ continue # Invalidated entry, skip
+
+ dict.pop(self, p.key, None)
+ self.access_lookup.pop(p.key, None)
+ num -= 1
+
+ def _prune_invalidated_entries(self):
+ "Rebuild our access_log without the invalidated entries."
+ self.access_log_lock.acquire()
+ self.access_log = deque(e for e in self.access_log if e.is_valid)
+ self.access_log_lock.release()
+
+ def _get_ordered_access_keys(self):
+ "Return ordered access keys for inspection. Used for testing."
+ self.access_log_lock.acquire()
+ r = [e.key for e in self.access_log if e.is_valid]
+ self.access_log_lock.release()
+
+ return r
+
+ def __getitem__(self, key):
+ item = dict.get(self, key)
+
+ if not item:
+ raise KeyError(key)
+
+ # Insert new entry with new high priority, also implicitly invalidates
+ # the old entry.
+ self._push_entry(key)
+
+ if len(self.access_log) > self.access_log_limit:
+ # Heap is getting too big, try to clean up any tailing invalidated
+ # entries.
+ self._prune_invalidated_entries()
+
+ return item
+
+ def __setitem__(self, key, item):
+ # Add item to our container and access log
+ dict.__setitem__(self, key, item)
+ self._push_entry(key)
+
+ # Discard invalid and excess entries
+ self._prune_entries(len(self) - self._maxsize)
+
+ def __delitem__(self, key):
+ self._invalidate_entry(key)
+ self.access_lookup.pop(key, None)
+ dict.__delitem__(self, key)
+
+ def get(self, key, default=None):
+ try:
+ return self[key]
+ except KeyError:
+ return default
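
A minimal sketch of the LRU behaviour implemented above; once more than
``maxsize`` keys are stored, the least-recently-used key is evicted:

    from urllib3._collections import RecentlyUsedContainer

    lru = RecentlyUsedContainer(maxsize=2)
    lru['a'] = 1
    lru['b'] = 2
    lru['a']          # touching 'a' makes it the most recently used key
    lru['c'] = 3      # exceeds maxsize, so the stalest key ('b') is evicted

    assert sorted(lru.keys()) == ['a', 'c']
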
diff --git a/urllib3/connectionpool.py b/urllib3/connectionpool.py
new file mode 100644
index 0000000..39e652e
--- /dev/null
+++ b/urllib3/connectionpool.py
@@ -0,0 +1,629 @@
+# urllib3/connectionpool.py
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import logging
+import socket
+
+from base64 import b64encode
+from socket import error as SocketError, timeout as SocketTimeout
+
+try:
+ from select import poll, POLLIN
+except ImportError: # Doesn't exist on OSX and other platforms
+ from select import select
+ poll = False
+
+try: # Python 3
+ from http.client import HTTPConnection, HTTPException
+ from http.client import HTTP_PORT, HTTPS_PORT
+except ImportError:
+ from httplib import HTTPConnection, HTTPException
+ from httplib import HTTP_PORT, HTTPS_PORT
+
+try: # Python 3
+ from queue import LifoQueue, Empty, Full
+except ImportError:
+ from Queue import LifoQueue, Empty, Full
+
+
+try: # Compiled with SSL?
+ HTTPSConnection = object
+ BaseSSLError = None
+ ssl = None
+
+ try: # Python 3
+ from http.client import HTTPSConnection
+ except ImportError:
+ from httplib import HTTPSConnection
+
+ import ssl
+ BaseSSLError = ssl.SSLError
+
+except ImportError:
+ pass
+
+
+from .packages.ssl_match_hostname import match_hostname, CertificateError
+from .request import RequestMethods
+from .response import HTTPResponse
+from .exceptions import (
+ EmptyPoolError,
+ HostChangedError,
+ LocationParseError,
+ MaxRetryError,
+ SSLError,
+ TimeoutError,
+)
+
+from .packages import six
+
+xrange = six.moves.xrange
+
+log = logging.getLogger(__name__)
+
+_Default = object()
+
+port_by_scheme = {
+ 'http': HTTP_PORT,
+ 'https': HTTPS_PORT,
+}
+
+## Connection objects (extension of httplib)
+
+class VerifiedHTTPSConnection(HTTPSConnection):
+ """
+ Based on httplib.HTTPSConnection but wraps the socket with
+ SSL certification.
+ """
+ cert_reqs = None
+ ca_certs = None
+
+ def set_cert(self, key_file=None, cert_file=None,
+ cert_reqs='CERT_NONE', ca_certs=None):
+ ssl_req_scheme = {
+ 'CERT_NONE': ssl.CERT_NONE,
+ 'CERT_OPTIONAL': ssl.CERT_OPTIONAL,
+ 'CERT_REQUIRED': ssl.CERT_REQUIRED
+ }
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = ssl_req_scheme.get(cert_reqs) or ssl.CERT_NONE
+ self.ca_certs = ca_certs
+
+ def connect(self):
+ # Add certificate verification
+ sock = socket.create_connection((self.host, self.port), self.timeout)
+
+ # Wrap socket using verification with the root certs in
+ # trusted_root_certs
+ self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
+ cert_reqs=self.cert_reqs,
+ ca_certs=self.ca_certs)
+ if self.ca_certs:
+ match_hostname(self.sock.getpeercert(), self.host)
+
+## Pool objects
+
+class ConnectionPool(object):
+ """
+ Base class for all connection pools, such as
+ :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
+ """
+
+ scheme = None
+ QueueCls = LifoQueue
+
+ def __init__(self, host, port=None):
+ self.host = host
+ self.port = port
+
+ def __str__(self):
+ return '%s(host=%r, port=%r)' % (type(self).__name__,
+ self.host, self.port)
+
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+ """
+ Thread-safe connection pool for one host.
+
+ :param host:
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
+ :class:`httplib.HTTPConnection`.
+
+ :param port:
+ Port used for this HTTP Connection (None is equivalent to 80), passed
+ into :class:`httplib.HTTPConnection`.
+
+ :param strict:
+ Causes BadStatusLine to be raised if the status line can't be parsed
+ as a valid HTTP/1.0 or 1.1 status line, passed into
+ :class:`httplib.HTTPConnection`.
+
+ :param timeout:
+ Socket timeout for each individual connection, can be a float. None
+ disables timeout.
+
+ :param maxsize:
+ Number of connections to save that can be reused. More than 1 is useful
+ in multithreaded situations. If ``block`` is set to false, more
+ connections will be created but they will not be saved once they've
+ been used.
+
+ :param block:
+ If set to True, no more than ``maxsize`` connections will be used at
+ a time. When no free connections are available, the call will block
+ until a connection has been released. This is a useful side effect for
+ particular multithreaded situations where one does not want to use more
+ than maxsize connections per host to prevent flooding.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+ """
+
+ scheme = 'http'
+
+ def __init__(self, host, port=None, strict=False, timeout=None, maxsize=1,
+ block=False, headers=None):
+ super(HTTPConnectionPool, self).__init__(host, port)
+
+ self.strict = strict
+ self.timeout = timeout
+ self.pool = self.QueueCls(maxsize)
+ self.block = block
+ self.headers = headers or {}
+
+ # Fill the queue up so that doing get() on it will block properly
+ for _ in xrange(maxsize):
+ self.pool.put(None)
+
+ # These are mostly for testing and debugging purposes.
+ self.num_connections = 0
+ self.num_requests = 0
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`httplib.HTTPConnection`.
+ """
+ self.num_connections += 1
+ log.info("Starting new HTTP connection (%d): %s" %
+ (self.num_connections, self.host))
+ return HTTPConnection(host=self.host, port=self.port)
+
+ def _get_conn(self, timeout=None):
+ """
+ Get a connection. Will return a pooled connection if one is available.
+
+ If no connections are available and :prop:`.block` is ``False``, then a
+ fresh connection is returned.
+
+ :param timeout:
+ Seconds to wait before giving up and raising
+ :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
+ :prop:`.block` is ``True``.
+ """
+ conn = None
+ try:
+ conn = self.pool.get(block=self.block, timeout=timeout)
+
+ # If this is a persistent connection, check if it got disconnected
+ if conn and conn.sock and is_connection_dropped(conn):
+ log.info("Resetting dropped connection: %s" % self.host)
+ conn.close()
+
+ except Empty:
+ if self.block:
+ raise EmptyPoolError(self,
+ "Pool reached maximum size and no more "
+ "connections are allowed.")
+ pass # Oh well, we'll create a new connection then
+
+ return conn or self._new_conn()
+
+ def _put_conn(self, conn):
+ """
+ Put a connection back into the pool.
+
+ :param conn:
+ Connection object for the current host and port as returned by
+ :meth:`._new_conn` or :meth:`._get_conn`.
+
+ If the pool is already full, the connection is discarded because we
+ exceeded maxsize. If connections are discarded frequently, then maxsize
+ should be increased.
+ """
+ try:
+ self.pool.put(conn, block=False)
+ except Full:
+ # This should never happen if self.block == True
+ log.warning("HTTPConnectionPool is full, discarding connection: %s"
+ % self.host)
+
+ def _make_request(self, conn, method, url, timeout=_Default,
+ **httplib_request_kw):
+ """
+ Perform a request on a given httplib connection object taken from our
+ pool.
+ """
+ self.num_requests += 1
+
+ if timeout is _Default:
+ timeout = self.timeout
+
+ conn.timeout = timeout # This only does anything in Py26+
+
+ conn.request(method, url, **httplib_request_kw)
+ conn.sock.settimeout(timeout)
+ httplib_response = conn.getresponse()
+
+ log.debug("\"%s %s %s\" %s %s" %
+ (method, url,
+ conn._http_vsn_str, # pylint: disable-msg=W0212
+ httplib_response.status, httplib_response.length))
+
+ return httplib_response
+
+
+ def is_same_host(self, url):
+ """
+ Check if the given ``url`` is a member of the same host as this
+ connection pool.
+ """
+ # TODO: Add optional support for socket.gethostbyname checking.
+ scheme, host, port = get_host(url)
+
+ if self.port and not port:
+ # Use explicit default port for comparison when none is given.
+ port = port_by_scheme.get(scheme)
+
+ return (url.startswith('/') or
+ (scheme, host, port) == (self.scheme, self.host, self.port))
+
+ def urlopen(self, method, url, body=None, headers=None, retries=3,
+ redirect=True, assert_same_host=True, timeout=_Default,
+ pool_timeout=None, release_conn=None, **response_kw):
+ """
+ Get a connection from the pool and perform an HTTP request. This is the
+ lowest level call for making a request, so you'll need to specify all
+ the raw details.
+
+ .. note::
+
+ More commonly, it's appropriate to use a convenience method provided
+ by :class:`.RequestMethods`, such as :meth:`.request`.
+
+ .. note::
+
+ `release_conn` will only behave as expected if
+ `preload_content=False` because we want to make
+ `preload_content=False` the default behaviour someday soon without
+ breaking backwards compatibility.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param body:
+ Data to send in the request body (useful for creating
+ POST requests, see HTTPConnectionPool.post_url for
+ more convenience).
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+
+ :param retries:
+ Number of retries to allow before raising a MaxRetryError exception.
+
+ :param redirect:
+ Automatically handle redirects (status codes 301, 302, 303, 307),
+ each redirect counts as a retry.
+
+ :param assert_same_host:
+ If ``True``, will make sure that the host of the requested URL matches
+ the pool's host, and raise HostChangedError otherwise. When ``False``,
+ you can use the pool on an HTTP proxy and request foreign hosts.
+
+ :param timeout:
+ If specified, overrides the default timeout for this one request.
+
+ :param pool_timeout:
+ If set and the pool is set to block=True, then this method will
+ block for ``pool_timeout`` seconds and raise EmptyPoolError if no
+ connection is available within the time period.
+
+ :param release_conn:
+ If False, then the urlopen call will not release the connection
+ back into the pool once a response is received (but will release if
+ you read the entire contents of the response such as when
+ `preload_content=True`). This is useful if you're not preloading
+ the response's content immediately. You will need to call
+ ``r.release_conn()`` on the response ``r`` to return the connection
+ back into the pool. If None, it takes the value of
+ ``response_kw.get('preload_content', True)``.
+
+ :param \**response_kw:
+ Additional parameters are passed to
+ :meth:`urllib3.response.HTTPResponse.from_httplib`
+ """
+ if headers is None:
+ headers = self.headers
+
+ if retries < 0:
+ raise MaxRetryError(self, url)
+
+ if timeout is _Default:
+ timeout = self.timeout
+
+ if release_conn is None:
+ release_conn = response_kw.get('preload_content', True)
+
+ # Check host
+ if assert_same_host and not self.is_same_host(url):
+ host = "%s://%s" % (self.scheme, self.host)
+ if self.port:
+ host = "%s:%d" % (host, self.port)
+
+ raise HostChangedError(self, url, retries - 1)
+
+ conn = None
+
+ try:
+ # Request a connection from the queue
+ # (Could raise SocketError: Bad file descriptor)
+ conn = self._get_conn(timeout=pool_timeout)
+
+ # Make the request on the httplib connection object
+ httplib_response = self._make_request(conn, method, url,
+ timeout=timeout,
+ body=body, headers=headers)
+
+ # If we're going to release the connection in ``finally:``, then
+ # the request doesn't need to know about the connection. Otherwise
+ # it will also try to release it and we'll have a double-release
+ # mess.
+ response_conn = not release_conn and conn
+
+ # Import httplib's response into our own wrapper object
+ response = HTTPResponse.from_httplib(httplib_response,
+ pool=self,
+ connection=response_conn,
+ **response_kw)
+
+ # else:
+ # The connection will be put back into the pool when
+ # ``response.release_conn()`` is called (implicitly by
+ # ``response.read()``)
+
+ except Empty as e:
+ # Timed out by queue
+ raise TimeoutError(self, "Request timed out. (pool_timeout=%s)" %
+ pool_timeout)
+
+ except SocketTimeout as e:
+ # Timed out by socket
+ raise TimeoutError(self, "Request timed out. (timeout=%s)" %
+ timeout)
+
+ except BaseSSLError as e:
+ # SSL certificate error
+ raise SSLError(e)
+
+ except CertificateError as e:
+ # Name mismatch
+ raise SSLError(e)
+
+ except (HTTPException, SocketError) as e:
+ # Connection broken, discard. It will be replaced next _get_conn().
+ conn = None
+ # This is necessary so we can access e below
+ err = e
+
+ finally:
+ if conn and release_conn:
+ # Put the connection back to be reused
+ self._put_conn(conn)
+
+ if not conn:
+ log.warn("Retrying (%d attempts remain) after connection "
+ "broken by '%r': %s" % (retries, err, url))
+ return self.urlopen(method, url, body, headers, retries - 1,
+ redirect, assert_same_host) # Try again
+
+ # Handle redirect?
+ redirect_location = redirect and response.get_redirect_location()
+ if redirect_location:
+ log.info("Redirecting %s -> %s" % (url, redirect_location))
+ return self.urlopen(method, redirect_location, body, headers,
+ retries - 1, redirect, assert_same_host)
+
+ return response
+
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+ """
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
+
+ When Python is compiled with the :mod:`ssl` module, then
+ :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
+ instead of :class:`httplib.HTTPSConnection`.
+
+ The ``key_file``, ``cert_file``, ``cert_reqs``, and ``ca_certs`` parameters
+ are only used if :mod:`ssl` is available and are fed into
+ :meth:`ssl.wrap_socket` to upgrade the connection socket into an SSL socket.
+ """
+
+ scheme = 'https'
+
+ def __init__(self, host, port=None,
+ strict=False, timeout=None, maxsize=1,
+ block=False, headers=None,
+ key_file=None, cert_file=None,
+ cert_reqs='CERT_NONE', ca_certs=None):
+
+ super(HTTPSConnectionPool, self).__init__(host, port,
+ strict, timeout, maxsize,
+ block, headers)
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.ca_certs = ca_certs
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`httplib.HTTPSConnection`.
+ """
+ self.num_connections += 1
+ log.info("Starting new HTTPS connection (%d): %s"
+ % (self.num_connections, self.host))
+
+ if not ssl: # Platform-specific: Python compiled without +ssl
+ if not HTTPSConnection or HTTPSConnection is object:
+ raise SSLError("Can't connect to HTTPS URL because the SSL "
+ "module is not available.")
+
+ return HTTPSConnection(host=self.host, port=self.port)
+
+ connection = VerifiedHTTPSConnection(host=self.host, port=self.port)
+ connection.set_cert(key_file=self.key_file, cert_file=self.cert_file,
+ cert_reqs=self.cert_reqs, ca_certs=self.ca_certs)
+ return connection
+
+
+## Helpers
+
+def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
+ basic_auth=None):
+ """
+ Shortcuts for generating request headers.
+
+ :param keep_alive:
+ If ``True``, adds 'connection: keep-alive' header.
+
+ :param accept_encoding:
+ Can be a boolean, list, or string.
+ ``True`` translates to 'gzip,deflate'.
+ List will get joined by comma.
+ String will be used as provided.
+
+ :param user_agent:
+ String representing the user-agent you want, such as
+ "python-urllib3/0.6"
+
+ :param basic_auth:
+ Colon-separated username:password string for 'authorization: basic ...'
+ auth header.
+
+ Example: ::
+
+ >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+ {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+ >>> make_headers(accept_encoding=True)
+ {'accept-encoding': 'gzip,deflate'}
+ """
+ headers = {}
+ if accept_encoding:
+ if isinstance(accept_encoding, str):
+ pass
+ elif isinstance(accept_encoding, list):
+ accept_encoding = ','.join(accept_encoding)
+ else:
+ accept_encoding = 'gzip,deflate'
+ headers['accept-encoding'] = accept_encoding
+
+ if user_agent:
+ headers['user-agent'] = user_agent
+
+ if keep_alive:
+ headers['connection'] = 'keep-alive'
+
+ if basic_auth:
+ headers['authorization'] = 'Basic ' + \
+ b64encode(six.b(basic_auth)).decode('utf-8')
+
+ return headers
+
+
+def get_host(url):
+ """
+ Given a url, return its scheme, host and port (``None`` if the port is not given).
+
+ For example: ::
+
+ >>> get_host('http://google.com/mail/')
+ ('http', 'google.com', None)
+ >>> get_host('google.com:80')
+ ('http', 'google.com', 80)
+ """
+
+ # This code is actually similar to urlparse.urlsplit, but much
+ # simplified for our needs.
+ port = None
+ scheme = 'http'
+
+ if '://' in url:
+ scheme, url = url.split('://', 1)
+ if '/' in url:
+ url, _path = url.split('/', 1)
+ if '@' in url:
+ _auth, url = url.split('@', 1)
+ if ':' in url:
+ url, port = url.split(':', 1)
+
+ if not port.isdigit():
+ raise LocationParseError(url)
+
+ port = int(port)
+
+ return scheme, url, port
+
+
+def connection_from_url(url, **kw):
+ """
+ Given a url, return a :class:`.ConnectionPool` instance for its host.
+
+ This is a shortcut for not having to parse out the scheme, host, and port
+ of the url before creating a :class:`.ConnectionPool` instance.
+
+ :param url:
+ Absolute URL string that must include the scheme. Port is optional.
+
+ :param \**kw:
+ Passes additional parameters to the constructor of the appropriate
+ :class:`.ConnectionPool`. Useful for specifying things like
+ timeout, maxsize, headers, etc.
+
+ Example: ::
+
+ >>> conn = connection_from_url('http://google.com/')
+ >>> r = conn.request('GET', '/')
+ """
+ scheme, host, port = get_host(url)
+ if scheme == 'https':
+ return HTTPSConnectionPool(host, port=port, **kw)
+ else:
+ return HTTPConnectionPool(host, port=port, **kw)
+
+
+def is_connection_dropped(conn):
+ """
+ Returns True if the connection is dropped and should be closed.
+
+ :param conn:
+ ``HTTPConnection`` object.
+ """
+ if not poll: # Platform-specific
+ return select([conn.sock], [], [], 0.0)[0]
+
+ # This version is better on platforms that support it.
+ p = poll()
+ p.register(conn.sock, POLLIN)
+ for (fno, ev) in p.poll(0.0):
+ if fno == conn.sock.fileno():
+ # Either data is buffered (bad), or the connection is dropped.
+ return True
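
A minimal sketch combining the helpers above (the host and the credentials are
placeholders):

    from urllib3.connectionpool import connection_from_url, make_headers

    headers = make_headers(keep_alive=True, accept_encoding=True,
                           user_agent='example-client/1.0',
                           basic_auth='user:secret')

    # Picks HTTPConnectionPool or HTTPSConnectionPool based on the scheme and
    # keeps up to ``maxsize`` sockets around for reuse.
    pool = connection_from_url('http://example.com/', maxsize=3, timeout=5.0)
    r = pool.request('GET', '/', headers=headers)
    print(r.status, len(r.data))
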
diff --git a/urllib3/contrib/__init__.py b/urllib3/contrib/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/urllib3/contrib/__init__.py
diff --git a/urllib3/contrib/ntlmpool.py b/urllib3/contrib/ntlmpool.py
new file mode 100644
index 0000000..bb41fd1
--- /dev/null
+++ b/urllib3/contrib/ntlmpool.py
@@ -0,0 +1,120 @@
+# urllib3/contrib/ntlmpool.py
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+NTLM authenticating pool, contributed by erikcederstran
+
+Issue #10, see: http://code.google.com/p/urllib3/issues/detail?id=10
+"""
+
+try:
+ from http.client import HTTPSConnection
+except ImportError:
+ from httplib import HTTPSConnection
+from logging import getLogger
+from ntlm import ntlm
+
+from urllib3 import HTTPSConnectionPool
+
+
+log = getLogger(__name__)
+
+
+class NTLMConnectionPool(HTTPSConnectionPool):
+ """
+ Implements an NTLM authentication version of an urllib3 connection pool
+ """
+
+ scheme = 'https'
+
+ def __init__(self, user, pw, authurl, *args, **kwargs):
+ """
+ authurl is a random URL on the server that is protected by NTLM.
+ user is the Windows user, probably in the DOMAIN\\username format.
+ pw is the password for the user.
+ """
+ super(NTLMConnectionPool, self).__init__(*args, **kwargs)
+ self.authurl = authurl
+ self.rawuser = user
+ user_parts = user.split('\\', 1)
+ self.domain = user_parts[0].upper()
+ self.user = user_parts[1]
+ self.pw = pw
+
+ def _new_conn(self):
+ # Performs the NTLM handshake that secures the connection. The socket
+ # must be kept open while requests are performed.
+ self.num_connections += 1
+ log.debug('Starting NTLM HTTPS connection no. %d: https://%s%s' %
+ (self.num_connections, self.host, self.authurl))
+
+ headers = {}
+ headers['Connection'] = 'Keep-Alive'
+ req_header = 'Authorization'
+ resp_header = 'www-authenticate'
+
+ conn = HTTPSConnection(host=self.host, port=self.port)
+
+ # Send negotiation message
+ headers[req_header] = (
+ 'NTLM %s' % ntlm.create_NTLM_NEGOTIATE_MESSAGE(self.rawuser))
+ log.debug('Request headers: %s' % headers)
+ conn.request('GET', self.authurl, None, headers)
+ res = conn.getresponse()
+ reshdr = dict(res.getheaders())
+ log.debug('Response status: %s %s' % (res.status, res.reason))
+ log.debug('Response headers: %s' % reshdr)
+ log.debug('Response data: %s [...]' % res.read(100))
+
+ # Remove the reference to the socket, so that it can not be closed by
+ # the response object (we want to keep the socket open)
+ res.fp = None
+
+ # Server should respond with a challenge message
+ auth_header_values = reshdr[resp_header].split(', ')
+ auth_header_value = None
+ for s in auth_header_values:
+ if s[:5] == 'NTLM ':
+ auth_header_value = s[5:]
+ if auth_header_value is None:
+ raise Exception('Unexpected %s response header: %s' %
+ (resp_header, reshdr[resp_header]))
+
+ # Send authentication message
+ ServerChallenge, NegotiateFlags = \
+ ntlm.parse_NTLM_CHALLENGE_MESSAGE(auth_header_value)
+ auth_msg = ntlm.create_NTLM_AUTHENTICATE_MESSAGE(ServerChallenge,
+ self.user,
+ self.domain,
+ self.pw,
+ NegotiateFlags)
+ headers[req_header] = 'NTLM %s' % auth_msg
+ log.debug('Request headers: %s' % headers)
+ conn.request('GET', self.authurl, None, headers)
+ res = conn.getresponse()
+ log.debug('Response status: %s %s' % (res.status, res.reason))
+ log.debug('Response headers: %s' % dict(res.getheaders()))
+ log.debug('Response data: %s [...]' % res.read()[:100])
+ if res.status != 200:
+ if res.status == 401:
+ raise Exception('Server rejected request: wrong '
+ 'username or password')
+ raise Exception('Wrong server response: %s %s' %
+ (res.status, res.reason))
+
+ res.fp = None
+ log.debug('Connection established')
+ return conn
+
+ def urlopen(self, method, url, body=None, headers=None, retries=3,
+ redirect=True, assert_same_host=True):
+ if headers is None:
+ headers = {}
+ headers['Connection'] = 'Keep-Alive'
+ return super(NTLMConnectionPool, self).urlopen(method, url, body,
+ headers, retries,
+ redirect,
+ assert_same_host)
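
A minimal sketch of how this pool might be constructed; the host, domain,
credentials, and the NTLM-protected paths are placeholders, and the third-party
``ntlm`` package must be installed for the module import to succeed:

    from urllib3.contrib.ntlmpool import NTLMConnectionPool

    pool = NTLMConnectionPool('MYDOMAIN\\alice', 's3cret',
                              '/any-ntlm-protected-path/',
                              host='intranet.example.com', port=443)
    r = pool.request('GET', '/reports/weekly')
    print(r.status)
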
diff --git a/urllib3/exceptions.py b/urllib3/exceptions.py
new file mode 100644
index 0000000..15c9699
--- /dev/null
+++ b/urllib3/exceptions.py
@@ -0,0 +1,67 @@
+# urllib3/exceptions.py
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+## Base Exceptions
+
+class HTTPError(Exception):
+ "Base exception used by this module."
+ pass
+
+
+class PoolError(HTTPError):
+ "Base exception for errors caused within a pool."
+ def __init__(self, pool, message):
+ self.pool = pool
+ HTTPError.__init__(self, "%s: %s" % (pool, message))
+
+
+class SSLError(HTTPError):
+ "Raised when SSL certificate validation fails for an HTTPS connection."
+ pass
+
+
+## Leaf Exceptions
+
+class MaxRetryError(PoolError):
+ "Raised when the maximum number of retries is exceeded."
+
+ def __init__(self, pool, url):
+ message = "Max retries exceeded with url: %s" % url
+ PoolError.__init__(self, pool, message)
+
+ self.url = url
+
+
+class HostChangedError(PoolError):
+ "Raised when an existing pool gets a request for a foreign host."
+
+ def __init__(self, pool, url, retries=3):
+ message = "Tried to open a foreign host with url: %s" % url
+ PoolError.__init__(self, pool, message)
+
+ self.url = url
+ self.retries = retries
+
+
+class TimeoutError(PoolError):
+ "Raised when a socket timeout occurs."
+ pass
+
+
+class EmptyPoolError(PoolError):
+ "Raised when a pool runs out of connections and no more are allowed."
+ pass
+
+
+class LocationParseError(ValueError, HTTPError):
+ "Raised when get_host or similar fails to parse the URL input."
+
+ def __init__(self, location):
+ message = "Failed to parse: %s" % location
+ super(LocationParseError, self).__init__(message)
+
+ self.location = location
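
A minimal sketch of how these exceptions surface in practice (the host and the
endpoint are placeholders):

    from urllib3 import HTTPConnectionPool
    from urllib3.exceptions import MaxRetryError, TimeoutError

    pool = HTTPConnectionPool('example.com', timeout=2.0)
    try:
        r = pool.request('GET', '/sometimes-down', retries=2)
    except MaxRetryError as e:
        print('gave up on %s via %s' % (e.url, e.pool))
    except TimeoutError as e:
        print('socket or pool timeout: %s' % e)
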
diff --git a/urllib3/filepost.py b/urllib3/filepost.py
new file mode 100644
index 0000000..e1ec8af
--- /dev/null
+++ b/urllib3/filepost.py
@@ -0,0 +1,74 @@
+# urllib3/filepost.py
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import codecs
+import mimetypes
+
+try:
+ from mimetools import choose_boundary
+except ImportError:
+ from .packages.mimetools_choose_boundary import choose_boundary
+
+from io import BytesIO
+
+from .packages import six
+from .packages.six import b
+
+writer = codecs.lookup('utf-8')[3]
+
+
+def get_content_type(filename):
+ return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+def encode_multipart_formdata(fields, boundary=None):
+ """
+ Encode a dictionary of ``fields`` using the multipart/form-data mime format.
+
+ :param fields:
+ Dictionary of fields. The key is treated as the field name, and the
+ value as the body of the form-data. If the value is a tuple of two
+ elements, then the first element is treated as the filename of the
+ form-data section.
+
+ :param boundary:
+ If not specified, then a random boundary will be generated using
+ :func:`mimetools.choose_boundary`.
+ """
+ body = BytesIO()
+ if boundary is None:
+ boundary = choose_boundary()
+
+ for fieldname, value in six.iteritems(fields):
+ body.write(b('--%s\r\n' % (boundary)))
+
+ if isinstance(value, tuple):
+ filename, data = value
+ writer(body).write('Content-Disposition: form-data; name="%s"; '
+ 'filename="%s"\r\n' % (fieldname, filename))
+ body.write(b('Content-Type: %s\r\n\r\n' %
+ (get_content_type(filename))))
+ else:
+ data = value
+ writer(body).write('Content-Disposition: form-data; name="%s"\r\n'
+ % (fieldname))
+ body.write(b'Content-Type: text/plain\r\n\r\n')
+
+ if isinstance(data, int):
+ data = str(data) # Backwards compatibility
+
+ if isinstance(data, six.text_type):
+ writer(body).write(data)
+ else:
+ body.write(data)
+
+ body.write(b'\r\n')
+
+ body.write(b('--%s--\r\n' % (boundary)))
+
+ content_type = b('multipart/form-data; boundary=%s' % boundary)
+
+ return body.getvalue(), content_type
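
A minimal sketch of the encoder above; tuple values become file parts, plain
values become ordinary form fields:

    from urllib3.filepost import encode_multipart_formdata

    fields = {
        'name': 'value',                                   # plain text field
        'upload': ('report.txt', 'contents of the file'),  # (filename, data) file part
    }
    body, content_type = encode_multipart_formdata(fields)
    # ``body`` is the encoded payload and ``content_type`` carries the boundary,
    # e.g. b'multipart/form-data; boundary=...'
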
diff --git a/urllib3/packages/__init__.py b/urllib3/packages/__init__.py
new file mode 100644
index 0000000..37e8351
--- /dev/null
+++ b/urllib3/packages/__init__.py
@@ -0,0 +1,4 @@
+from __future__ import absolute_import
+
+from . import ssl_match_hostname
+
diff --git a/urllib3/packages/mimetools_choose_boundary/__init__.py b/urllib3/packages/mimetools_choose_boundary/__init__.py
new file mode 100644
index 0000000..a0109ab
--- /dev/null
+++ b/urllib3/packages/mimetools_choose_boundary/__init__.py
@@ -0,0 +1,47 @@
+"""The function mimetools.choose_boundary() from Python 2.7, which seems to
+have disappeared in Python 3 (although email.generator._make_boundary() might
+work as a replacement?).
+
+Tweaked to use lock from threading rather than thread.
+"""
+import os
+from threading import Lock
+_counter_lock = Lock()
+
+_counter = 0
+def _get_next_counter():
+ global _counter
+ with _counter_lock:
+ _counter += 1
+ return _counter
+
+_prefix = None
+
+def choose_boundary():
+ """Return a string usable as a multipart boundary.
+
+ The string chosen is unique within a single program run, and
+ incorporates the user id (if available), process id (if available),
+ and current time. So it's very unlikely the returned string appears
+ in message text, but there's no guarantee.
+
+ The boundary contains dots so you have to quote it in the header."""
+
+ global _prefix
+ import time
+ if _prefix is None:
+ import socket
+ try:
+ hostid = socket.gethostbyname(socket.gethostname())
+ except socket.gaierror:
+ hostid = '127.0.0.1'
+ try:
+ uid = repr(os.getuid())
+ except AttributeError:
+ uid = '1'
+ try:
+ pid = repr(os.getpid())
+ except AttributeError:
+ pid = '1'
+ _prefix = hostid + '.' + uid + '.' + pid
+ return "%s.%.3f.%d" % (_prefix, time.time(), _get_next_counter())
diff --git a/urllib3/packages/six.py b/urllib3/packages/six.py
new file mode 100644
index 0000000..a64f6fb
--- /dev/null
+++ b/urllib3/packages/six.py
@@ -0,0 +1,372 @@
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+#Copyright (c) 2010-2011 Benjamin Peterson
+
+#Permission is hereby granted, free of charge, to any person obtaining a copy of
+#this software and associated documentation files (the "Software"), to deal in
+#the Software without restriction, including without limitation the rights to
+#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+#the Software, and to permit persons to whom the Software is furnished to do so,
+#subject to the following conditions:
+
+#The above copyright notice and this permission notice shall be included in all
+#copies or substantial portions of the Software.
+
+#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.1.0"
+
+
+# True if we are running on Python 3.
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result)
+ # This is a bit ugly, but it avoids running this again.
+ delattr(tp, self.name)
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+
+class _MovedItems(types.ModuleType):
+ """Lazy loading of moved objects"""
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("winreg", "_winreg"),
+]
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+del attr
+
+moves = sys.modules["six.moves"] = _MovedItems("moves")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+
+ _iterkeys = "keys"
+ _itervalues = "values"
+ _iteritems = "items"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+
+ _iterkeys = "iterkeys"
+ _itervalues = "itervalues"
+ _iteritems = "iteritems"
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+
+ advance_iterator = next
+
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+
+ def advance_iterator(it):
+ return it.next()
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+
+
+def iterkeys(d):
+ """Return an iterator over the keys of a dictionary."""
+ return getattr(d, _iterkeys)()
+
+def itervalues(d):
+ """Return an iterator over the values of a dictionary."""
+ return getattr(d, _itervalues)()
+
+def iteritems(d):
+ """Return an iterator over the (key, value) pairs of a dictionary."""
+ return getattr(d, _iteritems)()
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+ def u(s):
+ return s
+ if sys.version_info[1] <= 1:
+ def int2byte(i):
+ return bytes((i,))
+ else:
+ # This is about 2x faster than the implementation above on 3.2+
+ int2byte = operator.methodcaller("to_bytes", 1, "big")
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+else:
+ def b(s):
+ return s
+ def u(s):
+ return unicode(s, "unicode_escape")
+ int2byte = chr
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+if PY3:
+ import builtins
+ exec_ = getattr(builtins, "exec")
+
+
+ def reraise(tp, value, tb=None):
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+
+ print_ = getattr(builtins, "print")
+ del builtins
+
+else:
+ def exec_(code, globs=None, locs=None):
+ """Execute code in a namespace."""
+ if globs is None:
+ frame = sys._getframe(1)
+ globs = frame.f_globals
+ if locs is None:
+ locs = frame.f_locals
+ del frame
+ elif locs is None:
+ locs = globs
+ exec("""exec code in globs, locs""")
+
+
+ exec_("""def reraise(tp, value, tb=None):
+ raise tp, value, tb
+""")
+
+
+ def print_(*args, **kwargs):
+ """The new-style print function."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+
+_add_doc(reraise, """Reraise an exception.""")
+
+
+def with_metaclass(meta, base=object):
+ """Create a base class with a metaclass."""
+ return meta("NewBase", (base,), {})
diff --git a/urllib3/packages/ssl_match_hostname/__init__.py b/urllib3/packages/ssl_match_hostname/__init__.py
new file mode 100644
index 0000000..9560b04
--- /dev/null
+++ b/urllib3/packages/ssl_match_hostname/__init__.py
@@ -0,0 +1,61 @@
+"""The match_hostname() function from Python 3.2, essential when using SSL."""
+
+import re
+
+__version__ = '3.2.2'
+
+class CertificateError(ValueError):
+ pass
+
+def _dnsname_to_pat(dn):
+ pats = []
+ for frag in dn.split(r'.'):
+ if frag == '*':
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append('[^.]+')
+ else:
+ # Otherwise, '*' matches any dotless fragment.
+ frag = re.escape(frag)
+ pats.append(frag.replace(r'\*', '[^.]*'))
+ return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+
+def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules
+ are mostly followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError("empty or no certificate")
+ dnsnames = []
+ san = cert.get('subjectAltName', ())
+ for key, value in san:
+ if key == 'DNS':
+ if _dnsname_to_pat(value).match(hostname):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get('subject', ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == 'commonName':
+ if _dnsname_to_pat(value).match(hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError("hostname %r "
+ "doesn't match either of %s"
+ % (hostname, ', '.join(map(repr, dnsnames))))
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r "
+ "doesn't match %r"
+ % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError("no appropriate commonName or "
+ "subjectAltName fields were found")
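
A minimal sketch of the matching rules above, using a hand-built certificate
dict shaped like the output of ``SSLSocket.getpeercert()``:

    from urllib3.packages.ssl_match_hostname import match_hostname, CertificateError

    cert = {
        'subject': ((('commonName', 'example.com'),),),
        'subjectAltName': (('DNS', 'example.com'), ('DNS', '*.example.com')),
    }

    match_hostname(cert, 'www.example.com')   # wildcard entry matches, returns None

    try:
        match_hostname(cert, 'attacker.test')
    except CertificateError as e:
        print(e)                              # hostname matches neither SAN entry
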
diff --git a/urllib3/poolmanager.py b/urllib3/poolmanager.py
new file mode 100644
index 0000000..d42f35b
--- /dev/null
+++ b/urllib3/poolmanager.py
@@ -0,0 +1,138 @@
+# urllib3/poolmanager.py
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import logging
+
+from ._collections import RecentlyUsedContainer
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from .connectionpool import get_host, connection_from_url, port_by_scheme
+from .exceptions import HostChangedError
+from .request import RequestMethods
+
+
+__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
+
+
+pool_classes_by_scheme = {
+ 'http': HTTPConnectionPool,
+ 'https': HTTPSConnectionPool,
+}
+
+log = logging.getLogger(__name__)
+
+
+class PoolManager(RequestMethods):
+ """
+ Allows for arbitrary requests while transparently keeping track of
+ necessary connection pools for you.
+
+ :param num_pools:
+ Number of connection pools to cache before discarding the least recently
+ used pool.
+
+ :param \**connection_pool_kw:
+ Additional parameters are used to create fresh
+ :class:`urllib3.connectionpool.ConnectionPool` instances.
+
+ Example: ::
+
+ >>> manager = PoolManager()
+ >>> r = manager.urlopen("http://google.com/")
+ >>> r = manager.urlopen("http://google.com/mail")
+ >>> r = manager.urlopen("http://yahoo.com/")
+ >>> len(manager.pools)
+ 2
+
+ """
+
+ # TODO: Make sure there are no memory leaks here.
+
+ def __init__(self, num_pools=10, **connection_pool_kw):
+ self.connection_pool_kw = connection_pool_kw
+ self.pools = RecentlyUsedContainer(num_pools)
+
+ def connection_from_host(self, host, port=80, scheme='http'):
+ """
+ Get a :class:`ConnectionPool` based on the host, port, and scheme.
+
+ Note that an appropriate ``port`` value is required here to normalize
+ connection pools in our container most effectively.
+ """
+ pool_key = (scheme, host, port)
+
+ # If the scheme, host, or port doesn't match existing open connections,
+ # open a new ConnectionPool.
+ pool = self.pools.get(pool_key)
+ if pool:
+ return pool
+
+ # Make a fresh ConnectionPool of the desired type
+ pool_cls = pool_classes_by_scheme[scheme]
+ pool = pool_cls(host, port, **self.connection_pool_kw)
+
+ self.pools[pool_key] = pool
+
+ return pool
+
+ def connection_from_url(self, url):
+ """
+ Similar to :func:`urllib3.connectionpool.connection_from_url` but
+ doesn't pass any additional parameters to the
+ :class:`urllib3.connectionpool.ConnectionPool` constructor.
+
+ Additional parameters are taken from the :class:`.PoolManager`
+ constructor.
+ """
+ scheme, host, port = get_host(url)
+
+ port = port or port_by_scheme.get(scheme, 80)
+
+ return self.connection_from_host(host, port=port, scheme=scheme)
+
+ def urlopen(self, method, url, **kw):
+ """
+ Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`.
+
+ ``url`` must be absolute, such that an appropriate
+ :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
+ """
+ conn = self.connection_from_url(url)
+ try:
+ return conn.urlopen(method, url, **kw)
+
+ except HostChangedError as e:
+ kw['retries'] = e.retries # Persist retries countdown
+ return self.urlopen(method, e.url, **kw)
+
+
+class ProxyManager(RequestMethods):
+ """
+ Given a ConnectionPool to a proxy, the ProxyManager's ``urlopen`` method
+ will make requests to any url through the defined proxy.
+ """
+
+ def __init__(self, proxy_pool):
+ self.proxy_pool = proxy_pool
+
+ def _set_proxy_headers(self, headers=None):
+ headers = headers or {}
+
+ # Same headers as curl passes for --proxy1.0
+ headers['Accept'] = '*/*'
+ headers['Proxy-Connection'] = 'Keep-Alive'
+
+ return headers
+
+ def urlopen(self, method, url, **kw):
+ "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
+ kw['assert_same_host'] = False
+ kw['headers'] = self._set_proxy_headers(kw.get('headers'))
+ return self.proxy_pool.urlopen(method, url, **kw)
+
+
+def proxy_from_url(url, **pool_kw):
+ proxy_pool = connection_from_url(url, **pool_kw)
+ return ProxyManager(proxy_pool)
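
A minimal sketch of both managers (the hosts and the proxy address are
placeholders):

    from urllib3 import PoolManager, proxy_from_url

    manager = PoolManager(num_pools=5, maxsize=2)
    manager.request('GET', 'http://example.com/')
    manager.request('GET', 'http://example.org/')
    print(len(manager.pools))          # one pool per (scheme, host, port): 2

    # ProxyManager funnels every absolute URL through one proxy pool.
    proxy = proxy_from_url('http://localhost:3128/')
    r = proxy.urlopen('GET', 'http://example.com/')
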
diff --git a/urllib3/request.py b/urllib3/request.py
new file mode 100644
index 0000000..5ea26a0
--- /dev/null
+++ b/urllib3/request.py
@@ -0,0 +1,147 @@
+# urllib3/request.py
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+try:
+ from urllib.parse import urlencode
+except ImportError:
+ from urllib import urlencode
+
+from .filepost import encode_multipart_formdata
+
+
+__all__ = ['RequestMethods']
+
+
+class RequestMethods(object):
+ """
+ Convenience mixin for classes who implement a :meth:`urlopen` method, such
+ as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
+ :class:`~urllib3.poolmanager.PoolManager`.
+
+ Provides behavior for making common types of HTTP requests and
+ decides which type of request field encoding to use.
+
+ Specifically,
+
+ :meth:`.request_encode_url` is for sending requests whose fields are encoded
+ in the URL (such as GET, HEAD, DELETE).
+
+ :meth:`.request_encode_body` is for sending requests whose fields are
+ encoded in the *body* of the request using multipart or x-www-form-urlencoded
+ (such as for POST, PUT, PATCH).
+
+ :meth:`.request` is for making any kind of request; it will look up the
+ appropriate encoding format and use one of the above two methods to make
+ the request.
+ """
+
+ _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])
+
+ _encode_body_methods = set(['PATCH', 'POST', 'PUT', 'TRACE'])
+
+ def urlopen(self, method, url, body=None, headers=None,
+ encode_multipart=True, multipart_boundary=None,
+ **kw):
+ raise NotImplementedError("Classes extending RequestMethods must implement "
+ "their own ``urlopen`` method.")
+
+ def request(self, method, url, fields=None, headers=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the appropriate encoding of
+ ``fields`` based on the ``method`` used.
+
+ This is a convenience method that requires the least amount of manual
+ effort. It can be used in most situations, while still having the option
+ to drop down to more specific methods when necessary, such as
+ :meth:`request_encode_url`, :meth:`request_encode_body`,
+ or even the lowest level :meth:`urlopen`.
+ """
+ method = method.upper()
+
+ if method in self._encode_url_methods:
+ return self.request_encode_url(method, url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
+ else:
+ return self.request_encode_body(method, url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
+
+ def request_encode_url(self, method, url, fields=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the url. This is useful for request methods like GET, HEAD, DELETE, etc.
+ """
+ if fields:
+ url += '?' + urlencode(fields)
+ return self.urlopen(method, url, **urlopen_kw)
+
+ def request_encode_body(self, method, url, fields=None, headers=None,
+ encode_multipart=True, multipart_boundary=None,
+ **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
+
+ When ``encode_multipart=True`` (default), then
+ :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the
+ payload with the appropriate content type. Otherwise
+ :meth:`urllib.urlencode` is used with the
+ 'application/x-www-form-urlencoded' content type.
+
+ Multipart encoding must be used when posting files, and it's reasonably
+ safe to use it at other times too. However, it may break request signing,
+ such as with OAuth.
+
+ Supports an optional ``fields`` parameter of key/value strings AND
+ key/filetuple. A filetuple is a (filename, data) tuple. For example: ::
+
+ fields = {
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'nonamefile': ('contents of nonamefile field'),
+ }
+
+ When uploading a file, providing a filename (the first parameter of the
+ tuple) is optional but recommended to best mimic the behavior of browsers.
+
+ Note that if ``headers`` are supplied, the 'Content-Type' header will be
+ overwritten because it depends on the dynamic random boundary string
+ which is used to compose the body of the request. The random boundary
+ string can be explicitly set with the ``multipart_boundary`` parameter.
+ """
+ if encode_multipart:
+ body, content_type = encode_multipart_formdata(fields or {},
+ boundary=multipart_boundary)
+ else:
+ body, content_type = (urlencode(fields or {}),
+ 'application/x-www-form-urlencoded')
+
+ headers = headers or {}
+ headers.update({'Content-Type': content_type})
+
+ return self.urlopen(method, url, body=body, headers=headers,
+ **urlopen_kw)
+
+ # Deprecated:
+
+ def get_url(self, url, fields=None, **urlopen_kw):
+ """
+ .. deprecated:: 1.0
+ Use :meth:`request` instead.
+ """
+ return self.request_encode_url('GET', url, fields=fields,
+ **urlopen_kw)
+
+ def post_url(self, url, fields=None, headers=None, **urlopen_kw):
+ """
+ .. deprecated:: 1.0
+ Use :meth:`request` instead.
+ """
+ return self.request_encode_body('POST', url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
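
A minimal sketch of the three encoding paths described above (the URLs are
placeholders):

    from urllib3 import PoolManager

    http = PoolManager()

    # GET/HEAD/DELETE/OPTIONS: fields go into the query string.
    http.request('GET', 'http://example.com/search', fields={'q': 'urllib3'})

    # POST/PUT/PATCH: fields are multipart-encoded by default.
    http.request('POST', 'http://example.com/upload',
                 fields={'file': ('hello.txt', 'Hello, world!')})

    # Opt out of multipart for an application/x-www-form-urlencoded body.
    http.request_encode_body('POST', 'http://example.com/login',
                             fields={'user': 'alice'},
                             encode_multipart=False)
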
diff --git a/urllib3/response.py b/urllib3/response.py
new file mode 100644
index 0000000..4dd431e
--- /dev/null
+++ b/urllib3/response.py
@@ -0,0 +1,191 @@
+# urllib3/response.py
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import gzip
+import logging
+import zlib
+
+from io import BytesIO
+
+from .exceptions import HTTPError
+from .packages.six import string_types as basestring
+
+
+log = logging.getLogger(__name__)
+
+
+def decode_gzip(data):
+ gzipper = gzip.GzipFile(fileobj=BytesIO(data))
+ return gzipper.read()
+
+
+def decode_deflate(data):
+ try:
+ return zlib.decompress(data)
+ except zlib.error:
+ return zlib.decompress(data, -zlib.MAX_WBITS)
+
+
+class HTTPResponse(object):
+ """
+ HTTP Response container.
+
+ Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
+ loaded and decoded on-demand when the ``data`` property is accessed.
+
+ Extra parameters for behaviour not present in httplib.HTTPResponse:
+
+ :param preload_content:
+ If True, the response's body will be preloaded during construction.
+
+ :param decode_content:
+ If True, the body is decoded on read according to the 'content-encoding'
+ header (such as 'gzip' and 'deflate'). If False, decoding is skipped and
+ the raw data is returned instead.
+
+ :param original_response:
+ When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
+ object, it's convenient to include the original for debug purposes. It's
+ otherwise unused.
+ """
+
+ CONTENT_DECODERS = {
+ 'gzip': decode_gzip,
+ 'deflate': decode_deflate,
+ }
+
+ def __init__(self, body='', headers=None, status=0, version=0, reason=None,
+ strict=0, preload_content=True, decode_content=True,
+ original_response=None, pool=None, connection=None):
+ self.headers = headers or {}
+ self.status = status
+ self.version = version
+ self.reason = reason
+ self.strict = strict
+
+ self._decode_content = decode_content
+ self._body = body if body and isinstance(body, basestring) else None
+ self._fp = None
+ self._original_response = original_response
+
+ self._pool = pool
+ self._connection = connection
+
+ if hasattr(body, 'read'):
+ self._fp = body
+
+ if preload_content and not self._body:
+ self._body = self.read(decode_content=decode_content)
+
+ def get_redirect_location(self):
+ """
+ Should we redirect and where to?
+
+ :returns: Truthy redirect location string if we got a redirect status
+ code and valid location. ``None`` if redirect status and no
+ location. ``False`` if not a redirect status code.
+ """
+ if self.status in [301, 302, 303, 307]:
+ return self.headers.get('location')
+
+ return False
+
+ def release_conn(self):
+ if not self._pool or not self._connection:
+ return
+
+ self._pool._put_conn(self._connection)
+ self._connection = None
+
+ @property
+ def data(self):
+ # For backwards-compat with urllib3 0.4 and earlier.
+ if self._body:
+ return self._body
+
+ if self._fp:
+ return self.read(cache_content=True)
+
+ def read(self, amt=None, decode_content=None, cache_content=False):
+ """
+ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
+ parameters: ``decode_content`` and ``cache_content``.
+
+ :param amt:
+ How much of the content to read. If specified, decoding and caching
+ is skipped because we can't decode partial content nor does it make
+ sense to cache partial content as the full response.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header. (Overridden if ``amt`` is set.)
+
+ :param cache_content:
+ If True, will save the returned data such that the same result is
+ returned regardless of the state of the underlying file object. This
+ is useful if you want the ``.data`` property to continue working
+ after having ``.read()`` the file object. (Overridden if ``amt`` is
+ set.)
+ """
+ content_encoding = self.headers.get('content-encoding')
+ decoder = self.CONTENT_DECODERS.get(content_encoding)
+ if decode_content is None:
+ decode_content = self._decode_content
+
+ if self._fp is None:
+ return
+
+ try:
+ if amt is None:
+ # cStringIO doesn't like amt=None
+ data = self._fp.read()
+ else:
+ return self._fp.read(amt)
+
+ try:
+ if decode_content and decoder:
+ data = decoder(data)
+ except IOError:
+ raise HTTPError("Received response with content-encoding: %s, but "
+ "failed to decode it." % content_encoding)
+
+ if cache_content:
+ self._body = data
+
+ return data
+
+ finally:
+ if self._original_response and self._original_response.isclosed():
+ self.release_conn()
+
+ @classmethod
+ def from_httplib(ResponseCls, r, **response_kw):
+ """
+ Given an :class:`httplib.HTTPResponse` instance ``r``, return a
+ corresponding :class:`urllib3.response.HTTPResponse` object.
+
+ Remaining parameters are passed to the HTTPResponse constructor, along
+ with ``original_response=r``.
+ """
+
+ # HTTPResponse objects in Python 3 don't have a .strict attribute
+ strict = getattr(r, 'strict', 0)
+ return ResponseCls(body=r,
+ # In Python 3, the header keys are returned capitalised
+ headers=dict((k.lower(), v) for k,v in r.getheaders()),
+ status=r.status,
+ version=r.version,
+ reason=r.reason,
+ strict=strict,
+ original_response=r,
+ **response_kw)
+
+ # Backwards-compatibility methods for httplib.HTTPResponse
+ def getheaders(self):
+ return self.headers
+
+ def getheader(self, name, default=None):
+ return self.headers.get(name, default)
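
A minimal sketch of streaming a response with ``preload_content=False`` and
returning the socket to the pool afterwards (the host and path are
placeholders):

    from urllib3 import HTTPConnectionPool

    pool = HTTPConnectionPool('example.com', maxsize=1)

    r = pool.urlopen('GET', '/big-file', preload_content=False)
    first_kb = r.read(1024)       # partial read: no decoding, no caching
    rest = r.read()               # read the remainder (decoded if applicable)
    r.release_conn()              # hand the socket back to the pool

    print(r.get_redirect_location())   # False for a non-redirect status
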