path: root/requests/packages
author     SVN-Git Migration <python-modules-team@lists.alioth.debian.org>  2015-10-08 13:41:22 -0700
committer  SVN-Git Migration <python-modules-team@lists.alioth.debian.org>  2015-10-08 13:41:22 -0700
commit     40337989ba5056432c9f2af3c42267e5ee9e3e18 (patch)
tree       25a680529c68fcdd7886b4b064845c3e371e167e /requests/packages
parent     e75853fc04102c7f72f2e955b63f9692c472f64a (diff)
download   python-requests-40337989ba5056432c9f2af3c42267e5ee9e3e18.tar
           python-requests-40337989ba5056432c9f2af3c42267e5ee9e3e18.tar.gz
Imported Upstream version 0.11.1
Diffstat (limited to 'requests/packages')
-rw-r--r--  requests/packages/oreos/monkeys.py            |   2
-rw-r--r--  requests/packages/urllib3/__init__.py         |  18
-rw-r--r--  requests/packages/urllib3/connectionpool.py   | 134
-rw-r--r--  requests/packages/urllib3/filepost.py         |  24
-rw-r--r--  requests/packages/urllib3/poolmanager.py      |   4
-rw-r--r--  requests/packages/urllib3/request.py          |  21
-rw-r--r--  requests/packages/urllib3/response.py         |  15
-rw-r--r--  requests/packages/urllib3/util.py             | 136
8 files changed, 191 insertions, 163 deletions
diff --git a/requests/packages/oreos/monkeys.py b/requests/packages/oreos/monkeys.py
index 72ce68d..2cf9016 100644
--- a/requests/packages/oreos/monkeys.py
+++ b/requests/packages/oreos/monkeys.py
@@ -255,7 +255,7 @@ class CookieError(Exception):
#
_RFC2965Forbidden = "[]:{}="
_LegalChars = ( string.ascii_letters + string.digits +
- "!#$%&'*+-.^_`|~_" + _RFC2965Forbidden )
+ "!#$%&'*+-.^_`|~_@" + _RFC2965Forbidden )
_Translator = {
'\000' : '\\000', '\001' : '\\001', '\002' : '\\002',
'\003' : '\\003', '\004' : '\\004', '\005' : '\\005',
diff --git a/requests/packages/urllib3/__init__.py b/requests/packages/urllib3/__init__.py
index 2e9c663..2d6fece 100644
--- a/requests/packages/urllib3/__init__.py
+++ b/requests/packages/urllib3/__init__.py
@@ -10,26 +10,20 @@ urllib3 - Thread-safe connection pooling and re-using.
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
-__version__ = '1.2.2'
+__version__ = '1.3'
from .connectionpool import (
HTTPConnectionPool,
HTTPSConnectionPool,
- connection_from_url,
- get_host,
- make_headers)
-
-
-from .exceptions import (
- HTTPError,
- MaxRetryError,
- SSLError,
- TimeoutError)
+ connection_from_url
+)
+from . import exceptions
+from .filepost import encode_multipart_formdata
from .poolmanager import PoolManager, ProxyManager, proxy_from_url
from .response import HTTPResponse
-from .filepost import encode_multipart_formdata
+from .util import make_headers, get_host
# Set default logging handler to avoid "No handler found" warnings.
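For orientation (not part of the patch): a minimal sketch of how callers import these helpers after the reorganisation, assuming the vendored copy that ships inside requests.

# Sketch only: import paths under the new layout (vendored inside requests).
from requests.packages.urllib3 import make_headers, get_host, exceptions

headers = make_headers(keep_alive=True, user_agent="example/1.0")
scheme, host, port = get_host("http://example.com:8080/mail")

try:
    pass  # pool.urlopen(...) would go here
except exceptions.MaxRetryError:
    pass  # exception classes are now reached through the exceptions module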
diff --git a/requests/packages/urllib3/connectionpool.py b/requests/packages/urllib3/connectionpool.py
index 39e652e..c3cb3b1 100644
--- a/requests/packages/urllib3/connectionpool.py
+++ b/requests/packages/urllib3/connectionpool.py
@@ -7,15 +7,8 @@
import logging
import socket
-from base64 import b64encode
from socket import error as SocketError, timeout as SocketTimeout
-try:
- from select import poll, POLLIN
-except ImportError: # Doesn't exist on OSX and other platforms
- from select import select
- poll = False
-
try: # Python 3
from http.client import HTTPConnection, HTTPException
from http.client import HTTP_PORT, HTTPS_PORT
@@ -42,17 +35,16 @@ try: # Compiled with SSL?
import ssl
BaseSSLError = ssl.SSLError
-except ImportError:
+except (ImportError, AttributeError):
pass
-from .packages.ssl_match_hostname import match_hostname, CertificateError
from .request import RequestMethods
from .response import HTTPResponse
+from .util import get_host, is_connection_dropped
from .exceptions import (
EmptyPoolError,
HostChangedError,
- LocationParseError,
MaxRetryError,
SSLError,
TimeoutError,
@@ -61,6 +53,7 @@ from .exceptions import (
from .packages.ssl_match_hostname import match_hostname, CertificateError
from .packages import six
+
xrange = six.moves.xrange
log = logging.getLogger(__name__)
@@ -72,6 +65,7 @@ port_by_scheme = {
'https': HTTPS_PORT,
}
+
## Connection objects (extension of httplib)
class VerifiedHTTPSConnection(HTTPSConnection):
@@ -107,6 +101,7 @@ class VerifiedHTTPSConnection(HTTPSConnection):
if self.ca_certs:
match_hostname(self.sock.getpeercert(), self.host)
+
## Pool objects
class ConnectionPool(object):
@@ -212,7 +207,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn = self.pool.get(block=self.block, timeout=timeout)
# If this is a persistent connection, check if it got disconnected
- if conn and conn.sock and is_connection_dropped(conn):
+ if conn and is_connection_dropped(conn):
log.info("Resetting dropped connection: %s" % self.host)
conn.close()
@@ -256,9 +251,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
timeout = self.timeout
conn.timeout = timeout # This only does anything in Py26+
-
conn.request(method, url, **httplib_request_kw)
- conn.sock.settimeout(timeout)
+
+ # Set timeout
+ sock = getattr(conn, 'sock', False) # AppEngine doesn't have sock attr.
+ if sock:
+ sock.settimeout(timeout)
+
httplib_response = conn.getresponse()
log.debug("\"%s %s %s\" %s %s" %
@@ -295,7 +294,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
.. note::
More commonly, it's appropriate to use a convenience method provided
- by :class:`.RequestMethods`, such as :meth:`.request`.
+ by :class:`.RequestMethods`, such as :meth:`request`.
.. note::
@@ -495,94 +494,6 @@ class HTTPSConnectionPool(HTTPConnectionPool):
return connection
-## Helpers
-
-def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
- basic_auth=None):
- """
- Shortcuts for generating request headers.
-
- :param keep_alive:
- If ``True``, adds 'connection: keep-alive' header.
-
- :param accept_encoding:
- Can be a boolean, list, or string.
- ``True`` translates to 'gzip,deflate'.
- List will get joined by comma.
- String will be used as provided.
-
- :param user_agent:
- String representing the user-agent you want, such as
- "python-urllib3/0.6"
-
- :param basic_auth:
- Colon-separated username:password string for 'authorization: basic ...'
- auth header.
-
- Example: ::
-
- >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
- {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
- >>> make_headers(accept_encoding=True)
- {'accept-encoding': 'gzip,deflate'}
- """
- headers = {}
- if accept_encoding:
- if isinstance(accept_encoding, str):
- pass
- elif isinstance(accept_encoding, list):
- accept_encoding = ','.join(accept_encoding)
- else:
- accept_encoding = 'gzip,deflate'
- headers['accept-encoding'] = accept_encoding
-
- if user_agent:
- headers['user-agent'] = user_agent
-
- if keep_alive:
- headers['connection'] = 'keep-alive'
-
- if basic_auth:
- headers['authorization'] = 'Basic ' + \
- b64encode(six.b(basic_auth)).decode('utf-8')
-
- return headers
-
-
-def get_host(url):
- """
- Given a url, return its scheme, host and port (None if it's not there).
-
- For example: ::
-
- >>> get_host('http://google.com/mail/')
- ('http', 'google.com', None)
- >>> get_host('google.com:80')
- ('http', 'google.com', 80)
- """
-
- # This code is actually similar to urlparse.urlsplit, but much
- # simplified for our needs.
- port = None
- scheme = 'http'
-
- if '://' in url:
- scheme, url = url.split('://', 1)
- if '/' in url:
- url, _path = url.split('/', 1)
- if '@' in url:
- _auth, url = url.split('@', 1)
- if ':' in url:
- url, port = url.split(':', 1)
-
- if not port.isdigit():
- raise LocationParseError("Failed to parse: %s")
-
- port = int(port)
-
- return scheme, url, port
-
-
def connection_from_url(url, **kw):
"""
Given a url, return an :class:`.ConnectionPool` instance of its host.
@@ -608,22 +519,3 @@ def connection_from_url(url, **kw):
return HTTPSConnectionPool(host, port=port, **kw)
else:
return HTTPConnectionPool(host, port=port, **kw)
-
-
-def is_connection_dropped(conn):
- """
- Returns True if the connection is dropped and should be closed.
-
- :param conn:
- ``HTTPConnection`` object.
- """
- if not poll: # Platform-specific
- return select([conn.sock], [], [], 0.0)[0]
-
- # This version is better on platforms that support it.
- p = poll()
- p.register(conn.sock, POLLIN)
- for (fno, ev) in p.poll(0.0):
- if fno == conn.sock.fileno():
- # Either data is buffered (bad), or the connection is dropped.
- return True
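As context for the hunks above (illustrative, not part of the patch): connection_from_url keeps working unchanged; only the helper functions it relied on now live in the new util module.

# Sketch only: building a pool straight from a URL.
from requests.packages.urllib3.connectionpool import connection_from_url

pool = connection_from_url("http://example.com/", maxsize=2)
r = pool.request("GET", "/")   # convenience method inherited from RequestMethods
print(r.status, len(r.data))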
diff --git a/requests/packages/urllib3/filepost.py b/requests/packages/urllib3/filepost.py
index e1ec8af..344a103 100644
--- a/requests/packages/urllib3/filepost.py
+++ b/requests/packages/urllib3/filepost.py
@@ -24,15 +24,29 @@ def get_content_type(filename):
return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+def iter_fields(fields):
+ """
+ Iterate over fields.
+
+ Supports list of (k, v) tuples and dicts.
+ """
+ if isinstance(fields, dict):
+ return ((k, v) for k, v in six.iteritems(fields))
+
+ return ((k, v) for k, v in fields)
+
+
def encode_multipart_formdata(fields, boundary=None):
"""
Encode a dictionary of ``fields`` using the multipart/form-data mime format.
:param fields:
- Dictionary of fields. The key is treated as the field name, and the
- value as the body of the form-data. If the value is a tuple of two
- elements, then the first element is treated as the filename of the
- form-data section.
+ Dictionary of fields or list of (key, value) field tuples. The key is
+ treated as the field name, and the value as the body of the form-data
+ bytes. If the value is a tuple of two elements, then the first element
+ is treated as the filename of the form-data section.
+
+ Field names and filenames must be unicode.
:param boundary:
If not specified, then a random boundary will be generated using
@@ -42,7 +56,7 @@ def encode_multipart_formdata(fields, boundary=None):
if boundary is None:
boundary = choose_boundary()
- for fieldname, value in six.iteritems(fields):
+ for fieldname, value in iter_fields(fields):
body.write(b('--%s\r\n' % (boundary)))
if isinstance(value, tuple):
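Illustrative usage of the new iter_fields-based behaviour (not part of the patch): encode_multipart_formdata now accepts a list of (key, value) tuples in addition to a dict, which preserves field ordering.

# Sketch only: list-of-tuples fields, including a (filename, body) tuple.
from requests.packages.urllib3.filepost import encode_multipart_formdata

fields = [
    ("name", "value"),
    ("upload", ("report.txt", "file contents go here")),
]
body, content_type = encode_multipart_formdata(fields)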
diff --git a/requests/packages/urllib3/poolmanager.py b/requests/packages/urllib3/poolmanager.py
index d42f35b..310ea21 100644
--- a/requests/packages/urllib3/poolmanager.py
+++ b/requests/packages/urllib3/poolmanager.py
@@ -39,11 +39,11 @@ class PoolManager(RequestMethods):
Example: ::
- >>> manager = PoolManager()
+ >>> manager = PoolManager(num_pools=2)
>>> r = manager.urlopen("http://google.com/")
>>> r = manager.urlopen("http://google.com/mail")
>>> r = manager.urlopen("http://yahoo.com/")
- >>> len(r.pools)
+ >>> len(manager.pools)
2
"""
diff --git a/requests/packages/urllib3/request.py b/requests/packages/urllib3/request.py
index 5ea26a0..569ac96 100644
--- a/requests/packages/urllib3/request.py
+++ b/requests/packages/urllib3/request.py
@@ -44,7 +44,7 @@ class RequestMethods(object):
def urlopen(self, method, url, body=None, headers=None,
encode_multipart=True, multipart_boundary=None,
- **kw):
+ **kw): # Abstract
raise NotImplemented("Classes extending RequestMethods must implement "
"their own ``urlopen`` method.")
@@ -126,22 +126,3 @@ class RequestMethods(object):
return self.urlopen(method, url, body=body, headers=headers,
**urlopen_kw)
-
- # Deprecated:
-
- def get_url(self, url, fields=None, **urlopen_kw):
- """
- .. deprecated:: 1.0
- Use :meth:`request` instead.
- """
- return self.request_encode_url('GET', url, fields=fields,
- **urlopen_kw)
-
- def post_url(self, url, fields=None, headers=None, **urlopen_kw):
- """
- .. deprecated:: 1.0
- Use :meth:`request` instead.
- """
- return self.request_encode_body('POST', url, fields=fields,
- headers=headers,
- **urlopen_kw)
diff --git a/requests/packages/urllib3/response.py b/requests/packages/urllib3/response.py
index 4dd431e..5fab824 100644
--- a/requests/packages/urllib3/response.py
+++ b/requests/packages/urllib3/response.py
@@ -171,11 +171,22 @@ class HTTPResponse(object):
with ``original_response=r``.
"""
+ # Normalize headers between different versions of Python
+ headers = {}
+ for k, v in r.getheaders():
+ # Python 3: Header keys are returned capitalised
+ k = k.lower()
+
+ has_value = headers.get(k)
+ if has_value: # Python 3: Repeating header keys are unmerged.
+ v = ', '.join([has_value, v])
+
+ headers[k] = v
+
# HTTPResponse objects in Python 3 don't have a .strict attribute
strict = getattr(r, 'strict', 0)
return ResponseCls(body=r,
- # In Python 3, the header keys are returned capitalised
- headers=dict((k.lower(), v) for k,v in r.getheaders()),
+ headers=headers,
status=r.status,
version=r.version,
reason=r.reason,
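A standalone illustration of the header normalisation introduced above (an assumption-level sketch that mirrors the loop in from_httplib; not part of the patch):

# Sketch only: lowercase keys and re-merge repeated headers with ", ".
def merge_headers(raw_headers):
    headers = {}
    for k, v in raw_headers:
        k = k.lower()
        existing = headers.get(k)
        if existing:
            v = ', '.join([existing, v])
        headers[k] = v
    return headers

# merge_headers([("Set-Cookie", "a=1"), ("Set-Cookie", "b=2")])
# -> {'set-cookie': 'a=1, b=2'}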
diff --git a/requests/packages/urllib3/util.py b/requests/packages/urllib3/util.py
new file mode 100644
index 0000000..2684a2f
--- /dev/null
+++ b/requests/packages/urllib3/util.py
@@ -0,0 +1,136 @@
+# urllib3/util.py
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+from base64 import b64encode
+
+try:
+ from select import poll, POLLIN
+except ImportError: # `poll` doesn't exist on OSX and other platforms
+ poll = False
+ try:
+ from select import select
+ except ImportError: # `select` doesn't exist on AppEngine.
+ select = False
+
+from .packages import six
+from .exceptions import LocationParseError
+
+
+def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
+ basic_auth=None):
+ """
+ Shortcuts for generating request headers.
+
+ :param keep_alive:
+ If ``True``, adds 'connection: keep-alive' header.
+
+ :param accept_encoding:
+ Can be a boolean, list, or string.
+ ``True`` translates to 'gzip,deflate'.
+ List will get joined by comma.
+ String will be used as provided.
+
+ :param user_agent:
+ String representing the user-agent you want, such as
+ "python-urllib3/0.6"
+
+ :param basic_auth:
+ Colon-separated username:password string for 'authorization: basic ...'
+ auth header.
+
+ Example: ::
+
+ >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+ {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+ >>> make_headers(accept_encoding=True)
+ {'accept-encoding': 'gzip,deflate'}
+ """
+ headers = {}
+ if accept_encoding:
+ if isinstance(accept_encoding, str):
+ pass
+ elif isinstance(accept_encoding, list):
+ accept_encoding = ','.join(accept_encoding)
+ else:
+ accept_encoding = 'gzip,deflate'
+ headers['accept-encoding'] = accept_encoding
+
+ if user_agent:
+ headers['user-agent'] = user_agent
+
+ if keep_alive:
+ headers['connection'] = 'keep-alive'
+
+ if basic_auth:
+ headers['authorization'] = 'Basic ' + \
+ b64encode(six.b(basic_auth)).decode('utf-8')
+
+ return headers
+
+
+def get_host(url):
+ """
+ Given a url, return its scheme, host and port (None if it's not there).
+
+ For example: ::
+
+ >>> get_host('http://google.com/mail/')
+ ('http', 'google.com', None)
+ >>> get_host('google.com:80')
+ ('http', 'google.com', 80)
+ """
+
+ # This code is actually similar to urlparse.urlsplit, but much
+ # simplified for our needs.
+ port = None
+ scheme = 'http'
+
+ if '://' in url:
+ scheme, url = url.split('://', 1)
+ if '/' in url:
+ url, _path = url.split('/', 1)
+ if '@' in url:
+ _auth, url = url.split('@', 1)
+ if ':' in url:
+ url, port = url.split(':', 1)
+
+ if not port.isdigit():
+ raise LocationParseError("Failed to parse: %s" % url)
+
+ port = int(port)
+
+ return scheme, url, port
+
+
+
+def is_connection_dropped(conn):
+ """
+ Returns True if the connection is dropped and should be closed.
+
+ :param conn:
+ ``HTTPConnection`` object.
+
+ Note: For platforms like AppEngine, this will always return ``False`` to
+ let the platform handle connection recycling transparently for us.
+ """
+ sock = getattr(conn, 'sock', False)
+ if not sock: #Platform-specific: AppEngine
+ return False
+
+ if not poll: # Platform-specific
+ if not select: #Platform-specific: AppEngine
+ return False
+
+ return select([sock], [], [], 0.0)[0]
+
+ # This version is better on platforms that support it.
+ p = poll()
+ p.register(sock, POLLIN)
+ for (fno, ev) in p.poll(0.0):
+ if fno == sock.fileno():
+ # Either data is buffered (bad), or the connection is dropped.
+ return True