Diffstat:
-rw-r--r--  HISTORY.rst | 34
-rw-r--r--  PKG-INFO | 36
-rw-r--r--  requests.egg-info/PKG-INFO | 36
-rw-r--r--  requests.egg-info/SOURCES.txt | 1
-rw-r--r--  requests/__init__.py | 4
-rw-r--r--  requests/adapters.py | 53
-rw-r--r--  requests/api.py | 1
-rw-r--r--  requests/auth.py | 27
-rw-r--r--  requests/cacert.pem | 48
-rw-r--r--  requests/certs.py | 9
-rw-r--r--  requests/compat.py | 1
-rw-r--r--  requests/cookies.py | 16
-rw-r--r--  requests/exceptions.py | 6
-rw-r--r--  requests/hooks.py | 6
-rw-r--r--  requests/models.py | 102
-rw-r--r--  requests/packages/charade/__init__.py | 7
-rw-r--r--  requests/packages/charade/chardistribution.py | 3
-rw-r--r--  requests/packages/charade/compat.py | 12
-rw-r--r--  requests/packages/charade/cp949prober.py | 44
-rw-r--r--  requests/packages/charade/langcyrillicmodel.py | 2
-rw-r--r--  requests/packages/charade/langgreekmodel.py | 2
-rw-r--r--  requests/packages/charade/langhebrewmodel.py | 2
-rw-r--r--  requests/packages/charade/langhungarianmodel.py | 2
-rw-r--r--  requests/packages/charade/mbcsgroupprober.py | 2
-rw-r--r--  requests/packages/charade/mbcssm.py | 40
-rw-r--r--  requests/packages/charade/universaldetector.py | 15
-rw-r--r--  requests/packages/urllib3/connectionpool.py | 26
-rw-r--r--  requests/packages/urllib3/poolmanager.py | 33
-rw-r--r--  requests/packages/urllib3/response.py | 12
-rw-r--r--  requests/packages/urllib3/util.py | 52
-rw-r--r--  requests/sessions.py | 200
-rw-r--r--  requests/structures.py | 2
-rw-r--r--  requests/utils.py | 37
-rw-r--r--  requirements.txt | 5
-rwxr-xr-x  setup.py | 2
-rw-r--r--  test_requests.py | 161
36 files changed, 788 insertions, 253 deletions
diff --git a/HISTORY.rst b/HISTORY.rst
index a3491a1..26aa25f 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -3,6 +3,34 @@
History
-------
+1.2.0 (2013-03-31)
+++++++++++++++++++
+
+- Fixed cookies on sessions and on requests
+- Significantly change how hooks are dispatched - hooks now receive all the
+ arguments specified by the user when making a request so hooks can make a
+ secondary request with the same parameters. This is especially necessary for
+ authentication handler authors
+- certifi support was removed
+- Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
+- Major proxy work thanks to @Lukasa including parsing of proxy authentication
+ from the proxy url
+- Fix DigestAuth handling too many 401s
+- Update vendored urllib3 to include SSL bug fixes
+- Allow keyword arguments to be passed to ``json.loads()`` via the
+ ``Response.json()`` method
+- Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
+ requests
+- Add ``elapsed`` attribute to ``Response`` objects to time how long a request
+ took.
+- Fix ``RequestsCookieJar``
+- Sessions and Adapters are now picklable, i.e., can be used with the
+ multiprocessing library
+- Update charade to version 1.0.3
+
+The change in how hooks are dispatched will likely cause a great deal of
+issues.
+
1.1.0 (2013-01-10)
++++++++++++++++++
@@ -40,7 +68,7 @@ History
- /s/prefetch/stream
- Removal of all configuration
- Standard library logging
-- Make Reponse.json() callable, not property.
+- Make Response.json() callable, not property.
- Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
- Removal of all hooks except 'response'
- Removal of all authentication helpers (OAuth, Kerberos)
@@ -575,10 +603,10 @@ This is not a backwards compatible change.
++++++++++++++++++
* New HTTPHandling Methods
- - Reponse.__nonzero__ (false if bad HTTP Status)
+ - Response.__nonzero__ (false if bad HTTP Status)
- Response.ok (True if expected HTTP Status)
- Response.error (Logged HTTPError if bad HTTP Status)
- - Reponse.raise_for_status() (Raises stored HTTPError)
+ - Response.raise_for_status() (Raises stored HTTPError)
0.2.2 (2011-02-14)
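
A minimal sketch of the headline 1.2.0 additions listed above, assuming
httpbin.org is reachable: the new ``Response.elapsed`` timedelta and keyword
arguments forwarded to ``json.loads()``::

    import datetime
    from collections import OrderedDict

    import requests

    r = requests.get('http://httpbin.org/get')

    # elapsed measures the time between sending the request and the
    # arrival of the response.
    assert isinstance(r.elapsed, datetime.timedelta)
    print(r.elapsed.total_seconds())

    # Any json.loads() keyword is passed straight through.
    print(type(r.json(object_pairs_hook=OrderedDict)))  # OrderedDict
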
diff --git a/PKG-INFO b/PKG-INFO
index 39bb37f..69f0d3a 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: requests
-Version: 1.1.0
+Version: 1.2.0
Summary: Python HTTP for Humans.
Home-page: http://python-requests.org
Author: Kenneth Reitz
@@ -108,6 +108,34 @@ Description: Requests: HTTP for Humans
History
-------
+ 1.2.0 (2013-03-31)
+ ++++++++++++++++++
+
+ - Fixed cookies on sessions and on requests
+ - Significantly change how hooks are dispatched - hooks now receive all the
+ arguments specified by the user when making a request so hooks can make a
+ secondary request with the same parameters. This is especially necessary for
+ authentication handler authors
+ - certifi support was removed
+ - Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
+ - Major proxy work thanks to @Lukasa including parsing of proxy authentication
+ from the proxy url
+ - Fix DigestAuth handling too many 401s
+ - Update vendored urllib3 to include SSL bug fixes
+ - Allow keyword arguments to be passed to ``json.loads()`` via the
+ ``Response.json()`` method
+ - Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
+ requests
+ - Add ``elapsed`` attribute to ``Response`` objects to time how long a request
+ took.
+ - Fix ``RequestsCookieJar``
+ - Sessions and Adapters are now picklable, i.e., can be used with the
+ multiprocessing library
+ - Update charade to version 1.0.3
+
+ The change in how hooks are dispatched will likely cause a great deal of
+ issues.
+
1.1.0 (2013-01-10)
++++++++++++++++++
@@ -145,7 +173,7 @@ Description: Requests: HTTP for Humans
- /s/prefetch/stream
- Removal of all configuration
- Standard library logging
- - Make Reponse.json() callable, not property.
+ - Make Response.json() callable, not property.
- Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
- Removal of all hooks except 'response'
- Removal of all authentication helpers (OAuth, Kerberos)
@@ -680,10 +708,10 @@ Description: Requests: HTTP for Humans
++++++++++++++++++
* New HTTPHandling Methods
- - Reponse.__nonzero__ (false if bad HTTP Status)
+ - Response.__nonzero__ (false if bad HTTP Status)
- Response.ok (True if expected HTTP Status)
- Response.error (Logged HTTPError if bad HTTP Status)
- - Reponse.raise_for_status() (Raises stored HTTPError)
+ - Response.raise_for_status() (Raises stored HTTPError)
0.2.2 (2011-02-14)
diff --git a/requests.egg-info/PKG-INFO b/requests.egg-info/PKG-INFO
index 39bb37f..69f0d3a 100644
--- a/requests.egg-info/PKG-INFO
+++ b/requests.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.1
Name: requests
-Version: 1.1.0
+Version: 1.2.0
Summary: Python HTTP for Humans.
Home-page: http://python-requests.org
Author: Kenneth Reitz
@@ -108,6 +108,34 @@ Description: Requests: HTTP for Humans
History
-------
+ 1.2.0 (2013-03-31)
+ ++++++++++++++++++
+
+ - Fixed cookies on sessions and on requests
+ - Significantly change how hooks are dispatched - hooks now receive all the
+ arguments specified by the user when making a request so hooks can make a
+ secondary request with the same parameters. This is especially necessary for
+ authentication handler authors
+ - certifi support was removed
+ - Fixed bug where using OAuth 1 with body ``signature_type`` sent no data
+ - Major proxy work thanks to @Lukasa including parsing of proxy authentication
+ from the proxy url
+ - Fix DigestAuth handling too many 401s
+ - Update vendored urllib3 to include SSL bug fixes
+ - Allow keyword arguments to be passed to ``json.loads()`` via the
+ ``Response.json()`` method
+ - Don't send ``Content-Length`` header by default on ``GET`` or ``HEAD``
+ requests
+ - Add ``elapsed`` attribute to ``Response`` objects to time how long a request
+ took.
+ - Fix ``RequestsCookieJar``
+ - Sessions and Adapters are now picklable, i.e., can be used with the
+ multiprocessing library
+ - Update charade to version 1.0.3
+
+ The change in how hooks are dispatched will likely cause a great deal of
+ issues.
+
1.1.0 (2013-01-10)
++++++++++++++++++
@@ -145,7 +173,7 @@ Description: Requests: HTTP for Humans
- /s/prefetch/stream
- Removal of all configuration
- Standard library logging
- - Make Reponse.json() callable, not property.
+ - Make Response.json() callable, not property.
- Usage of new charade project, which provides python 2 and 3 simultaneous chardet.
- Removal of all hooks except 'response'
- Removal of all authentication helpers (OAuth, Kerberos)
@@ -680,10 +708,10 @@ Description: Requests: HTTP for Humans
++++++++++++++++++
* New HTTPHandling Methods
- - Reponse.__nonzero__ (false if bad HTTP Status)
+ - Response.__nonzero__ (false if bad HTTP Status)
- Response.ok (True if expected HTTP Status)
- Response.error (Logged HTTPError if bad HTTP Status)
- - Reponse.raise_for_status() (Raises stored HTTPError)
+ - Response.raise_for_status() (Raises stored HTTPError)
0.2.2 (2011-02-14)
diff --git a/requests.egg-info/SOURCES.txt b/requests.egg-info/SOURCES.txt
index 022fda9..bd619c6 100644
--- a/requests.egg-info/SOURCES.txt
+++ b/requests.egg-info/SOURCES.txt
@@ -36,6 +36,7 @@ requests/packages/charade/charsetprober.py
requests/packages/charade/codingstatemachine.py
requests/packages/charade/compat.py
requests/packages/charade/constants.py
+requests/packages/charade/cp949prober.py
requests/packages/charade/escprober.py
requests/packages/charade/escsm.py
requests/packages/charade/eucjpprober.py
diff --git a/requests/__init__.py b/requests/__init__.py
index 7ea7e62..1ea4aff 100644
--- a/requests/__init__.py
+++ b/requests/__init__.py
@@ -42,8 +42,8 @@ is at <http://python-requests.org>.
"""
__title__ = 'requests'
-__version__ = '1.1.0'
-__build__ = 0x010100
+__version__ = '1.2.0'
+__build__ = 0x010200
__author__ = 'Kenneth Reitz'
__license__ = 'Apache 2.0'
__copyright__ = 'Copyright 2013 Kenneth Reitz'
diff --git a/requests/adapters.py b/requests/adapters.py
index 5f9d9c7..5666e66 100644
--- a/requests/adapters.py
+++ b/requests/adapters.py
@@ -11,12 +11,11 @@ and maintain connections.
import socket
from .models import Response
-from .packages.urllib3.poolmanager import PoolManager, proxy_from_url
+from .packages.urllib3.poolmanager import PoolManager, ProxyManager
from .packages.urllib3.response import HTTPResponse
-from .hooks import dispatch_hook
-from .compat import urlparse, basestring, urldefrag
+from .compat import urlparse, basestring, urldefrag, unquote
from .utils import (DEFAULT_CA_BUNDLE_PATH, get_encoding_from_headers,
- prepend_scheme_if_needed)
+ prepend_scheme_if_needed, get_auth_from_url)
from .structures import CaseInsensitiveDict
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import TimeoutError
@@ -24,6 +23,7 @@ from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .cookies import extract_cookies_to_jar
from .exceptions import ConnectionError, Timeout, SSLError
+from .auth import _basic_auth_str
DEFAULT_POOLSIZE = 10
DEFAULT_RETRIES = 0
@@ -44,15 +44,34 @@ class BaseAdapter(object):
class HTTPAdapter(BaseAdapter):
"""Built-In HTTP Adapter for Urllib3."""
+ __attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize']
+
def __init__(self, pool_connections=DEFAULT_POOLSIZE, pool_maxsize=DEFAULT_POOLSIZE):
self.max_retries = DEFAULT_RETRIES
self.config = {}
super(HTTPAdapter, self).__init__()
+ self._pool_connections = pool_connections
+ self._pool_maxsize = pool_maxsize
+
self.init_poolmanager(pool_connections, pool_maxsize)
+ def __getstate__(self):
+ return dict((attr, getattr(self, attr, None)) for attr in
+ self.__attrs__)
+
+ def __setstate__(self, state):
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+ self.init_poolmanager(self._pool_connections, self._pool_maxsize)
+
def init_poolmanager(self, connections, maxsize):
+ # save these values for pickling
+ self._pool_connections = connections
+ self._pool_maxsize = maxsize
+
self.poolmanager = PoolManager(num_pools=connections, maxsize=maxsize)
def cert_verify(self, conn, url, verify, cert):
@@ -109,8 +128,6 @@ class HTTPAdapter(BaseAdapter):
response.request = req
response.connection = self
- # Run the Response hook.
- response = dispatch_hook('response', req.hooks, response)
return response
def get_connection(self, url, proxies=None):
@@ -120,7 +137,7 @@ class HTTPAdapter(BaseAdapter):
if proxy:
proxy = prepend_scheme_if_needed(proxy, urlparse(url).scheme)
- conn = proxy_from_url(proxy)
+ conn = ProxyManager(self.poolmanager.connection_from_url(proxy))
else:
conn = self.poolmanager.connection_from_url(url)
@@ -149,6 +166,25 @@ class HTTPAdapter(BaseAdapter):
return url
+ def add_headers(self, request, **kwargs):
+ """Add any headers needed by the connection. Currently this adds a
+ Proxy-Authorization header."""
+ proxies = kwargs.get('proxies', {})
+
+ if proxies is None:
+ proxies = {}
+
+ proxy = proxies.get(urlparse(request.url).scheme)
+ username, password = get_auth_from_url(proxy)
+
+ if username and password:
+ # Proxy auth usernames and passwords will be urlencoded, we need
+ # to decode them.
+ username = unquote(username)
+ password = unquote(password)
+ request.headers['Proxy-Authorization'] = _basic_auth_str(username,
+ password)
+
def send(self, request, stream=False, timeout=None, verify=True, cert=None, proxies=None):
"""Sends PreparedRequest object. Returns Response object."""
@@ -156,6 +192,7 @@ class HTTPAdapter(BaseAdapter):
self.cert_verify(conn, request.url, verify, cert)
url = self.request_url(request, proxies)
+ self.add_headers(request, proxies=proxies)
chunked = not (request.body is None or 'Content-Length' in request.headers)
@@ -214,7 +251,7 @@ class HTTPAdapter(BaseAdapter):
elif isinstance(e, TimeoutError):
raise Timeout(e)
else:
- raise Timeout('Request timed out.')
+ raise
r = self.build_response(request, resp)
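
Two of the adapter changes above can be exercised without any network access.
A sketch, assuming only ``__attrs__`` needs to survive pickling (the pool
manager is rebuilt in ``__setstate__``); the proxy address below is made up::

    import pickle

    import requests
    from requests.adapters import HTTPAdapter

    # Adapters (and, per the changelog, Sessions) now survive a pickle
    # round trip; only __attrs__ is serialised and init_poolmanager()
    # runs again on unpickling.
    adapter = HTTPAdapter(pool_connections=5, pool_maxsize=5)
    restored = pickle.loads(pickle.dumps(adapter))
    assert restored._pool_maxsize == 5

    # URL-encoded credentials in a proxy URL are decoded and turned into
    # a Proxy-Authorization header by add_headers().
    req = requests.Request(method='GET', url='http://example.com/').prepare()
    adapter.add_headers(req, proxies={'http': 'http://user:p%40ss@127.0.0.1:3128'})
    print(req.headers['Proxy-Authorization'])  # Basic dXNlcjpwQHNz
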
diff --git a/requests/api.py b/requests/api.py
index 4a39211..baf43dd 100644
--- a/requests/api.py
+++ b/requests/api.py
@@ -73,6 +73,7 @@ def head(url, **kwargs):
:param \*\*kwargs: Optional arguments that ``request`` takes.
"""
+ kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs)
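
With ``allow_redirects`` now defaulting to ``False`` for ``head()``, the
redirect response itself is returned unless the caller opts back in. A small
sketch assuming httpbin.org is reachable::

    import requests

    r = requests.head('http://httpbin.org/redirect/1')
    print(r.status_code)   # 302: the redirect is returned, not followed

    r = requests.head('http://httpbin.org/redirect/1', allow_redirects=True)
    print(r.status_code)   # 200: previous behaviour, requested explicitly
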
diff --git a/requests/auth.py b/requests/auth.py
index 277e601..805f240 100644
--- a/requests/auth.py
+++ b/requests/auth.py
@@ -36,6 +36,7 @@ class AuthBase(object):
def __call__(self, r):
raise NotImplementedError('Auth hooks must be callable.')
+
class HTTPBasicAuth(AuthBase):
"""Attaches HTTP Basic Authentication to the given Request object."""
def __init__(self, username, password):
@@ -68,18 +69,21 @@ class HTTPDigestAuth(AuthBase):
realm = self.chal['realm']
nonce = self.chal['nonce']
qop = self.chal.get('qop')
- algorithm = self.chal.get('algorithm', 'MD5')
- opaque = self.chal.get('opaque', None)
+ algorithm = self.chal.get('algorithm')
+ opaque = self.chal.get('opaque')
- algorithm = algorithm.upper()
+ if algorithm is None:
+ _algorithm = 'MD5'
+ else:
+ _algorithm = algorithm.upper()
# lambdas assume digest modules are imported at the top level
- if algorithm == 'MD5':
+ if _algorithm == 'MD5':
def md5_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
return hashlib.md5(x).hexdigest()
hash_utf8 = md5_utf8
- elif algorithm == 'SHA':
+ elif _algorithm == 'SHA':
def sha_utf8(x):
if isinstance(x, str):
x = x.encode('utf-8')
@@ -126,25 +130,27 @@ class HTTPDigestAuth(AuthBase):
# XXX should the partial digests be encoded too?
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
- 'response="%s"' % (self.username, realm, nonce, path, respdig)
+ 'response="%s"' % (self.username, realm, nonce, path, respdig)
if opaque:
base += ', opaque="%s"' % opaque
+ if algorithm:
+ base += ', algorithm="%s"' % algorithm
if entdig:
base += ', digest="%s"' % entdig
- base += ', algorithm="%s"' % algorithm
if qop:
base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
return 'Digest %s' % (base)
- def handle_401(self, r):
+ def handle_401(self, r, **kwargs):
"""Takes the given response and tries digest-auth, if needed."""
- num_401_calls = r.request.hooks['response'].count(self.handle_401)
+ num_401_calls = getattr(self, 'num_401_calls', 1)
s_auth = r.headers.get('www-authenticate', '')
if 'digest' in s_auth.lower() and num_401_calls < 2:
+ setattr(self, 'num_401_calls', num_401_calls + 1)
self.chal = parse_dict_header(s_auth.replace('Digest ', ''))
# Consume content and release the original connection
@@ -153,11 +159,12 @@ class HTTPDigestAuth(AuthBase):
r.raw.release_conn()
r.request.headers['Authorization'] = self.build_digest_header(r.request.method, r.request.url)
- _r = r.connection.send(r.request)
+ _r = r.connection.send(r.request, **kwargs)
_r.history.append(r)
return _r
+ setattr(self, 'num_401_calls', 1)
return r
def __call__(self, r):
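
The digest handler now tracks its own ``num_401_calls`` counter instead of
counting hooks, so a rejected challenge ends after a single retry. A hedged
sketch against httpbin's digest endpoint (an external service)::

    import requests
    from requests.auth import HTTPDigestAuth

    url = 'http://httpbin.org/digest-auth/auth/user/passwd'

    # Correct credentials: the 401 challenge is answered once and succeeds.
    print(requests.get(url, auth=HTTPDigestAuth('user', 'passwd')).status_code)  # 200

    # Wrong credentials: a single 401 comes back instead of a retry loop.
    print(requests.get(url, auth=HTTPDigestAuth('user', 'wrong')).status_code)   # 401
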
diff --git a/requests/cacert.pem b/requests/cacert.pem
index 7da8447..504fdcc 100644
--- a/requests/cacert.pem
+++ b/requests/cacert.pem
@@ -1603,54 +1603,6 @@ vFcj4jjSm2jzVhKIT0J8uDHEtdvkyCE06UgRNe76x5JXxZ805Mf29w4LTJxoeHtxMcfrHuBnQfO3
oKfN5XozNmr6mis=
-----END CERTIFICATE-----
-TURKTRUST Certificate Services Provider Root 1
-==============================================
------BEGIN CERTIFICATE-----
-MIID+zCCAuOgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBtzE/MD0GA1UEAww2VMOcUktUUlVTVCBF
-bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGDAJUUjEP
-MA0GA1UEBwwGQU5LQVJBMVYwVAYDVQQKDE0oYykgMjAwNSBUw5xSS1RSVVNUIEJpbGdpIMSwbGV0
-acWfaW0gdmUgQmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLjAeFw0wNTA1MTMx
-MDI3MTdaFw0xNTAzMjIxMDI3MTdaMIG3MT8wPQYDVQQDDDZUw5xSS1RSVVNUIEVsZWt0cm9uaWsg
-U2VydGlmaWthIEhpem1ldCBTYcSfbGF5xLFjxLFzxLExCzAJBgNVBAYMAlRSMQ8wDQYDVQQHDAZB
-TktBUkExVjBUBgNVBAoMTShjKSAyMDA1IFTDnFJLVFJVU1QgQmlsZ2kgxLBsZXRpxZ9pbSB2ZSBC
-aWxpxZ9pbSBHw7x2ZW5sacSfaSBIaXptZXRsZXJpIEEuxZ4uMIIBIjANBgkqhkiG9w0BAQEFAAOC
-AQ8AMIIBCgKCAQEAylIF1mMD2Bxf3dJ7XfIMYGFbazt0K3gNfUW9InTojAPBxhEqPZW8qZSwu5GX
-yGl8hMW0kWxsE2qkVa2kheiVfrMArwDCBRj1cJ02i67L5BuBf5OI+2pVu32Fks66WJ/bMsW9Xe8i
-Si9BB35JYbOG7E6mQW6EvAPs9TscyB/C7qju6hJKjRTP8wrgUDn5CDX4EVmt5yLqS8oUBt5CurKZ
-8y1UiBAG6uEaPj1nH/vO+3yC6BFdSsG5FOpU2WabfIl9BJpiyelSPJ6c79L1JuTm5Rh8i27fbMx4
-W09ysstcP4wFjdFMjK2Sx+F4f2VsSQZQLJ4ywtdKxnWKWU51b0dewQIDAQABoxAwDjAMBgNVHRME
-BTADAQH/MA0GCSqGSIb3DQEBBQUAA4IBAQAV9VX/N5aAWSGk/KEVTCD21F/aAyT8z5Aa9CEKmu46
-sWrv7/hg0Uw2ZkUd82YCdAR7kjCo3gp2D++Vbr3JN+YaDayJSFvMgzbC9UZcWYJWtNX+I7TYVBxE
-q8Sn5RTOPEFhfEPmzcSBCYsk+1Ql1haolgxnB2+zUEfjHCQo3SqYpGH+2+oSN7wBGjSFvW5P55Fy
-B0SFHljKVETd96y5y4khctuPwGkplyqjrhgjlxxBKot8KsF8kOipKMDTkcatKIdAaLX/7KfS0zgY
-nNN9aV3wxqUeJBujR/xpB2jn5Jq07Q+hh4cCzofSSE7hvP/L8XKSRGQDJereW26fyfJOrN3H
------END CERTIFICATE-----
-
-TURKTRUST Certificate Services Provider Root 2
-==============================================
------BEGIN CERTIFICATE-----
-MIIEPDCCAySgAwIBAgIBATANBgkqhkiG9w0BAQUFADCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBF
-bGVrdHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJUUjEP
-MA0GA1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUg
-QmlsacWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwHhcN
-MDUxMTA3MTAwNzU3WhcNMTUwOTE2MTAwNzU3WjCBvjE/MD0GA1UEAww2VMOcUktUUlVTVCBFbGVr
-dHJvbmlrIFNlcnRpZmlrYSBIaXptZXQgU2HEn2xhecSxY8Sxc8SxMQswCQYDVQQGEwJUUjEPMA0G
-A1UEBwwGQW5rYXJhMV0wWwYDVQQKDFRUw5xSS1RSVVNUIEJpbGdpIMSwbGV0acWfaW0gdmUgQmls
-acWfaW0gR8O8dmVubGnEn2kgSGl6bWV0bGVyaSBBLsWeLiAoYykgS2FzxLFtIDIwMDUwggEiMA0G
-CSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCpNn7DkUNMwxmYCMjHWHtPFoylzkkBH3MOrHUTpvqe
-LCDe2JAOCtFp0if7qnefJ1Il4std2NiDUBd9irWCPwSOtNXwSadktx4uXyCcUHVPr+G1QRT0mJKI
-x+XlZEdhR3n9wFHxwZnn3M5q+6+1ATDcRhzviuyV79z/rxAc653YsKpqhRgNF8k+v/Gb0AmJQv2g
-QrSdiVFVKc8bcLyEVK3BEx+Y9C52YItdP5qtygy/p1Zbj3e41Z55SZI/4PGXJHpsmxcPbe9TmJEr
-5A++WXkHeLuXlfSfadRYhwqp48y2WBmfJiGxxFmNskF1wK1pzpwACPI2/z7woQ8arBT9pmAPAgMB
-AAGjQzBBMB0GA1UdDgQWBBTZN7NOBf3Zz58SFq62iS/rJTqIHDAPBgNVHQ8BAf8EBQMDBwYAMA8G
-A1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEFBQADggEBAHJglrfJ3NgpXiOFX7KzLXb7iNcX/ntt
-Rbj2hWyfIvwqECLsqrkw9qtY1jkQMZkpAL2JZkH7dN6RwRgLn7Vhy506vvWolKMiVW4XSf/SKfE4
-Jl3vpao6+XF75tpYHdN0wgH6PmlYX63LaL4ULptswLbcoCb6dxriJNoaN+BnrdFzgw2lGh1uEpJ+
-hGIAF728JRhX8tepb1mIvDS3LoV4nZbcFMMsilKbloxSZj2GFotHuFEJjOp9zYhys2AzsfAKRO8P
-9Qk3iCQOLGsgOqL6EfJANZxEaGM7rDNvY7wsu/LSy3Z9fYjYHcgFHW68lKlmjHdxx/qR+i9Rnuk5
-UrbnBEI=
------END CERTIFICATE-----
-
SwissSign Gold CA - G2
======================
-----BEGIN CERTIFICATE-----
diff --git a/requests/certs.py b/requests/certs.py
index 8148276..bc00826 100644
--- a/requests/certs.py
+++ b/requests/certs.py
@@ -14,17 +14,10 @@ packaged CA bundle.
import os.path
-certifi = None
-try:
- import certifi
-except ImportError:
- pass
def where():
"""Return the preferred certificate bundle."""
- if certifi:
- return certifi.where()
-
+ # vendored bundle inside Requests
return os.path.join(os.path.dirname(__file__), 'cacert.pem')
if __name__ == '__main__':
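
With certifi support removed, ``certs.where()`` always resolves to the bundle
vendored inside the package. A quick, network-free check::

    import os.path

    from requests import certs

    bundle = certs.where()
    assert os.path.basename(bundle) == 'cacert.pem'
    print(bundle)   # .../requests/cacert.pem
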
diff --git a/requests/compat.py b/requests/compat.py
index 5bd4fcb..bcf94b0 100644
--- a/requests/compat.py
+++ b/requests/compat.py
@@ -98,7 +98,6 @@ if is_py2:
numeric_types = (int, long, float)
-
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list
diff --git a/requests/cookies.py b/requests/cookies.py
index bd7289e..1235711 100644
--- a/requests/cookies.py
+++ b/requests/cookies.py
@@ -240,18 +240,28 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
"""Dict-like __getitem__() for compatibility with client code. Throws exception
if there are more than one cookie with name. In that case, use the more
explicit get() method instead. Caution: operation is O(n), not O(1)."""
+
return self._find_no_duplicates(name)
def __setitem__(self, name, value):
"""Dict-like __setitem__ for compatibility with client code. Throws exception
if there is already a cookie of that name in the jar. In that case, use the more
explicit set() method instead."""
+
self.set(name, value)
def __delitem__(self, name):
"""Deletes a cookie given a name. Wraps cookielib.CookieJar's remove_cookie_by_name()."""
remove_cookie_by_name(self, name)
+ def update(self, other):
+ """Updates this jar with cookies from another CookieJar or dict-like"""
+ if isinstance(other, cookielib.CookieJar):
+ for cookie in other:
+ self.set_cookie(cookie)
+ else:
+ super(RequestsCookieJar, self).update(other)
+
def _find(self, name, domain=None, path=None):
"""Requests uses this method internally to get cookie values. Takes as args name
and optional domain and path. Returns a cookie.value. If there are conflicting cookies,
@@ -297,8 +307,10 @@ class RequestsCookieJar(cookielib.CookieJar, collections.MutableMapping):
self._cookies_lock = threading.RLock()
def copy(self):
- """This is not implemented. Calling this will throw an exception."""
- raise NotImplementedError
+ """Return a copy of this RequestsCookieJar."""
+ new_cj = RequestsCookieJar()
+ new_cj.update(self)
+ return new_cj
def create_cookie(name, value, **kwargs):
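
``RequestsCookieJar.copy()`` now returns a real copy and the new ``update()``
accepts either another CookieJar or a plain dict. A minimal, network-free
sketch::

    from requests.cookies import RequestsCookieJar

    jar = RequestsCookieJar()
    jar.set('token', 'abc123', domain='example.com', path='/')

    other = jar.copy()          # previously raised NotImplementedError
    other.update({'session': 'xyz'})

    assert other['token'] == 'abc123'
    assert 'session' not in jar  # the copy is independent of the original
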
diff --git a/requests/exceptions.py b/requests/exceptions.py
index 6759af5..c0588f6 100644
--- a/requests/exceptions.py
+++ b/requests/exceptions.py
@@ -16,7 +16,11 @@ class RequestException(RuntimeError):
class HTTPError(RequestException):
"""An HTTP error occurred."""
- response = None
+
+ def __init__(self, *args, **kwargs):
+ """ Initializes HTTPError with optional `response` object. """
+ self.response = kwargs.pop('response', None)
+ super(HTTPError, self).__init__(*args, **kwargs)
class ConnectionError(RequestException):
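
``raise_for_status()`` (see the models.py hunk below) now passes the response
through this new keyword argument, so handlers can inspect it directly. Sketch
assuming httpbin.org is reachable::

    import requests
    from requests.exceptions import HTTPError

    r = requests.get('http://httpbin.org/status/404')
    try:
        r.raise_for_status()
    except HTTPError as exc:
        assert exc.response is r   # attached via HTTPError(..., response=r)
        print(exc)                 # 404 Client Error: ...
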
diff --git a/requests/hooks.py b/requests/hooks.py
index 6135033..5dfaf6b 100644
--- a/requests/hooks.py
+++ b/requests/hooks.py
@@ -16,6 +16,7 @@ Available hooks:
HOOKS = ['response']
+
def default_hooks():
hooks = {}
for event in HOOKS:
@@ -24,7 +25,8 @@ def default_hooks():
# TODO: response is the only one
-def dispatch_hook(key, hooks, hook_data):
+
+def dispatch_hook(key, hooks, hook_data, **kwargs):
"""Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or dict()
@@ -36,7 +38,7 @@ def dispatch_hook(key, hooks, hook_data):
hooks = [hooks]
for hook in hooks:
- _hook_data = hook(hook_data)
+ _hook_data = hook(hook_data, **kwargs)
if _hook_data is not None:
hook_data = _hook_data
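
Because ``dispatch_hook()`` now forwards keyword arguments, a response hook
receives the same send parameters (stream, timeout, verify, cert, proxies)
that produced the response and can replay the request with identical
settings. Sketch assuming httpbin.org is reachable::

    import requests

    def log_hook(response, **kwargs):
        # kwargs now carries stream/timeout/verify/cert/proxies.
        print(response.status_code, sorted(kwargs))
        return response

    requests.get('http://httpbin.org/get', hooks={'response': log_hook})
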
diff --git a/requests/models.py b/requests/models.py
index 5202e6f..6ed2b59 100644
--- a/requests/models.py
+++ b/requests/models.py
@@ -9,27 +9,26 @@ This module contains the primary objects that power Requests.
import collections
import logging
+import datetime
from io import BytesIO
from .hooks import default_hooks
from .structures import CaseInsensitiveDict
-from .status_codes import codes
from .auth import HTTPBasicAuth
from .cookies import cookiejar_from_dict, get_cookie_header
from .packages.urllib3.filepost import encode_multipart_formdata
from .exceptions import HTTPError, RequestException, MissingSchema, InvalidURL
from .utils import (
- stream_untransfer, guess_filename, requote_uri,
+ stream_untransfer, guess_filename, get_auth_from_url, requote_uri,
stream_decode_response_unicode, to_key_val_list, parse_header_links,
iter_slices, guess_json_utf, super_len)
from .compat import (
cookielib, urlparse, urlunparse, urlsplit, urlencode, str, bytes, StringIO,
is_py2, chardet, json, builtin_str, basestring)
-REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved)
CONTENT_CHUNK_SIZE = 10 * 1024
-ITER_CHUNK_SIZE = 10 * 1024
+ITER_CHUNK_SIZE = 512
log = logging.getLogger(__name__)
@@ -121,7 +120,7 @@ class RequestEncodingMixin(object):
fp = StringIO(fp)
if isinstance(fp, bytes):
fp = BytesIO(fp)
-
+
if ft:
new_v = (fn, fp.read(), ft)
else:
@@ -188,7 +187,6 @@ class Request(RequestHooksMixin):
cookies=None,
hooks=None):
-
# Default empty dicts for dict params.
data = [] if data is None else data
files = [] if files is None else files
@@ -222,9 +220,12 @@ class Request(RequestHooksMixin):
p.prepare_headers(self.headers)
p.prepare_cookies(self.cookies)
p.prepare_body(self.data, self.files)
+ p.prepare_auth(self.auth, self.url)
# Note that prepare_auth must be last to enable authentication schemes
# such as OAuth to work on a fully prepared request.
- p.prepare_auth(self.auth)
+
+ # This MUST go after prepare_auth. Authenticators could add a hook
+ p.prepare_hooks(self.hooks)
return p
@@ -283,7 +284,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
# Support for unicode domain names and paths.
scheme, netloc, path, _params, query, fragment = urlparse(url)
- if not scheme:
+ if not (scheme and netloc):
raise MissingSchema("Invalid URL %r: No schema supplied" % url)
try:
@@ -323,6 +324,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
"""Prepares the given HTTP headers."""
if headers:
+ headers = dict((name.encode('ascii'), value) for name, value in headers.items())
self.headers = CaseInsensitiveDict(headers)
else:
self.headers = CaseInsensitiveDict()
@@ -342,6 +344,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
is_stream = all([
hasattr(data, '__iter__'),
not isinstance(data, basestring),
+ not isinstance(data, list),
not isinstance(data, dict)
])
@@ -357,7 +360,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
raise NotImplementedError('Streamed bodies and files are mutually exclusive.')
if length:
- self.headers['Content-Length'] = length
+ self.headers['Content-Length'] = str(length)
else:
self.headers['Transfer-Encoding'] = 'chunked'
# Check if file, fo, generator, iterator.
@@ -375,13 +378,7 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
else:
content_type = 'application/x-www-form-urlencoded'
- self.headers['Content-Length'] = '0'
- if hasattr(body, 'seek') and hasattr(body, 'tell'):
- body.seek(0, 2)
- self.headers['Content-Length'] = str(body.tell())
- body.seek(0, 0)
- elif body is not None:
- self.headers['Content-Length'] = str(len(body))
+ self.prepare_content_length(body)
# Add content-type if it wasn't explicitly provided.
if (content_type) and (not 'content-type' in self.headers):
@@ -389,8 +386,26 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
self.body = body
- def prepare_auth(self, auth):
+ def prepare_content_length(self, body):
+ if hasattr(body, 'seek') and hasattr(body, 'tell'):
+ body.seek(0, 2)
+ self.headers['Content-Length'] = str(body.tell())
+ body.seek(0, 0)
+ elif body is not None:
+ l = super_len(body)
+ if l:
+ self.headers['Content-Length'] = str(l)
+ elif self.method not in ('GET', 'HEAD'):
+ self.headers['Content-Length'] = '0'
+
+ def prepare_auth(self, auth, url=''):
"""Prepares the given HTTP auth data."""
+
+ # If no Auth is explicitly provided, extract it from the URL first.
+ if auth is None:
+ url_auth = get_auth_from_url(self.url)
+ auth = url_auth if any(url_auth) else None
+
if auth:
if isinstance(auth, tuple) and len(auth) == 2:
# special-case basic HTTP auth
@@ -402,6 +417,9 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
+ # Recompute Content-Length
+ self.prepare_content_length(self.body)
+
def prepare_cookies(self, cookies):
"""Prepares the given HTTP cookie data."""
@@ -415,6 +433,11 @@ class PreparedRequest(RequestEncodingMixin, RequestHooksMixin):
if cookie_header is not None:
self.headers['Cookie'] = cookie_header
+ def prepare_hooks(self, hooks):
+ """Prepares the given hooks."""
+ for event in hooks:
+ self.register_hook(event, hooks[event])
+
class Response(object):
"""The :class:`Response <Response>` object, which contains a
@@ -456,6 +479,10 @@ class Response(object):
#: A CookieJar of Cookies the server sent back.
self.cookies = cookiejar_from_dict({})
+ #: The amount of time elapsed between sending the request
+ #: and the arrival of the response (as a timedelta)
+ self.elapsed = datetime.timedelta(0)
+
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
@@ -467,6 +494,10 @@ class Response(object):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
+ def __iter__(self):
+ """Allows you to use a response as an iterator."""
+ return self.iter_content(128)
+
@property
def ok(self):
try:
@@ -482,10 +513,11 @@ class Response(object):
return chardet.detect(self.content)['encoding']
def iter_content(self, chunk_size=1, decode_unicode=False):
- """Iterates over the response data. This avoids reading the content
- at once into memory for large responses. The chunk size is the number
- of bytes it should read into memory. This is not necessarily the
- length of each item returned as decoding can take place.
+ """Iterates over the response data. When stream=True is set on the
+ request, this avoids reading the content at once into memory for
+ large responses. The chunk size is the number of bytes it should
+ read into memory. This is not necessarily the length of each item
+ returned as decoding can take place.
"""
if self._content_consumed:
# simulate reading small chunks of the content
@@ -507,16 +539,15 @@ class Response(object):
return gen
def iter_lines(self, chunk_size=ITER_CHUNK_SIZE, decode_unicode=None):
- """Iterates over the response data, one line at a time. This
- avoids reading the content at once into memory for large
- responses.
+ """Iterates over the response data, one line at a time. When
+ stream=True is set on the request, this avoids reading the
+ content at once into memory for large responses.
"""
pending = None
- for chunk in self.iter_content(
- chunk_size=chunk_size,
- decode_unicode=decode_unicode):
+ for chunk in self.iter_content(chunk_size=chunk_size,
+ decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
@@ -590,8 +621,11 @@ class Response(object):
return content
- def json(self):
- """Returns the json-encoded content of a response, if any."""
+ def json(self, **kwargs):
+ """Returns the json-encoded content of a response, if any.
+
+ :param \*\*kwargs: Optional arguments that ``json.loads`` takes.
+ """
if not self.encoding and len(self.content) > 3:
# No encoding set. JSON RFC 4627 section 3 states we should expect
@@ -600,8 +634,8 @@ class Response(object):
# a best guess).
encoding = guess_json_utf(self.content)
if encoding is not None:
- return json.loads(self.content.decode(encoding))
- return json.loads(self.text or self.content)
+ return json.loads(self.content.decode(encoding), **kwargs)
+ return json.loads(self.text or self.content, **kwargs)
@property
def links(self):
@@ -622,7 +656,7 @@ class Response(object):
return l
def raise_for_status(self):
- """Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred."""
+ """Raises stored :class:`HTTPError`, if one occurred."""
http_error_msg = ''
@@ -633,9 +667,7 @@ class Response(object):
http_error_msg = '%s Server Error: %s' % (self.status_code, self.reason)
if http_error_msg:
- http_error = HTTPError(http_error_msg)
- http_error.response = self
- raise http_error
+ raise HTTPError(http_error_msg, response=self)
def close(self):
return self.raw.release_conn()
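
Two of the ``PreparedRequest`` changes above are easy to see without a
network: auth is now pulled out of the URL during ``prepare()``, and GET/HEAD
requests no longer receive a default ``Content-Length`` header::

    from requests import Request

    prep = Request(method='GET',
                   url='http://user:passwd@example.com/private').prepare()

    assert 'Authorization' in prep.headers         # from the URL credentials
    assert 'Content-Length' not in prep.headers    # omitted for GET/HEAD
    print(prep.headers['Authorization'])           # Basic dXNlcjpwYXNzd2Q=
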
diff --git a/requests/packages/charade/__init__.py b/requests/packages/charade/__init__.py
index 5d580b3..1aadf3e 100644
--- a/requests/packages/charade/__init__.py
+++ b/requests/packages/charade/__init__.py
@@ -15,10 +15,15 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-__version__ = "1.0.1"
+__version__ = "1.0.3"
+from sys import version_info
def detect(aBuf):
+ if ((version_info < (3, 0) and isinstance(aBuf, unicode)) or
+ (version_info >= (3, 0) and not isinstance(aBuf, bytes))):
+ raise ValueError('Expected a bytes object, not a unicode object')
+
from . import universaldetector
u = universaldetector.UniversalDetector()
u.reset()
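
``detect()`` now rejects already-decoded text up front instead of failing
somewhere inside the probers. A small, network-free sketch::

    from requests.packages import charade

    try:
        charade.detect(u'already decoded text')
    except ValueError as exc:
        print(exc)   # Expected a bytes object, not a unicode object

    # Bytes still work as before; a UTF-8 BOM gives a confident answer.
    print(charade.detect(b'\xef\xbb\xbfhello'))
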
diff --git a/requests/packages/charade/chardistribution.py b/requests/packages/charade/chardistribution.py
index 981bd1a..dfd3355 100644
--- a/requests/packages/charade/chardistribution.py
+++ b/requests/packages/charade/chardistribution.py
@@ -40,6 +40,7 @@ from .compat import wrap_ord
ENOUGH_DATA_THRESHOLD = 1024
SURE_YES = 0.99
SURE_NO = 0.01
+MINIMUM_DATA_THRESHOLD = 3
class CharDistributionAnalysis:
@@ -82,7 +83,7 @@ class CharDistributionAnalysis:
"""return confidence based on existing data"""
# if we didn't receive any character in our consideration range,
# return negative answer
- if self._mTotalChars <= 0:
+ if self._mTotalChars <= 0 or self._mFreqChars <= MINIMUM_DATA_THRESHOLD:
return SURE_NO
if self._mTotalChars != self._mFreqChars:
diff --git a/requests/packages/charade/compat.py b/requests/packages/charade/compat.py
index f86c46b..d9e30ad 100644
--- a/requests/packages/charade/compat.py
+++ b/requests/packages/charade/compat.py
@@ -18,9 +18,17 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
+import sys
+
+
+if sys.version_info < (3, 0):
+ base_str = (str, unicode)
+else:
+ base_str = (bytes, str)
+
def wrap_ord(a):
- if isinstance(a, str):
+ if sys.version_info < (3, 0) and isinstance(a, base_str):
return ord(a)
- elif isinstance(a, int):
+ else:
return a
diff --git a/requests/packages/charade/cp949prober.py b/requests/packages/charade/cp949prober.py
new file mode 100644
index 0000000..543501f
--- /dev/null
+++ b/requests/packages/charade/cp949prober.py
@@ -0,0 +1,44 @@
+######################## BEGIN LICENSE BLOCK ########################
+# The Original Code is mozilla.org code.
+#
+# The Initial Developer of the Original Code is
+# Netscape Communications Corporation.
+# Portions created by the Initial Developer are Copyright (C) 1998
+# the Initial Developer. All Rights Reserved.
+#
+# Contributor(s):
+# Mark Pilgrim - port to Python
+#
+# This library is free software; you can redistribute it and/or
+# modify it under the terms of the GNU Lesser General Public
+# License as published by the Free Software Foundation; either
+# version 2.1 of the License, or (at your option) any later version.
+#
+# This library is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+# Lesser General Public License for more details.
+#
+# You should have received a copy of the GNU Lesser General Public
+# License along with this library; if not, write to the Free Software
+# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
+# 02110-1301 USA
+######################### END LICENSE BLOCK #########################
+
+from .mbcharsetprober import MultiByteCharSetProber
+from .codingstatemachine import CodingStateMachine
+from .chardistribution import EUCKRDistributionAnalysis
+from .mbcssm import CP949SMModel
+
+
+class CP949Prober(MultiByteCharSetProber):
+ def __init__(self):
+ MultiByteCharSetProber.__init__(self)
+ self._mCodingSM = CodingStateMachine(CP949SMModel)
+ # NOTE: CP949 is a superset of EUC-KR, so the distribution should not
+ # be different.
+ self._mDistributionAnalyzer = EUCKRDistributionAnalysis()
+ self.reset()
+
+ def get_charset_name(self):
+ return "CP949"
diff --git a/requests/packages/charade/langcyrillicmodel.py b/requests/packages/charade/langcyrillicmodel.py
index 4b69c82..15e338f 100644
--- a/requests/packages/charade/langcyrillicmodel.py
+++ b/requests/packages/charade/langcyrillicmodel.py
@@ -25,8 +25,6 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-from . import constants
-
# KOI8-R language model
# Character Mapping Table:
KOI8R_CharToOrderMap = (
diff --git a/requests/packages/charade/langgreekmodel.py b/requests/packages/charade/langgreekmodel.py
index 78e9ce6..93241ce 100644
--- a/requests/packages/charade/langgreekmodel.py
+++ b/requests/packages/charade/langgreekmodel.py
@@ -25,8 +25,6 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-from . import constants
-
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
diff --git a/requests/packages/charade/langhebrewmodel.py b/requests/packages/charade/langhebrewmodel.py
index 4c6b3ce..d871324 100644
--- a/requests/packages/charade/langhebrewmodel.py
+++ b/requests/packages/charade/langhebrewmodel.py
@@ -27,8 +27,6 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-from . import constants
-
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
diff --git a/requests/packages/charade/langhungarianmodel.py b/requests/packages/charade/langhungarianmodel.py
index bd7f505..6f59c61 100644
--- a/requests/packages/charade/langhungarianmodel.py
+++ b/requests/packages/charade/langhungarianmodel.py
@@ -25,8 +25,6 @@
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
-from . import constants
-
# 255: Control characters that usually does not exist in any text
# 254: Carriage/Return
# 253: symbol (punctuation) that does not belong to word
diff --git a/requests/packages/charade/mbcsgroupprober.py b/requests/packages/charade/mbcsgroupprober.py
index ebe93d0..2f6f5e8 100644
--- a/requests/packages/charade/mbcsgroupprober.py
+++ b/requests/packages/charade/mbcsgroupprober.py
@@ -33,6 +33,7 @@ from .sjisprober import SJISProber
from .eucjpprober import EUCJPProber
from .gb2312prober import GB2312Prober
from .euckrprober import EUCKRProber
+from .cp949prober import CP949Prober
from .big5prober import Big5Prober
from .euctwprober import EUCTWProber
@@ -46,6 +47,7 @@ class MBCSGroupProber(CharSetGroupProber):
EUCJPProber(),
GB2312Prober(),
EUCKRProber(),
+ CP949Prober(),
Big5Prober(),
EUCTWProber()
]
diff --git a/requests/packages/charade/mbcssm.py b/requests/packages/charade/mbcssm.py
index 3a720c9..55c02f0 100644
--- a/requests/packages/charade/mbcssm.py
+++ b/requests/packages/charade/mbcssm.py
@@ -78,6 +78,46 @@ Big5SMModel = {'classTable': BIG5_cls,
'charLenTable': Big5CharLenTable,
'name': 'Big5'}
+# CP949
+
+CP949_cls = (
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,0,0, # 00 - 0f
+ 1,1,1,1,1,1,1,1, 1,1,1,0,1,1,1,1, # 10 - 1f
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 20 - 2f
+ 1,1,1,1,1,1,1,1, 1,1,1,1,1,1,1,1, # 30 - 3f
+ 1,4,4,4,4,4,4,4, 4,4,4,4,4,4,4,4, # 40 - 4f
+ 4,4,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 50 - 5f
+ 1,5,5,5,5,5,5,5, 5,5,5,5,5,5,5,5, # 60 - 6f
+ 5,5,5,5,5,5,5,5, 5,5,5,1,1,1,1,1, # 70 - 7f
+ 0,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 80 - 8f
+ 6,6,6,6,6,6,6,6, 6,6,6,6,6,6,6,6, # 90 - 9f
+ 6,7,7,7,7,7,7,7, 7,7,7,7,7,8,8,8, # a0 - af
+ 7,7,7,7,7,7,7,7, 7,7,7,7,7,7,7,7, # b0 - bf
+ 7,7,7,7,7,7,9,2, 2,3,2,2,2,2,2,2, # c0 - cf
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # d0 - df
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,2, # e0 - ef
+ 2,2,2,2,2,2,2,2, 2,2,2,2,2,2,2,0, # f0 - ff
+)
+
+CP949_st = (
+#cls= 0 1 2 3 4 5 6 7 8 9 # previous state =
+ eError,eStart, 3,eError,eStart,eStart, 4, 5,eError, 6, # eStart
+ eError,eError,eError,eError,eError,eError,eError,eError,eError,eError, # eError
+ eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe,eItsMe, # eItsMe
+ eError,eError,eStart,eStart,eError,eError,eError,eStart,eStart,eStart, # 3
+ eError,eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 4
+ eError,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart,eStart, # 5
+ eError,eStart,eStart,eStart,eStart,eError,eError,eStart,eStart,eStart, # 6
+)
+
+CP949CharLenTable = (0, 1, 2, 0, 1, 1, 2, 2, 0, 2)
+
+CP949SMModel = {'classTable': CP949_cls,
+ 'classFactor': 10,
+ 'stateTable': CP949_st,
+ 'charLenTable': CP949CharLenTable,
+ 'name': 'CP949'}
+
# EUC-JP
EUCJP_cls = (
diff --git a/requests/packages/charade/universaldetector.py b/requests/packages/charade/universaldetector.py
index adaae72..6175bfb 100644
--- a/requests/packages/charade/universaldetector.py
+++ b/requests/packages/charade/universaldetector.py
@@ -28,6 +28,7 @@
from . import constants
import sys
+import codecs
from .latin1prober import Latin1Prober # windows-1252
from .mbcsgroupprober import MBCSGroupProber # multi-byte character sets
from .sbcsgroupprober import SBCSGroupProber # single-byte character sets
@@ -70,31 +71,31 @@ class UniversalDetector:
if not self._mGotData:
# If the data starts with BOM, we know it is UTF
- if aBuf[:3] == '\xEF\xBB\xBF':
+ if aBuf[:3] == codecs.BOM:
# EF BB BF UTF-8 with BOM
self.result = {'encoding': "UTF-8", 'confidence': 1.0}
- elif aBuf[:4] == '\xFF\xFE\x00\x00':
+ elif aBuf[:4] == codecs.BOM_UTF32_LE:
# FF FE 00 00 UTF-32, little-endian BOM
self.result = {'encoding': "UTF-32LE", 'confidence': 1.0}
- elif aBuf[:4] == '\x00\x00\xFE\xFF':
+ elif aBuf[:4] == codecs.BOM_UTF32_BE:
# 00 00 FE FF UTF-32, big-endian BOM
self.result = {'encoding': "UTF-32BE", 'confidence': 1.0}
- elif aBuf[:4] == '\xFE\xFF\x00\x00':
+ elif aBuf[:4] == b'\xFE\xFF\x00\x00':
# FE FF 00 00 UCS-4, unusual octet order BOM (3412)
self.result = {
'encoding': "X-ISO-10646-UCS-4-3412",
'confidence': 1.0
}
- elif aBuf[:4] == '\x00\x00\xFF\xFE':
+ elif aBuf[:4] == b'\x00\x00\xFF\xFE':
# 00 00 FF FE UCS-4, unusual octet order BOM (2143)
self.result = {
'encoding': "X-ISO-10646-UCS-4-2143",
'confidence': 1.0
}
- elif aBuf[:2] == '\xFF\xFE':
+ elif aBuf[:2] == codecs.BOM_LE:
# FF FE UTF-16, little endian BOM
self.result = {'encoding': "UTF-16LE", 'confidence': 1.0}
- elif aBuf[:2] == '\xFE\xFF':
+ elif aBuf[:2] == codecs.BOM_BE:
# FE FF UTF-16, big endian BOM
self.result = {'encoding': "UTF-16BE", 'confidence': 1.0}
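
The BOM checks now compare against the ``codecs`` constants, so they behave
the same for ``bytes`` on Python 3 as for ``str`` on Python 2. A quick
sketch::

    import codecs

    from requests.packages.charade.universaldetector import UniversalDetector

    detector = UniversalDetector()
    detector.feed(codecs.BOM_UTF16_LE + u'hello'.encode('utf-16-le'))
    detector.close()
    print(detector.result)   # {'encoding': 'UTF-16LE', 'confidence': 1.0}
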
diff --git a/requests/packages/urllib3/connectionpool.py b/requests/packages/urllib3/connectionpool.py
index af8760d..51c87f5 100644
--- a/requests/packages/urllib3/connectionpool.py
+++ b/requests/packages/urllib3/connectionpool.py
@@ -9,6 +9,7 @@ import socket
import errno
from socket import error as SocketError, timeout as SocketTimeout
+from .util import resolve_cert_reqs, resolve_ssl_version
try: # Python 3
from http.client import HTTPConnection, HTTPException
@@ -80,31 +81,29 @@ class VerifiedHTTPSConnection(HTTPSConnection):
ssl_version = None
def set_cert(self, key_file=None, cert_file=None,
- cert_reqs='CERT_NONE', ca_certs=None):
- ssl_req_scheme = {
- 'CERT_NONE': ssl.CERT_NONE,
- 'CERT_OPTIONAL': ssl.CERT_OPTIONAL,
- 'CERT_REQUIRED': ssl.CERT_REQUIRED
- }
+ cert_reqs=None, ca_certs=None):
self.key_file = key_file
self.cert_file = cert_file
- self.cert_reqs = ssl_req_scheme.get(cert_reqs) or ssl.CERT_NONE
+ self.cert_reqs = cert_reqs
self.ca_certs = ca_certs
def connect(self):
# Add certificate verification
sock = socket.create_connection((self.host, self.port), self.timeout)
+ resolved_cert_reqs = resolve_cert_reqs(self.cert_reqs)
+ resolved_ssl_version = resolve_ssl_version(self.ssl_version)
+
# Wrap socket using verification with the root certs in
# trusted_root_certs
self.sock = ssl_wrap_socket(sock, self.key_file, self.cert_file,
- cert_reqs=self.cert_reqs,
+ cert_reqs=resolved_cert_reqs,
ca_certs=self.ca_certs,
server_hostname=self.host,
- ssl_version=self.ssl_version)
+ ssl_version=resolved_ssl_version)
- if self.ca_certs:
+ if resolved_cert_reqs != ssl.CERT_NONE:
match_hostname(self.sock.getpeercert(), self.host)
@@ -514,7 +513,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
strict=False, timeout=None, maxsize=1,
block=False, headers=None,
key_file=None, cert_file=None,
- cert_reqs='CERT_NONE', ca_certs=None, ssl_version=None):
+ cert_reqs=None, ca_certs=None, ssl_version=None):
HTTPConnectionPool.__init__(self, host, port,
strict, timeout, maxsize,
@@ -548,10 +547,7 @@ class HTTPSConnectionPool(HTTPConnectionPool):
connection.set_cert(key_file=self.key_file, cert_file=self.cert_file,
cert_reqs=self.cert_reqs, ca_certs=self.ca_certs)
- if self.ssl_version is None:
- connection.ssl_version = ssl.PROTOCOL_SSLv23
- else:
- connection.ssl_version = self.ssl_version
+ connection.ssl_version = self.ssl_version
return connection
diff --git a/requests/packages/urllib3/poolmanager.py b/requests/packages/urllib3/poolmanager.py
index a124202..6e7377c 100644
--- a/requests/packages/urllib3/poolmanager.py
+++ b/requests/packages/urllib3/poolmanager.py
@@ -58,6 +58,17 @@ class PoolManager(RequestMethods):
self.pools = RecentlyUsedContainer(num_pools,
dispose_func=lambda p: p.close())
+ def _new_pool(self, scheme, host, port):
+ """
+ Create a new :class:`ConnectionPool` based on host, port and scheme.
+
+ This method is used to actually create the connection pools handed out
+ by :meth:`connection_from_url` and companion methods. It is intended
+ to be overridden for customization.
+ """
+ pool_cls = pool_classes_by_scheme[scheme]
+ return pool_cls(host, port, **self.connection_pool_kw)
+
def clear(self):
"""
Empty our store of pools and direct them all to close.
@@ -74,6 +85,7 @@ class PoolManager(RequestMethods):
If ``port`` isn't given, it will be derived from the ``scheme`` using
``urllib3.connectionpool.port_by_scheme``.
"""
+ scheme = scheme or 'http'
port = port or port_by_scheme.get(scheme, 80)
pool_key = (scheme, host, port)
@@ -85,11 +97,8 @@ class PoolManager(RequestMethods):
return pool
# Make a fresh ConnectionPool of the desired type
- pool_cls = pool_classes_by_scheme[scheme]
- pool = pool_cls(host, port, **self.connection_pool_kw)
-
+ pool = self._new_pool(scheme, host, port)
self.pools[pool_key] = pool
-
return pool
def connection_from_url(self, url):
@@ -138,14 +147,24 @@ class PoolManager(RequestMethods):
class ProxyManager(RequestMethods):
"""
Given a ConnectionPool to a proxy, the ProxyManager's ``urlopen`` method
- will make requests to any url through the defined proxy.
+ will make requests to any url through the defined proxy. The ProxyManager
+ class will automatically set the 'Host' header if it is not provided.
"""
def __init__(self, proxy_pool):
self.proxy_pool = proxy_pool
- def _set_proxy_headers(self, headers=None):
+ def _set_proxy_headers(self, url, headers=None):
+ """
+ Sets headers needed by proxies: specifically, the Accept and Host
+ headers. Only sets headers not provided by the user.
+ """
headers_ = {'Accept': '*/*'}
+
+ host = parse_url(url).host
+ if host:
+ headers_['Host'] = host
+
if headers:
headers_.update(headers)
@@ -154,7 +173,7 @@ class ProxyManager(RequestMethods):
def urlopen(self, method, url, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
kw['assert_same_host'] = False
- kw['headers'] = self._set_proxy_headers(kw.get('headers'))
+ kw['headers'] = self._set_proxy_headers(url, headers=kw.get('headers'))
return self.proxy_pool.urlopen(method, url, **kw)
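
Besides the ``Host`` header that ``ProxyManager`` now fills in, the new
``_new_pool()`` hook is the documented place to customise pool creation. A
purely illustrative subclass (no network needed, pools are created lazily)::

    from requests.packages.urllib3.poolmanager import PoolManager
    from requests.packages.urllib3.connectionpool import HTTPConnectionPool

    class LoggingPoolManager(PoolManager):
        def _new_pool(self, scheme, host, port):
            # Log every pool handed out by connection_from_url().
            print('creating %s pool for %s:%d' % (scheme, host, port))
            return super(LoggingPoolManager, self)._new_pool(scheme, host, port)

    manager = LoggingPoolManager(num_pools=4)
    pool = manager.connection_from_url('http://example.com/')
    assert isinstance(pool, HTTPConnectionPool)
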
diff --git a/requests/packages/urllib3/response.py b/requests/packages/urllib3/response.py
index 833be62..0761dc0 100644
--- a/requests/packages/urllib3/response.py
+++ b/requests/packages/urllib3/response.py
@@ -145,7 +145,17 @@ class HTTPResponse(object):
# cStringIO doesn't like amt=None
data = self._fp.read()
else:
- return self._fp.read(amt)
+ data = self._fp.read(amt)
+ if amt != 0 and not data: # Platform-specific: Buggy versions of Python.
+ # Close the connection when no data is returned
+ #
+ # This is redundant to what httplib/http.client _should_
+ # already do. However, versions of python released before
+ # December 15, 2012 (http://bugs.python.org/issue16298) do not
+ # properly close the connection in all cases. There is no harm
+ # in redundantly calling close.
+ self._fp.close()
+ return data
try:
if decode_content and decoder:
diff --git a/requests/packages/urllib3/util.py b/requests/packages/urllib3/util.py
index 8d8654f..b827bc4 100644
--- a/requests/packages/urllib3/util.py
+++ b/requests/packages/urllib3/util.py
@@ -22,6 +22,7 @@ try: # Test for SSL features
SSLContext = None
HAS_SNI = False
+ import ssl
from ssl import wrap_socket, CERT_NONE, SSLError, PROTOCOL_SSLv23
from ssl import SSLContext # Modern SSL?
from ssl import HAS_SNI # Has SNI?
@@ -263,10 +264,48 @@ def is_connection_dropped(conn):
return True
+def resolve_cert_reqs(candidate):
+ """
+ Resolves the argument to a numeric constant, which can be passed to
+ the wrap_socket function/method from the ssl module.
+ Defaults to :data:`ssl.CERT_NONE`.
+ If given a string it is assumed to be the name of the constant in the
+ :mod:`ssl` module or its abbreviation.
+ (So you can specify `REQUIRED` instead of `CERT_REQUIRED`.)
+ If it's neither `None` nor a string we assume it is already the numeric
+ constant which can directly be passed to wrap_socket.
+ """
+ if candidate is None:
+ return CERT_NONE
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, 'CERT_' + candidate)
+ return res
+
+ return candidate
+
+
+def resolve_ssl_version(candidate):
+ """
+ like resolve_cert_reqs
+ """
+ if candidate is None:
+ return PROTOCOL_SSLv23
+
+ if isinstance(candidate, str):
+ res = getattr(ssl, candidate, None)
+ if res is None:
+ res = getattr(ssl, 'PROTOCOL_' + candidate)
+ return res
+
+ return candidate
+
if SSLContext is not None: # Python 3.2+
- def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=CERT_NONE,
+ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ca_certs=None, server_hostname=None,
- ssl_version=PROTOCOL_SSLv23):
+ ssl_version=None):
"""
All arguments except `server_hostname` have the same meaning as for
:func:`ssl.wrap_socket`
@@ -279,8 +318,9 @@ if SSLContext is not None: # Python 3.2+
if ca_certs:
try:
context.load_verify_locations(ca_certs)
- except TypeError as e: # Reraise as SSLError
- # FIXME: This block needs a test.
+ # Py32 raises IOError
+ # Py33 raises FileNotFoundError
+ except Exception as e: # Reraise as SSLError
raise SSLError(e)
if certfile:
# FIXME: This block needs a test.
@@ -290,9 +330,9 @@ if SSLContext is not None: # Python 3.2+
return context.wrap_socket(sock)
else: # Python 3.1 and earlier
- def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=CERT_NONE,
+ def ssl_wrap_socket(sock, keyfile=None, certfile=None, cert_reqs=None,
ca_certs=None, server_hostname=None,
- ssl_version=PROTOCOL_SSLv23):
+ ssl_version=None):
return wrap_socket(sock, keyfile=keyfile, certfile=certfile,
ca_certs=ca_certs, cert_reqs=cert_reqs,
ssl_version=ssl_version)
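
The new resolver helpers accept ``None``, an abbreviated name, the full
constant name, or the numeric constant itself. A network-free sketch::

    import ssl

    from requests.packages.urllib3.util import resolve_cert_reqs, resolve_ssl_version

    assert resolve_cert_reqs(None) == ssl.CERT_NONE
    assert resolve_cert_reqs('REQUIRED') == ssl.CERT_REQUIRED
    assert resolve_cert_reqs('CERT_REQUIRED') == ssl.CERT_REQUIRED

    assert resolve_ssl_version(None) == ssl.PROTOCOL_SSLv23
    assert resolve_ssl_version('SSLv23') == ssl.PROTOCOL_SSLv23
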
diff --git a/requests/sessions.py b/requests/sessions.py
index d65877c..de0d9d6 100644
--- a/requests/sessions.py
+++ b/requests/sessions.py
@@ -9,11 +9,12 @@ requests (cookies, auth, proxies).
"""
import os
+from datetime import datetime
from .compat import cookielib
from .cookies import cookiejar_from_dict
-from .models import Request
-from .hooks import dispatch_hook, default_hooks
+from .models import Request, PreparedRequest
+from .hooks import default_hooks, dispatch_hook
from .utils import from_key_val_list, default_headers
from .exceptions import TooManyRedirects, InvalidSchema
@@ -23,7 +24,12 @@ from .adapters import HTTPAdapter
from .utils import requote_uri, get_environ_proxies, get_netrc_auth
from .status_codes import codes
-REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved)
+REDIRECT_STATI = (
+ codes.moved, # 301
+ codes.found, # 302
+ codes.other, # 303
+ codes.temporary_moved, # 307
+)
DEFAULT_REDIRECT_LIMIT = 30
@@ -73,11 +79,21 @@ def merge_kwargs(local_kwarg, default_kwarg):
class SessionRedirectMixin(object):
-
- def resolve_redirects(self, resp, req, stream=False, timeout=None, verify=True, cert=None, proxies=None):
+ def resolve_redirects(self, resp, req, stream=False, timeout=None,
+ verify=True, cert=None, proxies=None):
"""Receives a Response. Returns a generator of Responses."""
i = 0
+ prepared_request = PreparedRequest()
+ prepared_request.body = req.body
+ prepared_request.headers = req.headers.copy()
+ prepared_request.hooks = req.hooks
+ prepared_request.method = req.method
+ prepared_request.url = req.url
+
+ cookiejar = cookiejar_from_dict({})
+ cookiejar.update(self.cookies)
+ cookiejar.update(resp.cookies)
# ((resp.status_code is codes.see_other))
while (('location' in resp.headers and resp.status_code in REDIRECT_STATI)):
@@ -91,7 +107,7 @@ class SessionRedirectMixin(object):
resp.close()
url = resp.headers['location']
- method = req.method
+ method = prepared_request.method
# Handle redirection without scheme (see: RFC 1808 Section 4)
if url.startswith('//'):
@@ -104,38 +120,52 @@ class SessionRedirectMixin(object):
# Compliant with RFC3986, we percent encode the url.
url = urljoin(resp.url, requote_uri(url))
+ prepared_request.url = url
+
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
- if resp.status_code is codes.see_other and req.method != 'HEAD':
+ if (resp.status_code == codes.see_other and
+ prepared_request.method != 'HEAD'):
method = 'GET'
# Do what the browsers do, despite standards...
- if resp.status_code in (codes.moved, codes.found) and req.method == 'POST':
+ if (resp.status_code in (codes.moved, codes.found) and
+ prepared_request.method == 'POST'):
method = 'GET'
- # Remove the cookie headers that were sent.
- headers = req.headers
+ prepared_request.method = method
+
+ # https://github.com/kennethreitz/requests/issues/1084
+ if resp.status_code not in (codes.temporary, codes.resume):
+ if 'Content-Length' in prepared_request.headers:
+ del prepared_request.headers['Content-Length']
+
+ prepared_request.body = None
+
+ headers = prepared_request.headers
try:
del headers['Cookie']
except KeyError:
pass
- resp = self.request(
- url=url,
- method=method,
- headers=headers,
- auth=req.auth,
- cookies=req.cookies,
- allow_redirects=False,
- stream=stream,
- timeout=timeout,
- verify=verify,
- cert=cert,
- proxies=proxies
- )
+ prepared_request.prepare_cookies(cookiejar)
+
+ resp = self.send(
+ prepared_request,
+ stream=stream,
+ timeout=timeout,
+ verify=verify,
+ cert=cert,
+ proxies=proxies,
+ allow_redirects=False,
+ )
+
+ cookiejar.update(resp.cookies)
i += 1
yield resp
+ resp.cookies.update(cookiejar)
+
class Session(SessionRedirectMixin):
"""A Requests session.
@@ -150,6 +180,11 @@ class Session(SessionRedirectMixin):
200
"""
+ __attrs__ = [
+ 'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks',
+ 'params', 'verify', 'cert', 'prefetch', 'adapters', 'stream',
+ 'trust_env', 'max_redirects']
+
def __init__(self):
#: A case-insensitive dictionary of headers to be sent on each
@@ -217,6 +252,39 @@ class Session(SessionRedirectMixin):
stream=None,
verify=None,
cert=None):
+ """Constructs a :class:`Request <Request>`, prepares it and sends it.
+ Returns :class:`Response <Response>` object.
+
+ :param method: method for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query
+ string for the :class:`Request`.
+ :param data: (optional) Dictionary or bytes to send in the body of the
+ :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the
+ :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the
+ :class:`Request`.
+ :param files: (optional) Dictionary of 'filename': file-like-objects
+ for multipart encoding upload.
+ :param auth: (optional) Auth tuple or callable to enable
+ Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) Float describing the timeout of the
+ request.
+ :param allow_redirects: (optional) Boolean. Set to True by default.
+ :param proxies: (optional) Dictionary mapping protocol to the URL of
+ the proxy.
+ :param return_response: (optional) If False, an un-sent Request object
+ will be returned.
+ :param config: (optional) A configuration dictionary. See
+ ``request.defaults`` for allowed keys and their default values.
+ :param prefetch: (optional) whether to immediately download the response
+ content. Defaults to ``True``.
+ :param verify: (optional) if ``True``, the SSL cert will be verified.
+ A CA_BUNDLE path can also be provided.
+ :param cert: (optional) if String, path to ssl client cert file (.pem).
+ If Tuple, ('cert', 'key') pair.
+ """
cookies = cookies or {}
proxies = proxies or {}
@@ -225,9 +293,10 @@ class Session(SessionRedirectMixin):
if not isinstance(cookies, cookielib.CookieJar):
cookies = cookiejar_from_dict(cookies)
- # Bubble down session cookies.
- for cookie in self.cookies:
- cookies.set_cookie(cookie)
+ # Merge with session cookies
+ merged_cookies = self.cookies.copy()
+ merged_cookies.update(cookies)
+ cookies = merged_cookies
# Gather clues from the surrounding environment.
if self.trust_env:
@@ -248,7 +317,6 @@ class Session(SessionRedirectMixin):
if not verify and verify is not False:
verify = os.environ.get('CURL_CA_BUNDLE')
-
# Merge all the kwargs.
params = merge_kwargs(params, self.params)
headers = merge_kwargs(headers, self.headers)
@@ -259,7 +327,6 @@ class Session(SessionRedirectMixin):
verify = merge_kwargs(verify, self.verify)
cert = merge_kwargs(cert, self.cert)
-
# Create the Request.
req = Request()
req.method = method.upper()
@@ -276,26 +343,18 @@ class Session(SessionRedirectMixin):
prep = req.prepare()
# Send the request.
- resp = self.send(prep, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
+ send_kwargs = {
+ 'stream': stream,
+ 'timeout': timeout,
+ 'verify': verify,
+ 'cert': cert,
+ 'proxies': proxies,
+ 'allow_redirects': allow_redirects,
+ }
+ resp = self.send(prep, **send_kwargs)
# Persist cookies.
- for cookie in resp.cookies:
- self.cookies.set_cookie(cookie)
-
- # Redirect resolving generator.
- gen = self.resolve_redirects(resp, req, stream=stream, timeout=timeout, verify=verify, cert=cert, proxies=proxies)
-
- # Resolve redirects if allowed.
- history = [r for r in gen] if allow_redirects else []
-
- # Shuffle things around if there's history.
- if history:
- history.insert(0, resp)
- resp = history.pop()
- resp.history = tuple(history)
-
- # Response manipulation hook.
- self.response = dispatch_hook('response', hooks, resp)
+ self.cookies.update(resp.cookies)
return resp
@@ -370,8 +429,57 @@ class Session(SessionRedirectMixin):
def send(self, request, **kwargs):
"""Send a given PreparedRequest."""
+ # Set defaults that the hooks can utilize to ensure they always have
+ # the correct parameters to reproduce the previous request.
+ kwargs.setdefault('stream', self.stream)
+ kwargs.setdefault('verify', self.verify)
+ kwargs.setdefault('cert', self.cert)
+ kwargs.setdefault('proxies', self.proxies)
+
+ # It's possible that users might accidentally send a Request object.
+ # Guard against that specific failure case.
+ if getattr(request, 'prepare', None):
+ raise ValueError('You can only send PreparedRequests.')
+
+ # Set up variables needed for resolve_redirects and dispatching of
+ # hooks
+ allow_redirects = kwargs.pop('allow_redirects', True)
+ stream = kwargs.get('stream')
+ timeout = kwargs.get('timeout')
+ verify = kwargs.get('verify')
+ cert = kwargs.get('cert')
+ proxies = kwargs.get('proxies')
+ hooks = request.hooks
+
+ # Get the appropriate adapter to use
adapter = self.get_adapter(url=request.url)
+
+ # Start time (approximately) of the request
+ start = datetime.utcnow()
+ # Send the request
r = adapter.send(request, **kwargs)
+ # Total elapsed time of the request (approximately)
+ r.elapsed = datetime.utcnow() - start
+
+ # Response manipulation hooks
+ r = dispatch_hook('response', hooks, r, **kwargs)
+
+ # Redirect resolving generator.
+ gen = self.resolve_redirects(r, request, stream=stream,
+ timeout=timeout, verify=verify, cert=cert,
+ proxies=proxies)
+
+ # Resolve redirects if allowed.
+ history = [resp for resp in gen] if allow_redirects else []
+
+ # Shuffle things around if there's history.
+ if history:
+ # Insert the first (original) response at the start
+ history.insert(0, r)
+ # Get the last response, which is the one we return
+ r = history.pop()
+ r.history = tuple(history)
+
return r
def get_adapter(self, url):
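
A minimal sketch (not part of the commit) of how the reworked Session.send() behaves from the caller's side: hooks now receive the keyword arguments used for the send, the elapsed attribute is stamped around the adapter call, and redirects are resolved inside send() itself. The URL and hook name below are illustrative.

    import requests

    def log_hook(response, **kwargs):
        # Hooks are now handed the send() keyword arguments (stream, verify,
        # cert, proxies), so e.g. an auth handler can replay the request
        # with identical settings.
        print(sorted(kwargs))
        return response

    session = requests.Session()
    prepared = requests.Request('GET', 'http://httpbin.org/get',
                                hooks={'response': log_hook}).prepare()

    response = session.send(prepared, allow_redirects=True)
    print(response.elapsed)   # datetime.timedelta measured around adapter.send()
    print(response.history)   # redirects, if any, are resolved inside send()
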
diff --git a/requests/structures.py b/requests/structures.py
index 6c2e0b2..05f5ac1 100644
--- a/requests/structures.py
+++ b/requests/structures.py
@@ -11,6 +11,7 @@ Data structures that power Requests.
import os
from itertools import islice
+
class IteratorProxy(object):
"""docstring for IteratorProxy"""
def __init__(self, i):
@@ -31,6 +32,7 @@ class IteratorProxy(object):
def read(self, n):
return "".join(islice(self.i, None, n))
+
class CaseInsensitiveDict(dict):
"""Case-insensitive Dictionary
diff --git a/requests/utils.py b/requests/utils.py
index f5f6b95..a2d434e 100644
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -28,10 +28,9 @@ _hush_pyflakes = (RequestsCookieJar,)
NETRC_FILES = ('.netrc', '_netrc')
-# if certifi is installed, use its CA bundle;
-# otherwise, try and use the OS bundle
DEFAULT_CA_BUNDLE_PATH = certs.where()
+
def dict_to_sequence(d):
"""Returns an internal sequence dictionary update."""
@@ -40,6 +39,7 @@ def dict_to_sequence(d):
return d
+
def super_len(o):
if hasattr(o, '__len__'):
return len(o)
@@ -48,6 +48,7 @@ def super_len(o):
if hasattr(o, 'fileno'):
return os.fstat(o.fileno()).st_size
+
def get_netrc_auth(url):
"""Returns the Requests tuple auth for a given url from netrc."""
@@ -88,7 +89,7 @@ def guess_filename(obj):
"""Tries to guess the filename of the given object."""
name = getattr(obj, 'name', None)
if name and name[0] != '<' and name[-1] != '>':
- return name
+ return os.path.basename(name)
def from_key_val_list(value):
@@ -251,8 +252,7 @@ def add_dict_to_cookiejar(cj, cookie_dict):
"""
cj2 = cookiejar_from_dict(cookie_dict)
- for cookie in cj2:
- cj.set_cookie(cookie)
+ cj.update(cj2)
return cj
@@ -466,11 +466,9 @@ def default_user_agent():
if _implementation == 'CPython':
_implementation_version = platform.python_version()
elif _implementation == 'PyPy':
- _implementation_version = '%s.%s.%s' % (
- sys.pypy_version_info.major,
+ _implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
sys.pypy_version_info.minor,
- sys.pypy_version_info.micro
- )
+ sys.pypy_version_info.micro)
if sys.pypy_version_info.releaselevel != 'final':
_implementation_version = ''.join([_implementation_version, sys.pypy_version_info.releaselevel])
elif _implementation == 'Jython':
@@ -487,11 +485,10 @@ def default_user_agent():
p_system = 'Unknown'
p_release = 'Unknown'
- return " ".join([
- 'python-requests/%s' % __version__,
- '%s/%s' % (_implementation, _implementation_version),
- '%s/%s' % (p_system, p_release),
- ])
+ return " ".join(['python-requests/%s' % __version__,
+ '%s/%s' % (_implementation, _implementation_version),
+ '%s/%s' % (p_system, p_release)])
+
def default_headers():
return {
@@ -524,7 +521,7 @@ def parse_header_links(value):
for param in params.split(";"):
try:
- key,value = param.split("=")
+ key, value = param.split("=")
except ValueError:
break
@@ -582,3 +579,13 @@ def prepend_scheme_if_needed(url, new_scheme):
netloc, path = path, netloc
return urlunparse((scheme, netloc, path, params, query, fragment))
+
+
+def get_auth_from_url(url):
+ """Given a url with authentication components, extract them into a tuple of
+ username, password."""
+ if url:
+ parsed = urlparse(url)
+ return (parsed.username, parsed.password)
+ else:
+ return ('', '')
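
The new get_auth_from_url() helper simply splits the userinfo component out of the URL with urlparse(); a quick sketch of the expected results (the URLs are illustrative):

    from requests.utils import get_auth_from_url

    # Credentials embedded in the netloc come back as a (username, password) tuple.
    print(get_auth_from_url('http://user:pass@proxy.example.com:3128/'))  # ('user', 'pass')

    # A falsy URL returns empty strings rather than raising.
    print(get_auth_from_url(''))  # ('', '')
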
diff --git a/requirements.txt b/requirements.txt
index 657011a..c88bf43 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,2 +1,3 @@
-pytest
-sphinx
\ No newline at end of file
+py==1.4.12
+pytest==2.3.4
+invoke==0.2.0
\ No newline at end of file
diff --git a/setup.py b/setup.py
index 795774e..52dfe02 100755
--- a/setup.py
+++ b/setup.py
@@ -17,7 +17,7 @@ if sys.argv[-1] == 'publish':
packages = [
'requests',
'requests.packages',
- 'requests.packages.charade',
+ 'requests.packages.charade',
'requests.packages.urllib3',
'requests.packages.urllib3.packages',
'requests.packages.urllib3.packages.ssl_match_hostname'
diff --git a/test_requests.py b/test_requests.py
index 4a70982..93b8123 100644
--- a/test_requests.py
+++ b/test_requests.py
@@ -3,19 +3,29 @@
"""Tests for Requests."""
+from __future__ import division
import json
import os
import unittest
+import pickle
import requests
from requests.auth import HTTPDigestAuth
+from requests.compat import str
+
+try:
+ import StringIO
+except ImportError:
+ import io as StringIO
HTTPBIN = os.environ.get('HTTPBIN_URL', 'http://httpbin.org/')
+
def httpbin(*suffix):
"""Returns url for HTTPBIN resource."""
return HTTPBIN + '/'.join(suffix)
+
class RequestsTestCase(unittest.TestCase):
_multiprocess_can_split_ = True
@@ -28,9 +38,6 @@ class RequestsTestCase(unittest.TestCase):
"""Teardown."""
pass
- def test_assertion(self):
- assert 1
-
def test_entry_points(self):
requests.session
@@ -54,6 +61,11 @@ class RequestsTestCase(unittest.TestCase):
assert pr.url == req.url
assert pr.body == 'life=42'
+ def test_no_content_length(self):
+ get_req = requests.Request('GET', httpbin('get')).prepare()
+ self.assertTrue('Content-Length' not in get_req.headers)
+ head_req = requests.Request('HEAD', httpbin('head')).prepare()
+ self.assertTrue('Content-Length' not in head_req.headers)
def test_path_is_not_double_encoded(self):
request = requests.Request('GET', "http://0.0.0.0/get/test case").prepare()
@@ -70,10 +82,6 @@ class RequestsTestCase(unittest.TestCase):
self.assertEqual(request.url,
"http://example.com/path?key=value&a=b#fragment")
- def test_HTTP_200_OK_GET(self):
- r = requests.get(httpbin('get'))
- self.assertEqual(r.status_code, 200)
-
def test_HTTP_200_OK_GET_ALTERNATIVE(self):
r = requests.Request('GET', httpbin('get'))
s = requests.Session()
@@ -104,19 +112,29 @@ class RequestsTestCase(unittest.TestCase):
r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)
self.assertEqual(r.status_code, 200)
+ def test_set_cookie_on_301(self):
+ s = requests.session()
+ url = httpbin('cookies/set?foo=bar')
+ r = s.get(url)
+ self.assertTrue(s.cookies['foo'] == 'bar')
+
+ def test_cookie_sent_on_redirect(self):
+ s = requests.session()
+ s.get(httpbin('cookies/set?foo=bar'))
+ r = s.get(httpbin('redirect/1')) # redirects to httpbin('get')
+ self.assertTrue("Cookie" in r.json()["headers"])
+
def test_user_agent_transfers(self):
heads = {
- 'User-agent':
- 'Mozilla/5.0 (github.com/kennethreitz/requests)'
+ 'User-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)'
}
r = requests.get(httpbin('user-agent'), headers=heads)
self.assertTrue(heads['User-agent'] in r.text)
heads = {
- 'user-agent':
- 'Mozilla/5.0 (github.com/kennethreitz/requests)'
+ 'user-agent': 'Mozilla/5.0 (github.com/kennethreitz/requests)'
}
r = requests.get(httpbin('user-agent'), headers=heads)
@@ -131,8 +149,6 @@ class RequestsTestCase(unittest.TestCase):
self.assertEqual(r.status_code, 200)
def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self):
-
-
auth = ('user', 'pass')
url = httpbin('basic-auth', 'user', 'pass')
@@ -163,6 +179,18 @@ class RequestsTestCase(unittest.TestCase):
r = s.get(url)
self.assertEqual(r.status_code, 200)
+ def test_DIGEST_STREAM(self):
+
+ auth = HTTPDigestAuth('user', 'pass')
+ url = httpbin('digest-auth', 'auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth, stream=True)
+ self.assertNotEqual(r.raw.read(), b'')
+
+ r = requests.get(url, auth=auth, stream=False)
+ self.assertEqual(r.raw.read(), b'')
+
+
def test_DIGESTAUTH_WRONG_HTTP_401_GET(self):
auth = HTTPDigestAuth('user', 'wrongpass')
@@ -242,6 +270,9 @@ class RequestsTestCase(unittest.TestCase):
requests.get(url, params={'foo': 'foo'})
requests.get(httpbin('ø'), params={'foo': 'foo'})
+ def test_unicode_header_name(self):
+ requests.put(httpbin('put'), headers={str('Content-Type'): 'application/octet-stream'}, data='\xff') # compat.str is unicode.
+
def test_urlencoded_get_query_multivalued_param(self):
r = requests.get(httpbin('get'), params=dict(test=['foo', 'baz']))
@@ -264,6 +295,110 @@ class RequestsTestCase(unittest.TestCase):
self.assertEqual(r.status_code, 200)
self.assertTrue(b"text/py-content-type" in r.request.body)
+ def test_hook_receives_request_arguments(self):
+ def hook(resp, **kwargs):
+ assert resp is not None
+ assert kwargs != {}
+
+ requests.Request('GET', HTTPBIN, hooks={'response': hook})
+
+ def test_prepared_request_hook(self):
+ def hook(resp, **kwargs):
+ resp.hook_working = True
+ return resp
+
+ req = requests.Request('GET', HTTPBIN, hooks={'response': hook})
+ prep = req.prepare()
+
+ s = requests.Session()
+ resp = s.send(prep)
+
+ self.assertTrue(hasattr(resp, 'hook_working'))
+
+ def test_links(self):
+ r = requests.Response()
+ r.headers = {
+ 'cache-control': 'public, max-age=60, s-maxage=60',
+ 'connection': 'keep-alive',
+ 'content-encoding': 'gzip',
+ 'content-type': 'application/json; charset=utf-8',
+ 'date': 'Sat, 26 Jan 2013 16:47:56 GMT',
+ 'etag': '"6ff6a73c0e446c1f61614769e3ceb778"',
+ 'last-modified': 'Sat, 26 Jan 2013 16:22:39 GMT',
+ 'link': ('<https://api.github.com/users/kennethreitz/repos?'
+ 'page=2&per_page=10>; rel="next", <https://api.github.'
+ 'com/users/kennethreitz/repos?page=7&per_page=10>; '
+ ' rel="last"'),
+ 'server': 'GitHub.com',
+ 'status': '200 OK',
+ 'vary': 'Accept',
+ 'x-content-type-options': 'nosniff',
+ 'x-github-media-type': 'github.beta',
+ 'x-ratelimit-limit': '60',
+ 'x-ratelimit-remaining': '57'
+ }
+ self.assertEqual(r.links['next']['rel'], 'next')
+
+ def test_cookie_parameters(self):
+ key = 'some_cookie'
+ value = 'some_value'
+ secure = True
+ domain = 'test.com'
+ rest = {'HttpOnly': True}
+
+ jar = requests.cookies.RequestsCookieJar()
+ jar.set(key, value, secure=secure, domain=domain, rest=rest)
+
+ self.assertEqual(len(jar), 1)
+ self.assertTrue('some_cookie' in jar)
+
+ cookie = list(jar)[0]
+ self.assertEqual(cookie.secure, secure)
+ self.assertEqual(cookie.domain, domain)
+ self.assertEqual(cookie._rest['HttpOnly'], rest['HttpOnly'])
+
+ def test_time_elapsed_blank(self):
+ r = requests.get(httpbin('get'))
+ td = r.elapsed
+ total_seconds = ((td.microseconds + (td.seconds + td.days * 24 * 3600)
+ * 10**6) / 10**6)
+ self.assertTrue(total_seconds > 0.0)
+
+ def test_response_is_iterable(self):
+ r = requests.Response()
+ io = StringIO.StringIO('abc')
+ r.raw = io
+ self.assertTrue(next(iter(r)))
+ io.close()
+
+ def test_get_auth_from_url(self):
+ url = 'http://user:pass@complex.url.com/path?query=yes'
+ self.assertEqual(('user', 'pass'),
+ requests.utils.get_auth_from_url(url))
+
+ def test_cannot_send_unprepared_requests(self):
+ r = requests.Request(url=HTTPBIN)
+ self.assertRaises(ValueError, requests.Session().send, r)
+
+ def test_http_error(self):
+ error = requests.exceptions.HTTPError()
+ self.assertEqual(error.response, None)
+ response = requests.Response()
+ error = requests.exceptions.HTTPError(response=response)
+ self.assertEqual(error.response, response)
+ error = requests.exceptions.HTTPError('message', response=response)
+ self.assertEqual(str(error), 'message')
+ self.assertEqual(error.response, response)
+
+ def test_session_pickling(self):
+ r = requests.Request('GET', httpbin('get'))
+ s = requests.Session()
+
+ s = pickle.loads(pickle.dumps(s))
+
+ r = s.send(r.prepare())
+ self.assertEqual(r.status_code, 200)
+
if __name__ == '__main__':
unittest.main()
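
As test_session_pickling above exercises, the new Session.__attrs__ list is what lets a session survive a pickle round trip; a short sketch, assuming the attributes named in __attrs__ (headers, cookies, proxies, and so on) are themselves picklable. The header name and value are illustrative.

    import pickle
    import requests

    s = requests.Session()
    s.headers['X-Example'] = 'yes'            # illustrative per-session header

    restored = pickle.loads(pickle.dumps(s))  # state round-tripped via __attrs__
    print(restored.headers['X-Example'])      # session configuration survives
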