-rw-r--r--  HISTORY.rst  95
-rw-r--r--  LICENSE  2
-rw-r--r--  MANIFEST.in  2
-rw-r--r--  NOTICE  25
-rw-r--r--  PKG-INFO  110
-rw-r--r--  README.rst  10
-rw-r--r--  requests.egg-info/PKG-INFO  110
-rw-r--r--  requests.egg-info/SOURCES.txt  9
-rw-r--r--  requests.egg-info/requires.txt  2
-rw-r--r--  requests/__init__.py  9
-rw-r--r--  requests/api.py  47
-rw-r--r--  requests/async.py  37
-rw-r--r--  requests/auth.py  118
-rw-r--r--  requests/compat.py  102
-rw-r--r--  requests/defaults.py  8
-rw-r--r--  requests/exceptions.py  7
-rw-r--r--  requests/hooks.py  18
-rw-r--r--  requests/models.py  369
-rw-r--r--  requests/packages/oreos/monkeys.py  2
-rw-r--r--  requests/packages/urllib3/__init__.py  4
-rw-r--r--  requests/packages/urllib3/_collections.py  2
-rw-r--r--  requests/packages/urllib3/connectionpool.py  157
-rw-r--r--  requests/packages/urllib3/exceptions.py  41
-rw-r--r--  requests/packages/urllib3/filepost.py  33
-rw-r--r--  requests/packages/urllib3/packages/__init__.py  4
-rw-r--r--  requests/packages/urllib3/packages/mimetools_choose_boundary/__init__.py  47
-rw-r--r--  requests/packages/urllib3/packages/six.py  372
-rw-r--r--  requests/packages/urllib3/packages/ssl_match_hostname/__init__.py  61
-rw-r--r--  requests/packages/urllib3/poolmanager.py  28
-rw-r--r--  requests/packages/urllib3/request.py  8
-rw-r--r--  requests/packages/urllib3/response.py  83
-rw-r--r--  requests/sessions.py  25
-rw-r--r--  requests/status_codes.py  2
-rw-r--r--  requests/structures.py  4
-rw-r--r--  requests/utils.py  92
-rwxr-xr-x  setup.py  30
-rwxr-xr-x  test_requests.py  367
37 files changed, 1917 insertions, 525 deletions
diff --git a/HISTORY.rst b/HISTORY.rst
index d7b023a..bf824bd 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -1,6 +1,100 @@
History
-------
+0.10.1 (2012-01-23)
++++++++++++++++++++
+
+* PYTHON 3 SUPPORT!
+* Dropped 2.5 Support. (*Backwards Incompatible*)
+
+0.10.0 (2012-01-21)
++++++++++++++++++++
+
+* ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
+* New ``Response.text`` is unicode-only.
+* If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
+* Default to ISO-8859-1 (Western) encoding for "text" subtypes.
+* Removal of `decode_unicode`. (*Backwards Incompatible*)
+* New multiple-hooks system.
+* New ``Response.register_hook`` for registering hooks within the pipeline.
+* ``Response.url`` is now Unicode.
+
+0.9.3 (2012-01-18)
+++++++++++++++++++
+
+* SSL verify=False bugfix (apparent on windows machines).
+
+0.9.2 (2012-01-18)
+++++++++++++++++++
+
+* Asynchronous async.send method.
+* Support for proper chunk streams with boundaries.
+* session argument for Session classes.
+* Print entire hook tracebacks, not just exception instance.
+* Fix response.iter_lines from pending next line.
+* Fix bug in HTTP-digest auth w/ URI having query strings.
+* Fix in Event Hooks section.
+* Urllib3 update.
+
+
+0.9.1 (2012-01-06)
+++++++++++++++++++
+
+* danger_mode for automatic Response.raise_for_status()
+* Response.iter_lines refactor
+
+0.9.0 (2011-12-28)
+++++++++++++++++++
+
+* verify ssl is default.
+
+
+0.8.9 (2011-12-28)
+++++++++++++++++++
+
+* Packaging fix.
+
+
+0.8.8 (2011-12-28)
+++++++++++++++++++
+
+* SSL CERT VERIFICATION!
+* Release of Certifi: Mozilla's cert list.
+* New 'verify' argument for SSL requests.
+* Urllib3 update.
+
+0.8.7 (2011-12-24)
+++++++++++++++++++
+
+* iter_lines last-line truncation fix
+* Force safe_mode for async requests
+* Handle safe_mode exceptions more consistently
+* Fix iteration on null responses in safe_mode
+
+0.8.6 (2011-12-18)
+++++++++++++++++++
+
+* Socket timeout fixes.
+* Proxy Authorization support.
+
+0.8.5 (2011-12-14)
+++++++++++++++++++
+
+* Response.iter_lines!
+
+0.8.4 (2011-12-11)
+++++++++++++++++++
+
+* Prefetch bugfix.
+* Added license to installed version.
+
+0.8.3 (2011-11-27)
+++++++++++++++++++
+
+* Converted auth system to use simpler callable objects.
+* New session parameter to API methods.
+* Display full URL while logging.
+
0.8.2 (2011-11-19)
++++++++++++++++++
@@ -28,6 +122,7 @@ History
* OPTION method
* Async pool size throttling
* File uploads send real names
+* Vendored in urllib3
0.7.6 (2011-11-07)
++++++++++++++++++
diff --git a/LICENSE b/LICENSE
index 8a9ee98..e79211c 100644
--- a/LICENSE
+++ b/LICENSE
@@ -1,4 +1,4 @@
-Copyright (c) 2011 Kenneth Reitz.
+Copyright (c) 2012 Kenneth Reitz.
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
diff --git a/MANIFEST.in b/MANIFEST.in
index 39fbb99..ef350d0 100644
--- a/MANIFEST.in
+++ b/MANIFEST.in
@@ -1 +1 @@
-include README.rst LICENSE HISTORY.rst test_requests.py
+include README.rst LICENSE NOTICE HISTORY.rst test_requests.py
diff --git a/NOTICE b/NOTICE
new file mode 100644
index 0000000..41b5d20
--- /dev/null
+++ b/NOTICE
@@ -0,0 +1,25 @@
+Requests includes some vendorized python libraries to ease installation.
+
+Urllib3 License
+===============
+
+This is the MIT license: http://www.opensource.org/licenses/mit-license.php
+
+Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt),
+Modifications copyright 2012 Kenneth Reitz.
+
+Permission is hereby granted, free of charge, to any person obtaining a copy of this
+software and associated documentation files (the "Software"), to deal in the Software
+without restriction, including without limitation the rights to use, copy, modify, merge,
+publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons
+to whom the Software is furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all copies or
+substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED,
+INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
+PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE
+FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
+DEALINGS IN THE SOFTWARE.
diff --git a/PKG-INFO b/PKG-INFO
index 4d9f7cd..5943191 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.0
Name: requests
-Version: 0.8.2
+Version: 0.10.1
Summary: Python HTTP for Humans.
Home-page: http://python-requests.org
Author: Kenneth Reitz
@@ -35,16 +35,18 @@ Description: Requests: HTTP for Humans
Requests allow you to send **HEAD**, **GET**, **POST**, **PUT**,
**PATCH**, and **DELETE** HTTP requests. You can add headers, form data,
multipart files, and parameters with simple Python dictionaries, and access the
- response data in the same way. It's powered by httplib, but it does
- all the hard work and crazy hacks for you.
+ response data in the same way. It's powered by httplib and `urllib3
+ <https://github.com/shazow/urllib3>`_, but it does all the hard work and crazy
+ hacks for you.
Features
--------
+ - Browser standard SSL verification.
- Extremely simple HEAD, GET, POST, PUT, PATCH, DELETE Requests.
- Gevent support for Asynchronous Requests.
- - Sessions with cookie persistience.
+ - Sessions with cookie persistence.
- Basic, Digest, and Custom Authentication support.
- Automatic form-encoding of dictionaries
- A simple dictionary interface for request/response cookies.
@@ -56,7 +58,7 @@ Description: Requests: HTTP for Humans
Usage
-----
- It couldn't be simpler. ::
+ It couldn't be simpler::
>>> import requests
>>> r = requests.get('http://google.com')
@@ -113,6 +115,100 @@ Description: Requests: HTTP for Humans
History
-------
+ 0.10.1 (2012-01-23)
+ +++++++++++++++++++
+
+ * PYTHON 3 SUPPORT!
+ * Dropped 2.5 Support. (*Backwards Incompatible*)
+
+ 0.10.0 (2012-01-21)
+ +++++++++++++++++++
+
+ * ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
+ * New ``Response.text`` is unicode-only.
+ * If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
+ * Default to ISO-8859-1 (Western) encoding for "text" subtypes.
+ * Removal of `decode_unicode`. (*Backwards Incompatible*)
+ * New multiple-hooks system.
+ * New ``Response.register_hook`` for registering hooks within the pipeline.
+ * ``Response.url`` is now Unicode.
+
+ 0.9.3 (2012-01-18)
+ ++++++++++++++++++
+
+ * SSL verify=False bugfix (apparent on windows machines).
+
+ 0.9.2 (2012-01-18)
+ ++++++++++++++++++
+
+ * Asynchronous async.send method.
+ * Support for proper chunk streams with boundaries.
+ * session argument for Session classes.
+ * Print entire hook tracebacks, not just exception instance.
+ * Fix response.iter_lines from pending next line.
+ * Fix bug in HTTP-digest auth w/ URI having query strings.
+ * Fix in Event Hooks section.
+ * Urllib3 update.
+
+
+ 0.9.1 (2012-01-06)
+ ++++++++++++++++++
+
+ * danger_mode for automatic Response.raise_for_status()
+ * Response.iter_lines refactor
+
+ 0.9.0 (2011-12-28)
+ ++++++++++++++++++
+
+ * verify ssl is default.
+
+
+ 0.8.9 (2011-12-28)
+ ++++++++++++++++++
+
+ * Packaging fix.
+
+
+ 0.8.8 (2011-12-28)
+ ++++++++++++++++++
+
+ * SSL CERT VERIFICATION!
+ * Release of Certifi: Mozilla's cert list.
+ * New 'verify' argument for SSL requests.
+ * Urllib3 update.
+
+ 0.8.7 (2011-12-24)
+ ++++++++++++++++++
+
+ * iter_lines last-line truncation fix
+ * Force safe_mode for async requests
+ * Handle safe_mode exceptions more consistently
+ * Fix iteration on null responses in safe_mode
+
+ 0.8.6 (2011-12-18)
+ ++++++++++++++++++
+
+ * Socket timeout fixes.
+ * Proxy Authorization support.
+
+ 0.8.5 (2011-12-14)
+ ++++++++++++++++++
+
+ * Response.iter_lines!
+
+ 0.8.4 (2011-12-11)
+ ++++++++++++++++++
+
+ * Prefetch bugfix.
+ * Added license to installed version.
+
+ 0.8.3 (2011-11-27)
+ ++++++++++++++++++
+
+ * Converted auth system to use simpler callable objects.
+ * New session parameter to API methods.
+ * Display full URL while logging.
+
0.8.2 (2011-11-19)
++++++++++++++++++
@@ -140,6 +236,7 @@ Description: Requests: HTTP for Humans
* OPTION method
* Async pool size throttling
* File uploads send real names
+ * Vendored in urllib3
0.7.6 (2011-11-07)
++++++++++++++++++
@@ -386,6 +483,7 @@ Classifier: Intended Audience :: Developers
Classifier: Natural Language :: English
Classifier: License :: OSI Approved :: ISC License (ISCL)
Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2.5
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.0
+Classifier: Programming Language :: Python :: 3.1
diff --git a/README.rst b/README.rst
index 5e8fa6d..bc6c291 100644
--- a/README.rst
+++ b/README.rst
@@ -27,16 +27,18 @@ See `the same code, without Requests <https://gist.github.com/973705>`_.
Requests allow you to send **HEAD**, **GET**, **POST**, **PUT**,
**PATCH**, and **DELETE** HTTP requests. You can add headers, form data,
multipart files, and parameters with simple Python dictionaries, and access the
-response data in the same way. It's powered by httplib, but it does
-all the hard work and crazy hacks for you.
+response data in the same way. It's powered by httplib and `urllib3
+<https://github.com/shazow/urllib3>`_, but it does all the hard work and crazy
+hacks for you.
Features
--------
+- Browser standard SSL verification.
- Extremely simple HEAD, GET, POST, PUT, PATCH, DELETE Requests.
- Gevent support for Asynchronous Requests.
-- Sessions with cookie persistience.
+- Sessions with cookie persistence.
- Basic, Digest, and Custom Authentication support.
- Automatic form-encoding of dictionaries
- A simple dictionary interface for request/response cookies.
@@ -48,7 +50,7 @@ Features
Usage
-----
-It couldn't be simpler. ::
+It couldn't be simpler::
>>> import requests
>>> r = requests.get('http://google.com')
diff --git a/requests.egg-info/PKG-INFO b/requests.egg-info/PKG-INFO
index 4d9f7cd..5943191 100644
--- a/requests.egg-info/PKG-INFO
+++ b/requests.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.0
Name: requests
-Version: 0.8.2
+Version: 0.10.1
Summary: Python HTTP for Humans.
Home-page: http://python-requests.org
Author: Kenneth Reitz
@@ -35,16 +35,18 @@ Description: Requests: HTTP for Humans
Requests allow you to send **HEAD**, **GET**, **POST**, **PUT**,
**PATCH**, and **DELETE** HTTP requests. You can add headers, form data,
multipart files, and parameters with simple Python dictionaries, and access the
- response data in the same way. It's powered by httplib, but it does
- all the hard work and crazy hacks for you.
+ response data in the same way. It's powered by httplib and `urllib3
+ <https://github.com/shazow/urllib3>`_, but it does all the hard work and crazy
+ hacks for you.
Features
--------
+ - Browser standard SSL verification.
- Extremely simple HEAD, GET, POST, PUT, PATCH, DELETE Requests.
- Gevent support for Asynchronous Requests.
- - Sessions with cookie persistience.
+ - Sessions with cookie persistence.
- Basic, Digest, and Custom Authentication support.
- Automatic form-encoding of dictionaries
- A simple dictionary interface for request/response cookies.
@@ -56,7 +58,7 @@ Description: Requests: HTTP for Humans
Usage
-----
- It couldn't be simpler. ::
+ It couldn't be simpler::
>>> import requests
>>> r = requests.get('http://google.com')
@@ -113,6 +115,100 @@ Description: Requests: HTTP for Humans
History
-------
+ 0.10.1 (2012-01-23)
+ +++++++++++++++++++
+
+ * PYTHON 3 SUPPORT!
+ * Dropped 2.5 Support. (*Backwards Incompatible*)
+
+ 0.10.0 (2012-01-21)
+ +++++++++++++++++++
+
+ * ``Response.content`` is now bytes-only. (*Backwards Incompatible*)
+ * New ``Response.text`` is unicode-only.
+ * If no ``Response.encoding`` is specified and ``chardet`` is available, ``Response.text`` will guess an encoding.
+ * Default to ISO-8859-1 (Western) encoding for "text" subtypes.
+ * Removal of `decode_unicode`. (*Backwards Incompatible*)
+ * New multiple-hooks system.
+ * New ``Response.register_hook`` for registering hooks within the pipeline.
+ * ``Response.url`` is now Unicode.
+
+ 0.9.3 (2012-01-18)
+ ++++++++++++++++++
+
+ * SSL verify=False bugfix (apparent on windows machines).
+
+ 0.9.2 (2012-01-18)
+ ++++++++++++++++++
+
+ * Asynchronous async.send method.
+ * Support for proper chunk streams with boundaries.
+ * session argument for Session classes.
+ * Print entire hook tracebacks, not just exception instance.
+ * Fix response.iter_lines from pending next line.
+ * Fix bug in HTTP-digest auth w/ URI having query strings.
+ * Fix in Event Hooks section.
+ * Urllib3 update.
+
+
+ 0.9.1 (2012-01-06)
+ ++++++++++++++++++
+
+ * danger_mode for automatic Response.raise_for_status()
+ * Response.iter_lines refactor
+
+ 0.9.0 (2011-12-28)
+ ++++++++++++++++++
+
+ * verify ssl is default.
+
+
+ 0.8.9 (2011-12-28)
+ ++++++++++++++++++
+
+ * Packaging fix.
+
+
+ 0.8.8 (2011-12-28)
+ ++++++++++++++++++
+
+ * SSL CERT VERIFICATION!
+ * Release of Certifi: Mozilla's cert list.
+ * New 'verify' argument for SSL requests.
+ * Urllib3 update.
+
+ 0.8.7 (2011-12-24)
+ ++++++++++++++++++
+
+ * iter_lines last-line truncation fix
+ * Force safe_mode for async requests
+ * Handle safe_mode exceptions more consistently
+ * Fix iteration on null responses in safe_mode
+
+ 0.8.6 (2011-12-18)
+ ++++++++++++++++++
+
+ * Socket timeout fixes.
+ * Proxy Authorization support.
+
+ 0.8.5 (2011-12-14)
+ ++++++++++++++++++
+
+ * Response.iter_lines!
+
+ 0.8.4 (2011-12-11)
+ ++++++++++++++++++
+
+ * Prefetch bugfix.
+ * Added license to installed version.
+
+ 0.8.3 (2011-11-27)
+ ++++++++++++++++++
+
+ * Converted auth system to use simpler callable objects.
+ * New session parameter to API methods.
+ * Display full URL while logging.
+
0.8.2 (2011-11-19)
++++++++++++++++++
@@ -140,6 +236,7 @@ Description: Requests: HTTP for Humans
* OPTION method
* Async pool size throttling
* File uploads send real names
+ * Vendored in urllib3
0.7.6 (2011-11-07)
++++++++++++++++++
@@ -386,6 +483,7 @@ Classifier: Intended Audience :: Developers
Classifier: Natural Language :: English
Classifier: License :: OSI Approved :: ISC License (ISCL)
Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 2.5
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3.0
+Classifier: Programming Language :: Python :: 3.1
diff --git a/requests.egg-info/SOURCES.txt b/requests.egg-info/SOURCES.txt
index f4cad3c..804f541 100644
--- a/requests.egg-info/SOURCES.txt
+++ b/requests.egg-info/SOURCES.txt
@@ -1,6 +1,7 @@
HISTORY.rst
LICENSE
MANIFEST.in
+NOTICE
README.rst
setup.py
test_requests.py
@@ -8,6 +9,7 @@ requests/__init__.py
requests/api.py
requests/async.py
requests/auth.py
+requests/compat.py
requests/defaults.py
requests/exceptions.py
requests/hooks.py
@@ -19,6 +21,7 @@ requests/utils.py
requests.egg-info/PKG-INFO
requests.egg-info/SOURCES.txt
requests.egg-info/dependency_links.txt
+requests.egg-info/requires.txt
requests.egg-info/top_level.txt
requests/packages/__init__.py
requests/packages/oreos/__init__.py
@@ -32,4 +35,8 @@ requests/packages/urllib3/exceptions.py
requests/packages/urllib3/filepost.py
requests/packages/urllib3/poolmanager.py
requests/packages/urllib3/request.py
-requests/packages/urllib3/response.py \ No newline at end of file
+requests/packages/urllib3/response.py
+requests/packages/urllib3/packages/__init__.py
+requests/packages/urllib3/packages/six.py
+requests/packages/urllib3/packages/mimetools_choose_boundary/__init__.py
+requests/packages/urllib3/packages/ssl_match_hostname/__init__.py \ No newline at end of file
diff --git a/requests.egg-info/requires.txt b/requests.egg-info/requires.txt
new file mode 100644
index 0000000..a8c0e5c
--- /dev/null
+++ b/requests.egg-info/requires.txt
@@ -0,0 +1,2 @@
+certifi>=0.0.7
+chardet>=1.0.0 \ No newline at end of file
diff --git a/requests/__init__.py b/requests/__init__.py
index 9d2319a..48fb389 100644
--- a/requests/__init__.py
+++ b/requests/__init__.py
@@ -9,17 +9,18 @@
requests
~~~~~~~~
-:copyright: (c) 2011 by Kenneth Reitz.
+:copyright: (c) 2012 by Kenneth Reitz.
:license: ISC, see LICENSE for more details.
"""
__title__ = 'requests'
-__version__ = '0.8.2'
-__build__ = 0x000802
+__version__ = '0.10.1'
+__build__ = 0x001001
__author__ = 'Kenneth Reitz'
__license__ = 'ISC'
-__copyright__ = 'Copyright 2011 Kenneth Reitz'
+__copyright__ = 'Copyright 2012 Kenneth Reitz'
+
from . import utils
diff --git a/requests/api.py b/requests/api.py
index 9e0c96f..b7d4158 100644
--- a/requests/api.py
+++ b/requests/api.py
@@ -6,28 +6,14 @@ requests.api
This module implements the Requests API.
-:copyright: (c) 2011 by Kenneth Reitz.
+:copyright: (c) 2012 by Kenneth Reitz.
:license: ISC, see LICENSE for more details.
"""
-from .sessions import session
-
-
-def request(method, url,
- params=None,
- data=None,
- headers=None,
- cookies=None,
- files=None,
- auth=None,
- timeout=None,
- allow_redirects=False,
- proxies=None,
- hooks=None,
- return_response=True,
- prefetch=False,
- config=None):
+from . import sessions
+
+def request(method, url, **kwargs):
"""Constructs and sends a :class:`Request <Request>`.
Returns :class:`Response <Response>` object.
@@ -38,32 +24,19 @@ def request(method, url,
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload.
- :param auth: (optional) Auth typle to enable Basic/Digest/Custom HTTP Auth.
+ :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) Float describing the timeout of the request.
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param return_response: (optional) If False, an un-sent Request object will returned.
+ :param session: (optional) A :class:`Session` object to be used for the request.
:param config: (optional) A configuration dictionary.
+ :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
+ :param prefetch: (optional) if ``True``, the response content will be immediately downloaded.
"""
- s = session()
- return s.request(
- method=method,
- url=url,
- params=params,
- data=data,
- headers=headers,
- cookies=cookies,
- files=files,
- auth=auth,
- timeout=timeout,
- allow_redirects=allow_redirects,
- proxies=proxies,
- hooks=hooks,
- return_response=return_response,
- config=config,
- prefetch=prefetch
- )
+ s = kwargs.pop('session') if 'session' in kwargs else sessions.session()
+ return s.request(method=method, url=url, **kwargs)
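The rewritten ``request`` now forwards everything through ``**kwargs`` and only special-cases a ``session`` keyword, creating a throwaway session otherwise. A minimal usage sketch, assuming requests 0.10.1 is installed and using httpbin.org as a stand-in endpoint (not taken from this patch)::

    import requests
    from requests import sessions

    # One-off call: a throwaway session is created inside requests.api.request.
    r = requests.request('GET', 'http://httpbin.org/get', params={'q': 'test'})
    print(r.status_code)

    # The new `session` keyword reuses an existing session (cookies, config, ...).
    s = sessions.session()
    r = requests.request('GET', 'http://httpbin.org/get', session=s)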
diff --git a/requests/async.py b/requests/async.py
index 8bafb1e..9488447 100644
--- a/requests/async.py
+++ b/requests/async.py
@@ -36,20 +36,25 @@ def patched(f):
kwargs['return_response'] = False
kwargs['prefetch'] = True
+ config = kwargs.get('config', {})
+ config.update(safe_mode=True)
+
+ kwargs['config'] = config
+
return f(*args, **kwargs)
return wrapped
-def send(r, pools=None):
- """Sends a given Request object."""
+def send(r, pool=None):
+ """Sends the request object using the specified pool. If a pool isn't
+ specified this method blocks. Pools are useful because you can specify size
+ and can hence limit concurrency."""
- if pools:
- r._pools = pools
+ if pool != None:
+ return pool.spawn(r.send)
- r.send()
-
- return r.response
+ return gevent.spawn(r.send)
# Patched requests.api functions.
@@ -71,19 +76,13 @@ def map(requests, prefetch=True, size=None):
:param size: Specifies the number of requests to make at a time. If None, no throttling occurs.
"""
- if size:
- pool = Pool(size)
- pool.map(send, requests)
- pool.join()
- else:
- jobs = [gevent.spawn(send, r) for r in requests]
- gevent.joinall(jobs)
+ requests = list(requests)
+
+ pool = Pool(size) if size else None
+ jobs = [send(r, pool) for r in requests]
+ gevent.joinall(jobs)
if prefetch:
[r.response.content for r in requests]
- return [r.response for r in requests]
-
-
-
-
+ return [r.response for r in requests] \ No newline at end of file
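``send`` now returns a gevent greenlet (optionally spawned on a bounded ``Pool``) instead of blocking, and ``map`` drives a list of requests through it. A hedged sketch of the intended call pattern, assuming gevent is installed; the URLs are illustrative::

    from requests import async

    urls = ['http://httpbin.org/get', 'http://httpbin.org/ip']

    # Unsent Request objects; the patched API forces return_response=False and safe_mode.
    reqs = [async.get(u) for u in urls]

    # Issue them concurrently, at most two at a time via Pool(size).
    responses = async.map(reqs, size=2)
    print([r.status_code for r in responses])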
diff --git a/requests/auth.py b/requests/auth.py
index aabeb86..183731b 100644
--- a/requests/auth.py
+++ b/requests/auth.py
@@ -7,35 +7,55 @@ requests.auth
This module contains the authentication handlers for Requests.
"""
+from __future__ import unicode_literals
+
import time
import hashlib
from base64 import b64encode
-from urlparse import urlparse
-
+from .compat import urlparse, str, bytes
from .utils import randombytes, parse_dict_header
-def http_basic(r, username, password):
- """Attaches HTTP Basic Authentication to the given Request object.
- Arguments should be considered non-positional.
- """
- username = str(username)
- password = str(password)
+def _basic_auth_str(username, password):
+ """Returns a Basic Auth string."""
+
+ return 'Basic ' + b64encode(("%s:%s" % (username, password)).encode('utf-8')).strip().decode('utf-8')
+
+
+class AuthBase(object):
+ """Base class that all auth implementations derive from"""
+
+ def __call__(self, r):
+ raise NotImplementedError('Auth hooks must be callable.')
- auth_s = b64encode('%s:%s' % (username, password))
- r.headers['Authorization'] = ('Basic %s' % auth_s)
- return r
+class HTTPBasicAuth(AuthBase):
+ """Attaches HTTP Basic Authentication to the given Request object."""
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
+
+ def __call__(self, r):
+ r.headers['Authorization'] = _basic_auth_str(self.username, self.password)
+ return r
+
+
+class HTTPProxyAuth(HTTPBasicAuth):
+ """Attaches HTTP Proxy Authenetication to a given Request object."""
+ def __call__(self, r):
+ r.headers['Proxy-Authorization'] = _basic_auth_str(self.username, self.password)
+ return r
-def http_digest(r, username, password):
- """Attaches HTTP Digest Authentication to the given Request object.
- Arguments should be considered non-positional.
- """
+class HTTPDigestAuth(AuthBase):
+ """Attaches HTTP Digest Authentication to the given Request object."""
+ def __init__(self, username, password):
+ self.username = username
+ self.password = password
- def handle_401(r):
+ def handle_401(self, r):
"""Takes the given response and tries digest-auth, if needed."""
s_auth = r.headers.get('www-authenticate', '')
@@ -56,9 +76,17 @@ def http_digest(r, username, password):
algorithm = algorithm.upper()
# lambdas assume digest modules are imported at the top level
if algorithm == 'MD5':
- H = lambda x: hashlib.md5(x).hexdigest()
+ def h(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.md5(x).hexdigest()
+ H = h
elif algorithm == 'SHA':
- H = lambda x: hashlib.sha1(x).hexdigest()
+ def h(x):
+ if isinstance(x, str):
+ x = x.encode('utf-8')
+ return hashlib.sha1(x).hexdigest()
+ H = h
# XXX MD5-sess
KD = lambda s, d: H("%s:%s" % (s, d))
@@ -68,10 +96,12 @@ def http_digest(r, username, password):
# XXX not implemented yet
entdig = None
p_parsed = urlparse(r.request.url)
- path = p_parsed.path + p_parsed.query
+ path = p_parsed.path
+ if p_parsed.query:
+ path += '?' + p_parsed.query
- A1 = "%s:%s:%s" % (username, realm, password)
- A2 = "%s:%s" % (r.request.method, path)
+ A1 = '%s:%s:%s' % (self.username, realm, self.password)
+ A2 = '%s:%s' % (r.request.method, path)
if qop == 'auth':
if nonce == last_nonce:
@@ -81,10 +111,12 @@ def http_digest(r, username, password):
last_nonce = nonce
ncvalue = '%08x' % nonce_count
- cnonce = (hashlib.sha1("%s:%s:%s:%s" % (
- nonce_count, nonce, time.ctime(), randombytes(8)))
- .hexdigest()[:16]
- )
+ s = str(nonce_count).encode('utf-8')
+ s += nonce.encode('utf-8')
+ s += time.ctime().encode('utf-8')
+ s += randombytes(8)
+
+ cnonce = (hashlib.sha1(s).hexdigest()[:16])
noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
respdig = KD(H(A1), noncebit)
elif qop is None:
@@ -95,7 +127,7 @@ def http_digest(r, username, password):
# XXX should the partial digests be encoded too?
base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
- 'response="%s"' % (username, realm, nonce, path, respdig)
+ 'response="%s"' % (self.username, realm, nonce, path, respdig)
if opaque:
base += ', opaque="%s"' % opaque
if entdig:
@@ -104,7 +136,6 @@ def http_digest(r, username, password):
if qop:
base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
-
r.request.headers['Authorization'] = 'Digest %s' % (base)
r.request.send(anyway=True)
_r = r.request.response
@@ -114,33 +145,6 @@ def http_digest(r, username, password):
return r
- r.hooks['response'] = handle_401
- return r
-
-
-def dispatch(t):
- """Given an auth tuple, return an expanded version."""
-
- if not t:
- return t
- else:
- t = list(t)
-
- # Make sure they're passing in something.
- assert len(t) >= 2
-
- # If only two items are passed in, assume HTTPBasic.
- if (len(t) == 2):
- t.insert(0, 'basic')
-
- # Allow built-in string referenced auths.
- if isinstance(t[0], basestring):
- if t[0] in ('basic', 'forced_basic'):
- t[0] = http_basic
- elif t[0] in ('digest',):
- t[0] = http_digest
-
- # Return a custom callable.
- return (t[0], tuple(t[1:]))
-
-
+ def __call__(self, r):
+ r.register_hook('response', self.handle_401)
+ return r
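Auth is now any callable that receives the ``Request`` and mutates it; the old tuple dispatch is gone, and ``models.py`` still special-cases a plain ``(user, pass)`` tuple as basic auth. A usage sketch, with illustrative httpbin.org endpoints::

    import requests
    from requests.auth import HTTPBasicAuth, HTTPDigestAuth

    # Equivalent spellings of basic auth.
    requests.get('http://httpbin.org/basic-auth/user/pass', auth=('user', 'pass'))
    requests.get('http://httpbin.org/basic-auth/user/pass',
                 auth=HTTPBasicAuth('user', 'pass'))

    # Digest auth registers a 'response' hook that replays the request after a 401 challenge.
    r = requests.get('http://httpbin.org/digest-auth/auth/user/pass',
                     auth=HTTPDigestAuth('user', 'pass'))
    print(r.status_code)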
diff --git a/requests/compat.py b/requests/compat.py
new file mode 100644
index 0000000..224bfd0
--- /dev/null
+++ b/requests/compat.py
@@ -0,0 +1,102 @@
+# -*- coding: utf-8 -*-
+
+"""
+pythoncompat
+"""
+
+
+import sys
+
+# -------
+# Pythons
+# -------
+
+# Syntax sugar.
+_ver = sys.version_info
+
+#: Python 2.x?
+is_py2 = (_ver[0] == 2)
+
+#: Python 3.x?
+is_py3 = (_ver[0] == 3)
+
+#: Python 3.0.x
+is_py30 = (is_py3 and _ver[1] == 0)
+
+#: Python 3.1.x
+is_py31 = (is_py3 and _ver[1] == 1)
+
+#: Python 3.2.x
+is_py32 = (is_py3 and _ver[1] == 2)
+
+#: Python 3.3.x
+is_py33 = (is_py3 and _ver[1] == 3)
+
+#: Python 3.4.x
+is_py34 = (is_py3 and _ver[1] == 4)
+
+#: Python 2.7.x
+is_py27 = (is_py2 and _ver[1] == 7)
+
+#: Python 2.6.x
+is_py26 = (is_py2 and _ver[1] == 6)
+
+#: Python 2.5.x
+is_py25 = (is_py2 and _ver[1] == 5)
+
+#: Python 2.4.x
+is_py24 = (is_py2 and _ver[1] == 4) # I'm assuming this is not by choice.
+
+
+# ---------
+# Platforms
+# ---------
+
+
+# Syntax sugar.
+_ver = sys.version.lower()
+
+is_pypy = ('pypy' in _ver)
+is_jython = ('jython' in _ver)
+is_ironpython = ('iron' in _ver)
+
+# Assume CPython, if nothing else.
+is_cpython = not any((is_pypy, is_jython, is_ironpython))
+
+# Windows-based system.
+is_windows = 'win32' in str(sys.platform).lower()
+
+# Standard Linux 2+ system.
+is_linux = ('linux' in str(sys.platform).lower())
+is_osx = ('darwin' in str(sys.platform).lower())
+is_hpux = ('hpux' in str(sys.platform).lower()) # Complete guess.
+is_solaris = ('solar==' in str(sys.platform).lower()) # Complete guess.
+
+
+# ---------
+# Specifics
+# ---------
+
+
+if is_py2:
+ from urllib import quote, unquote, urlencode
+ from urlparse import urlparse, urlunparse, urljoin, urlsplit
+ from urllib2 import parse_http_list
+ import cookielib
+ from .packages.oreos.monkeys import SimpleCookie
+ from StringIO import StringIO
+
+ str = unicode
+ bytes = str
+
+
+elif is_py3:
+ from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote
+ from urllib.request import parse_http_list
+ from http import cookiejar as cookielib
+ from http.cookies import SimpleCookie
+ from io import StringIO
+
+ str = str
+ bytes = bytes
+
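The new ``compat`` module gives the rest of the package a single import point for names that moved between Python 2 and 3. A small illustration of the names it re-exports (not part of the patch)::

    from requests.compat import urlparse, urlencode, is_py2, is_py3

    parts = urlparse('http://example.com/path?a=1')
    print(parts.scheme, parts.netloc, parts.path)    # http example.com /path
    print(urlencode({'q': 'requests', 'page': 2}))   # q=requests&page=2 (order may vary)
    print(is_py2, is_py3)                            # exactly one of these is True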
diff --git a/requests/defaults.py b/requests/defaults.py
index 7a5a3fb..424d373 100644
--- a/requests/defaults.py
+++ b/requests/defaults.py
@@ -10,15 +10,13 @@ Configurations:
:base_headers: Default HTTP headers.
:verbose: Stream to write request logging to.
-:timeout: Seconds until request timeout.
-:max_redirects: Maximum njumber of redirects allowed within a request.
-:decode_unicode: Decode unicode responses automatically?
+:max_redirects: Maximum number of redirects allowed within a request.
:keep_alive: Reuse HTTP Connections?
:max_retries: The number of times a request should be retried in the event of a connection failure.
+:danger_mode: If true, Requests will raise errors immediately.
:safe_mode: If true, Requests will catch all errors.
:pool_maxsize: The maximum size of an HTTP connection pool.
:pool_connections: The number of active HTTP connection pools to use.
-
"""
from . import __version__
@@ -34,9 +32,9 @@ defaults['base_headers'] = {
defaults['verbose'] = None
defaults['max_redirects'] = 30
-defaults['decode_unicode'] = True
defaults['pool_connections'] = 10
defaults['pool_maxsize'] = 10
defaults['max_retries'] = 0
+defaults['danger_mode'] = False
defaults['safe_mode'] = False
defaults['keep_alive'] = True
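These defaults are merged into each request's ``config`` dictionary, so individual calls can override them. A hedged example of flipping the new ``danger_mode`` switch per request (httpbin.org is a stand-in endpoint)::

    import requests
    from requests.defaults import defaults
    from requests.exceptions import HTTPError

    print(defaults['max_retries'], defaults['danger_mode'])   # 0 False

    # With danger_mode on, a non-2xx response raises HTTPError immediately
    # instead of waiting for an explicit raise_for_status() call.
    try:
        requests.get('http://httpbin.org/status/404', config={'danger_mode': True})
    except HTTPError as e:
        print('raised:', e)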
diff --git a/requests/exceptions.py b/requests/exceptions.py
index d20a95c..c7b98e6 100644
--- a/requests/exceptions.py
+++ b/requests/exceptions.py
@@ -13,10 +13,13 @@ class RequestException(Exception):
request."""
class HTTPError(RequestException):
- """An HTTP error occured."""
+ """An HTTP error occurred."""
class ConnectionError(RequestException):
- """A Connection error occured."""
+ """A Connection error occurred."""
+
+class SSLError(ConnectionError):
+ """An SSL error occurred."""
class Timeout(RequestException):
"""The request timed out."""
diff --git a/requests/hooks.py b/requests/hooks.py
index f9cf480..3560b89 100644
--- a/requests/hooks.py
+++ b/requests/hooks.py
@@ -22,7 +22,10 @@ Available hooks:
"""
-import warnings
+import traceback
+
+
+HOOKS = ('args', 'pre_request', 'post_request', 'response')
def dispatch_hook(key, hooks, hook_data):
@@ -31,10 +34,15 @@ def dispatch_hook(key, hooks, hook_data):
hooks = hooks or dict()
if key in hooks:
- try:
- return hooks.get(key).__call__(hook_data) or hook_data
+ hooks = hooks.get(key)
+
+ if hasattr(hooks, '__call__'):
+ hooks = [hooks]
- except Exception, why:
- warnings.warn(str(why))
+ for hook in hooks:
+ try:
+ hook_data = hook(hook_data) or hook_data
+ except Exception:
+ traceback.print_exc()
return hook_data
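``dispatch_hook`` now accepts either a single callable or a list of callables per event, and a failing hook prints its traceback instead of being reduced to a warning. A usage sketch with one 'response' hook, assuming the ``hooks`` keyword is forwarded through the API as in the docstrings above; the endpoint is illustrative::

    import requests

    def tag_response(response):
        # A response hook: return a replacement object, or None to keep the original.
        response.tagged = True
        return response

    r = requests.get('http://httpbin.org/get', hooks={'response': tag_response})
    print(r.tagged, r.status_code)

Additional hooks for the same event can be attached with the new ``Request.register_hook``, which appends to the per-event list that ``dispatch_hook`` iterates.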
diff --git a/requests/models.py b/requests/models.py
index 0be3e89..c200896 100644
--- a/requests/models.py
+++ b/requests/models.py
@@ -7,28 +7,34 @@ requests.models
This module contains the primary objects that power Requests.
"""
-import urllib
-import zlib
-
-from urlparse import urlparse, urlunparse, urljoin, urlsplit
+import os
from datetime import datetime
-from .auth import dispatch as auth_dispatch
-from .hooks import dispatch_hook
+from .hooks import dispatch_hook, HOOKS
from .structures import CaseInsensitiveDict
from .status_codes import codes
-from .packages import oreos
+
+from .auth import HTTPBasicAuth, HTTPProxyAuth
+from .packages.urllib3.response import HTTPResponse
from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3 import connectionpool, poolmanager
from .packages.urllib3.filepost import encode_multipart_formdata
from .exceptions import (
- Timeout, URLRequired, TooManyRedirects, HTTPError, ConnectionError)
+ ConnectionError, HTTPError, RequestException, Timeout, TooManyRedirects,
+ URLRequired, SSLError)
from .utils import (
get_encoding_from_headers, stream_decode_response_unicode,
- decode_gzip, stream_decode_gzip, guess_filename, requote_path)
+ stream_decompress, guess_filename, requote_path, dict_from_string)
+
+from .compat import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, str, bytes, SimpleCookie, is_py3, is_py2
+# Import chardet if it is available.
+try:
+ import chardet
+except ImportError:
+ pass
REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved)
@@ -54,13 +60,20 @@ class Request(object):
proxies=None,
hooks=None,
config=None,
- _poolmanager=None):
+ _poolmanager=None,
+ verify=None,
+ session=None):
#: Float describes the timeout of the request.
# (Use socket.setdefaulttimeout() as fallback)
self.timeout = timeout
#: Request URL.
+
+ # if isinstance(url, str):
+ # url = url.encode('utf-8')
+ # print(dir(url))
+
self.url = url
#: Dictionary of HTTP Headers to attach to the :class:`Request <Request>`.
@@ -79,7 +92,6 @@ class Request(object):
#: Dictionary or byte of querystring data to attach to the
#: :class:`Request <Request>`.
self.params = None
- self.params = dict(params or [])
#: True if :class:`Request <Request>` is part of a redirect chain (disables history
#: and HTTPError storage).
@@ -98,9 +110,8 @@ class Request(object):
#: content and metadata of HTTP Response, once :attr:`sent <send>`.
self.response = Response()
- #: Authentication tuple to attach to :class:`Request <Request>`.
- self._auth = auth
- self.auth = auth_dispatch(auth)
+ #: Authentication tuple or object to attach to :class:`Request <Request>`.
+ self.auth = auth
#: CookieJar to attach to :class:`Request <Request>`.
self.cookies = dict(cookies or [])
@@ -112,17 +123,29 @@ class Request(object):
self.sent = False
#: Event-handling hooks.
- self.hooks = hooks
+ self.hooks = {}
+
+ for event in HOOKS:
+ self.hooks[event] = []
+
+ hooks = hooks or {}
+
+ for (k, v) in list(hooks.items()):
+ self.register_hook(event=k, hook=v)
#: Session.
- self.session = None
+ self.session = session
+
+ #: SSL Verification.
+ self.verify = verify
if headers:
headers = CaseInsensitiveDict(self.headers)
else:
headers = CaseInsensitiveDict()
- for (k, v) in self.config.get('base_headers', {}).items():
+ # Add configured base headers.
+ for (k, v) in list(self.config.get('base_headers', {}).items()):
if k not in headers:
headers[k] = v
@@ -138,7 +161,7 @@ class Request(object):
return '<Request [%s]>' % (self.method)
- def _build_response(self, resp, is_error=False):
+ def _build_response(self, resp):
"""Build internal :class:`Response <Response>` object
from given response.
"""
@@ -152,7 +175,7 @@ class Request(object):
if resp:
- # Fallback to None if there's no staus_code, for whatever reason.
+ # Fallback to None if there's no status_code, for whatever reason.
response.status_code = getattr(resp, 'status', None)
# Make headers case-insensitive.
@@ -167,17 +190,16 @@ class Request(object):
# Add new cookies from the server.
if 'set-cookie' in response.headers:
cookie_header = response.headers['set-cookie']
- cookies = oreos.dict_from_string(cookie_header)
+ cookies = dict_from_string(cookie_header)
# Save cookies in Response.
response.cookies = cookies
- # Save original resopnse for later.
- response.raw = resp
-
- if is_error:
- response.error = resp
+ # No exceptions were harmed in the making of this request.
+ response.error = getattr(resp, 'error', None)
+ # Save original response for later.
+ response.raw = resp
response.url = self.full_url
return response
@@ -231,13 +253,15 @@ class Request(object):
files=self.files,
method=method,
params=self.session.params,
- auth=self._auth,
+ auth=self.auth,
cookies=cookies,
redirect=True,
config=self.config,
timeout=self.timeout,
_poolmanager=self._poolmanager,
proxies = self.proxies,
+ verify = self.verify,
+ session = self.session
)
request.send()
@@ -264,16 +288,17 @@ class Request(object):
returns it twice.
"""
- if hasattr(data, '__iter__'):
+ if hasattr(data, '__iter__') and not isinstance(data, str):
data = dict(data)
+
if hasattr(data, 'items'):
result = []
- for k, vs in data.items():
+ for k, vs in list(data.items()):
for v in isinstance(vs, list) and vs or [vs]:
- result.append((k.encode('utf-8') if isinstance(k, unicode) else k,
- v.encode('utf-8') if isinstance(v, unicode) else v))
- return result, urllib.urlencode(result, doseq=True)
+ result.append((k.encode('utf-8') if isinstance(k, str) else k,
+ v.encode('utf-8') if isinstance(v, str) else v))
+ return result, urlencode(result, doseq=True)
else:
return data, data
@@ -284,20 +309,27 @@ class Request(object):
if not self.url:
raise URLRequired()
+ url = self.url
+
# Support for unicode domain names and paths.
- scheme, netloc, path, params, query, fragment = urlparse(self.url)
+ scheme, netloc, path, params, query, fragment = urlparse(url)
+
if not scheme:
- raise ValueError()
+ raise ValueError("Invalid URL %r: No schema supplied" % url)
+
+ netloc = netloc.encode('idna').decode('utf-8')
- netloc = netloc.encode('idna')
+ if is_py2:
+ if isinstance(path, str):
+ path = path.encode('utf-8')
- if isinstance(path, unicode):
- path = path.encode('utf-8')
+ path = requote_path(path)
- path = requote_path(path)
+ # print([ scheme, netloc, path, params, query, fragment ])
+ # print('---------------------')
- url = str(urlunparse([ scheme, netloc, path, params, query, fragment ]))
+ url = (urlunparse([ scheme, netloc, path, params, query, fragment ]))
if self._enc_params:
if urlparse(url).query:
@@ -322,6 +354,10 @@ class Request(object):
path = p.path
if not path:
path = '/'
+
+ # if is_py3:
+ path = quote(path.encode('utf-8'))
+
url.append(path)
query = p.query
@@ -329,9 +365,16 @@ class Request(object):
url.append('?')
url.append(query)
+ # print(url)
+
return ''.join(url)
+ def register_hook(self, event, hook):
+ """Properly register a hook."""
+
+ return self.hooks[event].append(hook)
+
def send(self, anyway=False, prefetch=False):
"""Sends the request. Returns True of successful, false if not.
@@ -344,29 +387,29 @@ class Request(object):
already been sent.
"""
+ # Build the URL
+ url = self.full_url
+
# Logging
if self.config.get('verbose'):
self.config.get('verbose').write('%s %s %s\n' % (
- datetime.now().isoformat(), self.method, self.url
+ datetime.now().isoformat(), self.method, url
))
- # Build the URL
- url = self.full_url
-
# Nottin' on you.
body = None
content_type = None
# Multi-part file uploads.
if self.files:
- if not isinstance(self.data, basestring):
+ if not isinstance(self.data, str):
try:
fields = self.data.copy()
except AttributeError:
fields = dict(self.data)
- for (k, v) in self.files.items():
+ for (k, v) in list(self.files.items()):
# support for explicit filename
if isinstance(v, (tuple, list)):
fn, fp = v
@@ -383,7 +426,7 @@ class Request(object):
if self.data:
body = self._enc_data
- if isinstance(self.data, basestring):
+ if isinstance(self.data, str):
content_type = None
else:
content_type = 'application/x-www-form-urlencoded'
@@ -392,12 +435,13 @@ class Request(object):
if (content_type) and (not 'content-type' in self.headers):
self.headers['Content-Type'] = content_type
-
if self.auth:
- auth_func, auth_args = self.auth
+ if isinstance(self.auth, tuple) and len(self.auth) == 2:
+ # special-case basic HTTP auth
+ self.auth = HTTPBasicAuth(*self.auth)
# Allow auth to make its changes.
- r = auth_func(self, *auth_args)
+ r = self.auth(self)
# Update self to reflect the auth changes.
self.__dict__.update(r.__dict__)
@@ -407,6 +451,12 @@ class Request(object):
if proxy:
conn = poolmanager.proxy_from_url(proxy)
+ _proxy = urlparse(proxy)
+ if '@' in _proxy.netloc:
+ auth, url = _proxy.netloc.split('@', 1)
+ self.proxy_auth = HTTPProxyAuth(*auth.split(':', 1))
+ r = self.proxy_auth(self)
+ self.__dict__.update(r.__dict__)
else:
# Check to see if keep_alive is allowed.
if self.config.get('keep_alive'):
@@ -414,6 +464,33 @@ class Request(object):
else:
conn = connectionpool.connection_from_url(url)
+ if url.startswith('https') and self.verify:
+
+ cert_loc = None
+
+ # Allow self-specified cert location.
+ if self.verify is not True:
+ cert_loc = self.verify
+
+
+ # Look for configuration.
+ if not cert_loc:
+ cert_loc = os.environ.get('REQUESTS_CA_BUNDLE')
+
+ # Curl compatibility.
+ if not cert_loc:
+ cert_loc = os.environ.get('CURL_CA_BUNDLE')
+
+ # Use the awesome certifi list.
+ if not cert_loc:
+ cert_loc = __import__('certifi').where()
+
+ conn.cert_reqs = 'CERT_REQUIRED'
+ conn.ca_certs = cert_loc
+ else:
+ conn.cert_reqs = 'CERT_NONE'
+ conn.ca_certs = None
+
if not self.sent or anyway:
if self.cookies:
@@ -422,8 +499,8 @@ class Request(object):
if 'cookie' not in self.headers:
# Simple cookie with our dict.
- c = oreos.monkeys.SimpleCookie()
- for (k, v) in self.cookies.items():
+ c = SimpleCookie()
+ for (k, v) in list(self.cookies.items()):
c[k] = v
# Turn it into a header.
@@ -433,31 +510,43 @@ class Request(object):
self.headers['Cookie'] = cookie_header
try:
- # Send the request.
- r = conn.urlopen(
- method=self.method,
- url=self.path_url,
- body=body,
- headers=self.headers,
- redirect=False,
- assert_same_host=False,
- preload_content=prefetch,
- decode_content=False,
- retries=self.config.get('max_retries', 0),
- timeout=self.timeout,
- )
-
-
- except MaxRetryError, e:
- if not self.config.get('safe_mode', False):
+ # The inner try .. except re-raises certain exceptions as
+ # internal exception types; the outer suppresses exceptions
+ # when safe mode is set.
+ try:
+ # Send the request.
+ r = conn.urlopen(
+ method=self.method,
+ url=self.path_url,
+ body=body,
+ headers=self.headers,
+ redirect=False,
+ assert_same_host=False,
+ preload_content=False,
+ decode_content=True,
+ retries=self.config.get('max_retries', 0),
+ timeout=self.timeout,
+ )
+ self.sent = True
+
+ except MaxRetryError as e:
raise ConnectionError(e)
- else:
- r = None
- except (_SSLError, _HTTPError), e:
- if not self.config.get('safe_mode', False):
+ except (_SSLError, _HTTPError) as e:
+ if self.verify and isinstance(e, _SSLError):
+ raise SSLError(e)
+
raise Timeout('Request timed out.')
+ except RequestException as e:
+ if self.config.get('safe_mode', False):
+ # In safe mode, catch the exception and attach it to
+ # a blank urllib3.HTTPResponse object.
+ r = HTTPResponse()
+ r.error = e
+ else:
+ raise
+
self._build_response(r)
# Response manipulation hook.
@@ -469,7 +558,11 @@ class Request(object):
# If prefetch is True, mark content as consumed.
if prefetch:
- self.response._content_consumed = True
+ # Save the response.
+ self.response.content
+
+ if self.config.get('danger_mode'):
+ self.response.raise_for_status()
return self.sent
@@ -524,6 +617,10 @@ class Response(object):
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
+ def __bool__(self):
+ """Returns true if :attr:`status_code` is 'OK'."""
+ return self.ok
+
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
return self.ok
@@ -537,7 +634,7 @@ class Response(object):
return True
- def iter_content(self, chunk_size=10 * 1024, decode_unicode=None):
+ def iter_content(self, chunk_size=10 * 1024, decode_unicode=False):
"""Iterates over the response data. This avoids reading the content
at once into memory for large responses. The chunk size is the number
of bytes it should read into memory. This is not necessarily the
@@ -556,13 +653,40 @@ class Response(object):
yield chunk
self._content_consumed = True
- gen = generate()
+ def generate_chunked():
+ resp = self.raw._original_response
+ fp = resp.fp
+ if resp.chunk_left is not None:
+ pending_bytes = resp.chunk_left
+ while pending_bytes:
+ chunk = fp.read(min(chunk_size, pending_bytes))
+ pending_bytes-=len(chunk)
+ yield chunk
+ fp.read(2) # throw away crlf
+ while 1:
+ #XXX correct line size? (httplib has 64kb, seems insane)
+ pending_bytes = fp.readline(40).strip()
+ pending_bytes = int(pending_bytes, 16)
+ if pending_bytes == 0:
+ break
+ while pending_bytes:
+ chunk = fp.read(min(chunk_size, pending_bytes))
+ pending_bytes-=len(chunk)
+ yield chunk
+ fp.read(2) # throw away crlf
+ self._content_consumed = True
+ fp.close()
- if 'gzip' in self.headers.get('content-encoding', ''):
- gen = stream_decode_gzip(gen)
- if decode_unicode is None:
- decode_unicode = self.config.get('decode_unicode')
+ if getattr(getattr(self.raw, '_original_response', None), 'chunked', False):
+ gen = generate_chunked()
+ else:
+ gen = generate()
+
+ if 'gzip' in self.headers.get('content-encoding', ''):
+ gen = stream_decompress(gen, mode='gzip')
+ elif 'deflate' in self.headers.get('content-encoding', ''):
+ gen = stream_decompress(gen, mode='deflate')
if decode_unicode:
gen = stream_decode_response_unicode(gen, self)
@@ -570,11 +694,44 @@ class Response(object):
return gen
+ def iter_lines(self, chunk_size=10 * 1024, decode_unicode=None):
+ """Iterates over the response data, one line at a time. This
+ avoids reading the content at once into memory for large
+ responses.
+ """
+
+ #TODO: why rstrip by default
+ pending = None
+
+ for chunk in self.iter_content(chunk_size, decode_unicode=decode_unicode):
+
+ if pending is not None:
+ chunk = pending + chunk
+ lines = chunk.splitlines(True)
+
+ for line in lines[:-1]:
+ yield line.rstrip()
+
+ # Save the last part of the chunk for next iteration, to keep full line together
+ # lines may be empty for the last chunk of a chunked response
+
+ if lines:
+ pending = lines[-1]
+ # If pending is a complete line, give it back
+ if pending[-1] == '\n':
+ yield pending.rstrip()
+ pending = None
+ else:
+ pending = None
+
+ # Yield the last line
+ if pending is not None:
+ yield pending.rstrip()
+
+
@property
def content(self):
- """Content of the response, in bytes or unicode
- (if available).
- """
+ """Content of the response, in bytes."""
if self._content is None:
# Read the contents.
@@ -587,33 +744,45 @@ class Response(object):
except AttributeError:
self._content = None
- content = self._content
+ self._content_consumed = True
+ return self._content
- # Decode GZip'd content.
- if 'gzip' in self.headers.get('content-encoding', ''):
- try:
- content = decode_gzip(self._content)
- except zlib.error:
- pass
- # Decode unicode content.
- if self.config.get('decode_unicode'):
+ @property
+ def text(self):
+ """Content of the response, in unicode.
- # Try charset from content-type
+ if Response.encoding is None and chardet module is available, encoding
+ will be guessed.
+ """
- if self.encoding:
- try:
- content = unicode(content, self.encoding)
- except UnicodeError:
- pass
+ # Try charset from content-type
+ content = None
+ encoding = self.encoding
- # Fall back:
+ # Fallback to auto-detected encoding if chardet is available.
+ if self.encoding is None:
try:
- content = unicode(content, self.encoding, errors='replace')
- except TypeError:
+ detected = chardet.detect(self.content) or {}
+ encoding = detected.get('encoding')
+
+ # Trust that chardet isn't available or something went terribly wrong.
+ except Exception:
+ pass
+
+ # Decode unicode from given encoding.
+ try:
+ content = str(self.content, encoding)
+ except (UnicodeError, TypeError):
+ pass
+
+ # Try to fall back:
+ if not content:
+ try:
+ content = str(content, encoding, errors='replace')
+ except (UnicodeError, TypeError):
pass
- self._content_consumed = True
return content
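The user-visible effect of the models rework: ``Response.content`` is always raw bytes, ``Response.text`` decodes it with the declared or chardet-guessed encoding, and ``iter_content``/``iter_lines`` stream without prefetching. A sketch, assuming requests 0.10.1 with certifi installed and illustrative URLs::

    import requests

    r = requests.get('https://github.com', verify=True)   # cert checked via certifi/CA bundle
    print(type(r.content))    # bytes (str on Python 2)
    print(r.encoding)         # e.g. 'utf-8', taken from the Content-Type header
    print(r.text[:60])        # unicode, decoded with r.encoding or chardet's guess

    # Stream a body line by line without loading it all at once.
    r = requests.get('http://httpbin.org/stream/3', prefetch=False)
    for line in r.iter_lines():
        print(line)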
diff --git a/requests/packages/oreos/monkeys.py b/requests/packages/oreos/monkeys.py
index 6be3074..2269e30 100644
--- a/requests/packages/oreos/monkeys.py
+++ b/requests/packages/oreos/monkeys.py
@@ -318,7 +318,7 @@ _Translator = {
'\375' : '\\375', '\376' : '\\376', '\377' : '\\377'
}
-_idmap = ''.join(chr(x) for x in xrange(256))
+_idmap = ''.join(chr(x) for x in range(256))
def _quote(str, LegalChars=_LegalChars,
idmap=_idmap, translate=string.translate):
diff --git a/requests/packages/urllib3/__init__.py b/requests/packages/urllib3/__init__.py
index 20b1fb4..5f70c56 100644
--- a/requests/packages/urllib3/__init__.py
+++ b/requests/packages/urllib3/__init__.py
@@ -1,5 +1,5 @@
# urllib3/__init__.py
-# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -10,7 +10,7 @@ urllib3 - Thread-safe connection pooling and re-using.
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
-__version__ = '1.0.2'
+__version__ = '1.1'
from .connectionpool import (
diff --git a/requests/packages/urllib3/_collections.py b/requests/packages/urllib3/_collections.py
index 00b2cd5..3cef081 100644
--- a/requests/packages/urllib3/_collections.py
+++ b/requests/packages/urllib3/_collections.py
@@ -1,5 +1,5 @@
# urllib3/_collections.py
-# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
diff --git a/requests/packages/urllib3/connectionpool.py b/requests/packages/urllib3/connectionpool.py
index 8b10dc7..52b1802 100644
--- a/requests/packages/urllib3/connectionpool.py
+++ b/requests/packages/urllib3/connectionpool.py
@@ -1,5 +1,5 @@
# urllib3/connectionpool.py
-# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -7,14 +7,27 @@
import logging
import socket
-
-from httplib import HTTPConnection, HTTPSConnection, HTTPException
-from Queue import Queue, Empty, Full
-from select import select
from socket import error as SocketError, timeout as SocketTimeout
-
try:
+ from select import poll, POLLIN
+except ImportError: # Doesn't exist on OSX and other platforms
+ from select import select
+ poll = False
+
+try: # Python 3
+ from http.client import HTTPConnection, HTTPSConnection, HTTPException
+ from http.client import HTTP_PORT, HTTPS_PORT
+except ImportError:
+ from httplib import HTTPConnection, HTTPSConnection, HTTPException
+ from httplib import HTTP_PORT, HTTPS_PORT
+
+try: # Python 3
+ from queue import Queue, Empty, Full
+except ImportError:
+ from Queue import Queue, Empty, Full
+
+try: # Compiled with SSL?
import ssl
BaseSSLError = ssl.SSLError
except ImportError:
@@ -22,21 +35,29 @@ except ImportError:
BaseSSLError = None
+from .packages.ssl_match_hostname import match_hostname, CertificateError
from .request import RequestMethods
from .response import HTTPResponse
-from .exceptions import (
- SSLError,
+from .exceptions import (SSLError,
MaxRetryError,
TimeoutError,
HostChangedError,
EmptyPoolError,
)
+from .packages.ssl_match_hostname import match_hostname, CertificateError
+from .packages import six
+
+xrange = six.moves.xrange
log = logging.getLogger(__name__)
_Default = object()
+port_by_scheme = {
+ 'http': HTTP_PORT,
+ 'https': HTTPS_PORT,
+}
## Connection objects (extension of httplib)
@@ -70,7 +91,8 @@ class VerifiedHTTPSConnection(HTTPSConnection):
self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
cert_reqs=self.cert_reqs,
ca_certs=self.ca_certs)
-
+ if self.ca_certs:
+ match_hostname(self.sock.getpeercert(), self.host)
## Pool objects
@@ -79,7 +101,16 @@ class ConnectionPool(object):
Base class for all connection pools, such as
:class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
"""
- pass
+
+ scheme = None
+
+ def __init__(self, host, port=None):
+ self.host = host
+ self.port = port
+
+ def __str__(self):
+ return '%s(host=%r, port=%r)' % (type(self).__name__,
+ self.host, self.port)
class HTTPConnectionPool(ConnectionPool, RequestMethods):
@@ -167,14 +198,14 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
conn = self.pool.get(block=self.block, timeout=timeout)
# If this is a persistent connection, check if it got disconnected
- if conn and conn.sock and select([conn.sock], [], [], 0.0)[0]:
- # Either data is buffered (bad), or the connection is dropped.
+ if conn and conn.sock and is_connection_dropped(conn):
log.info("Resetting dropped connection: %s" % self.host)
conn.close()
except Empty:
if self.block:
- raise EmptyPoolError("Pool reached maximum size and no more "
+ raise EmptyPoolError(self,
+ "Pool reached maximum size and no more "
"connections are allowed.")
pass # Oh well, we'll create a new connection then
@@ -210,6 +241,8 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
if timeout is _Default:
timeout = self.timeout
+ conn.timeout = timeout # This only does anything in Py26+
+
conn.request(method, url, **httplib_request_kw)
conn.sock.settimeout(timeout)
httplib_response = conn.getresponse()
@@ -225,11 +258,17 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
def is_same_host(self, url):
"""
Check if the given ``url`` is a member of the same host as this
- conncetion pool.
+ connection pool.
"""
# TODO: Add optional support for socket.gethostbyname checking.
+ scheme, host, port = get_host(url)
+
+ if self.port and not port:
+ # Use explicit default port for comparison when none is given.
+ port = port_by_scheme.get(scheme)
+
return (url.startswith('/') or
- get_host(url) == (self.scheme, self.host, self.port))
+ (scheme, host, port) == (self.scheme, self.host, self.port))
def urlopen(self, method, url, body=None, headers=None, retries=3,
redirect=True, assert_same_host=True, timeout=_Default,
@@ -244,6 +283,13 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
More commonly, it's appropriate to use a convenience method provided
by :class:`.RequestMethods`, such as :meth:`.request`.
+ .. note::
+
+ `release_conn` will only behave as expected if
+ `preload_content=False` because we want to make
+ `preload_content=False` the default behaviour someday soon without
+ breaking backwards compatibility.
+
:param method:
HTTP request method (such as GET, POST, PUT, etc.)
@@ -279,10 +325,12 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
:param release_conn:
If False, then the urlopen call will not release the connection
- back into the pool once a response is received. This is useful if
- you're not preloading the response's content immediately. You will
- need to call ``r.release_conn()`` on the response ``r`` to return
- the connection back into the pool. If None, it takes the value of
+ back into the pool once a response is received (but will release if
+ you read the entire contents of the response such as when
+ `preload_content=True`). This is useful if you're not preloading
+ the response's content immediately. You will need to call
+ ``r.release_conn()`` on the response ``r`` to return the connection
+ back into the pool. If None, it takes the value of
``response_kw.get('preload_content', True)``.
:param \**response_kw:
@@ -293,7 +341,10 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
headers = self.headers
if retries < 0:
- raise MaxRetryError("Max retries exceeded for url: %s" % url)
+ raise MaxRetryError(self, url)
+
+ if timeout is _Default:
+ timeout = self.timeout
if release_conn is None:
release_conn = response_kw.get('preload_content', True)
@@ -304,8 +355,7 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
if self.port:
host = "%s:%d" % (host, self.port)
- raise HostChangedError("Connection pool with host '%s' tried to "
- "open a foreign host: %s" % (host, url))
+ raise HostChangedError(self, url, retries - 1)
conn = None
@@ -336,18 +386,29 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
# ``response.release_conn()`` is called (implicitly by
# ``response.read()``)
- except (SocketTimeout, Empty), e:
- # Timed out either by socket or queue
- raise TimeoutError("Request timed out after %s seconds" %
- self.timeout)
+ except Empty as e:
+ # Timed out by queue
+ raise TimeoutError(self, "Request timed out. (pool_timeout=%s)" %
+ pool_timeout)
+
+ except SocketTimeout as e:
+ # Timed out by socket
+ raise TimeoutError(self, "Request timed out. (timeout=%s)" %
+ timeout)
- except (BaseSSLError), e:
+ except BaseSSLError as e:
# SSL certificate error
raise SSLError(e)
- except (HTTPException, SocketError), e:
+ except CertificateError as e:
+ # Name mismatch
+ raise SSLError(e)
+
+ except (HTTPException, SocketError) as e:
# Connection broken, discard. It will be replaced next _get_conn().
conn = None
+ # This is necessary so we can access e below
+ err = e
finally:
if conn and release_conn:
@@ -356,19 +417,16 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
if not conn:
log.warn("Retrying (%d attempts remain) after connection "
- "broken by '%r': %s" % (retries, e, url))
+ "broken by '%r': %s" % (retries, err, url))
return self.urlopen(method, url, body, headers, retries - 1,
redirect, assert_same_host) # Try again
- # Handle redirection
- if (redirect and
- response.status in [301, 302, 303, 307] and
- 'location' in response.headers): # Redirect, retry
- log.info("Redirecting %s -> %s" %
- (url, response.headers.get('location')))
- return self.urlopen(method, response.headers.get('location'), body,
- headers, retries - 1, redirect,
- assert_same_host)
+ # Handle redirect?
+ redirect_location = redirect and response.get_redirect_location()
+ if redirect_location:
+ log.info("Redirecting %s -> %s" % (url, redirect_location))
+ return self.urlopen(method, redirect_location, body, headers,
+ retries - 1, redirect, assert_same_host)
return response
@@ -488,10 +546,12 @@ def get_host(url):
# simplified for our needs.
port = None
scheme = 'http'
- if '//' in url:
+ if '://' in url:
scheme, url = url.split('://', 1)
if '/' in url:
url, _path = url.split('/', 1)
+ if '@' in url:
+ _auth, url = url.split('@', 1)
if ':' in url:
url, port = url.split(':', 1)
port = int(port)
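
For reference, the parsing order the patched get_host() now follows is scheme, then path, then userinfo, then port. A standalone sketch of those steps (the example URLs are illustrative only, not taken from the library):

    def get_host_sketch(url):
        # Mirrors the hunk above: scheme ('://'), path ('/'), userinfo ('@'), port (':').
        port = None
        scheme = 'http'
        if '://' in url:
            scheme, url = url.split('://', 1)
        if '/' in url:
            url, _path = url.split('/', 1)
        if '@' in url:
            _auth, url = url.split('@', 1)
        if ':' in url:
            url, port = url.split(':', 1)
            port = int(port)
        return scheme, url, port

    assert get_host_sketch('https://user:pass@example.com:8443/path') == ('https', 'example.com', 8443)
    assert get_host_sketch('example.com/index.html') == ('http', 'example.com', None)
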
@@ -523,3 +583,22 @@ def connection_from_url(url, **kw):
return HTTPSConnectionPool(host, port=port, **kw)
else:
return HTTPConnectionPool(host, port=port, **kw)
+
+
+def is_connection_dropped(conn):
+ """
+ Returns True if the connection is dropped and should be closed.
+
+ :param conn:
+ ``HTTPConnection`` object.
+ """
+ if not poll:
+ return select([conn.sock], [], [], 0.0)[0]
+
+ # This version is better on platforms that support it.
+ p = poll()
+ p.register(conn.sock, POLLIN)
+ for (fno, ev) in p.poll(0.0):
+ if fno == conn.sock.fileno():
+ # Either data is buffered (bad), or the connection is dropped.
+ return True
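
The new is_connection_dropped() helper prefers poll() where the platform provides it and falls back to select(). A self-contained sketch of the same zero-timeout check against a throwaway socket pair (assumes a POSIX-style platform where socket.socketpair() exists):

    import select
    import socket

    def connection_dropped(sock):
        """Return True if data is buffered (bad) or the peer has closed the socket."""
        if not hasattr(select, 'poll'):
            return bool(select.select([sock], [], [], 0.0)[0])
        p = select.poll()
        p.register(sock, select.POLLIN)
        return any(fd == sock.fileno() for fd, _event in p.poll(0.0))

    a, b = socket.socketpair()
    print(connection_dropped(a))   # False: peer is idle and still open
    b.close()
    print(connection_dropped(a))   # True: the socket is readable (EOF), so the pool resets it
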
diff --git a/requests/packages/urllib3/exceptions.py b/requests/packages/urllib3/exceptions.py
index 69f459b..0bffeb4 100644
--- a/requests/packages/urllib3/exceptions.py
+++ b/requests/packages/urllib3/exceptions.py
@@ -1,35 +1,54 @@
# urllib3/exceptions.py
-# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-## Exceptions
+## Base Exceptions
class HTTPError(Exception):
"Base exception used by this module."
pass
-class SSLError(Exception):
+class PoolError(HTTPError):
+ "Base exception for errors caused within a pool."
+ def __init__(self, pool, message):
+ self.pool = pool
+ HTTPError.__init__(self, "%s: %s" % (pool, message))
+
+
+class SSLError(HTTPError):
"Raised when SSL certificate fails in an HTTPS connection."
pass
-class MaxRetryError(HTTPError):
- "Raised when the maximum number of retries is exceeded."
- pass
+## Leaf Exceptions
+class MaxRetryError(PoolError):
+ "Raised when the maximum number of retries is exceeded."
+ def __init__(self, pool, url):
+ PoolError.__init__(self, pool,
+ "Max retries exceeded with url: %s" % url)
-class TimeoutError(HTTPError):
- "Raised when a socket timeout occurs."
- pass
+ self.url = url
-class HostChangedError(HTTPError):
+class HostChangedError(PoolError):
"Raised when an existing pool gets a request for a foreign host."
+ def __init__(self, pool, url, retries=3):
+ PoolError.__init__(self, pool,
+ "Tried to open a foreign host with url: %s" % url)
+
+ self.url = url
+ self.retries = retries
+
+
+class TimeoutError(PoolError):
+ "Raised when a socket timeout occurs."
pass
-class EmptyPoolError(HTTPError):
+
+class EmptyPoolError(PoolError):
"Raised when a pool runs out of connections and no more are allowed."
pass
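
With PoolError as the new base class, pool-related failures carry the offending pool and URL as attributes rather than only a pre-formatted message. A minimal re-statement of the pattern with standalone classes (not the vendored ones), showing what callers can now inspect:

    class PoolError(Exception):
        "Base exception for errors caused within a pool."
        def __init__(self, pool, message):
            self.pool = pool
            Exception.__init__(self, "%s: %s" % (pool, message))

    class MaxRetryError(PoolError):
        "Raised when the maximum number of retries is exceeded."
        def __init__(self, pool, url):
            PoolError.__init__(self, pool, "Max retries exceeded with url: %s" % url)
            self.url = url

    try:
        # The pool argument here is just a descriptive string stand-in.
        raise MaxRetryError('HTTPConnectionPool(host=example.com)', '/unreachable')
    except MaxRetryError as e:
        print("%s -> %s" % (e.pool, e.url))   # callers can now inspect both attributes
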
diff --git a/requests/packages/urllib3/filepost.py b/requests/packages/urllib3/filepost.py
index 2ffea8b..e1ec8af 100644
--- a/requests/packages/urllib3/filepost.py
+++ b/requests/packages/urllib3/filepost.py
@@ -1,18 +1,21 @@
# urllib3/filepost.py
-# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
import codecs
-import mimetools
import mimetypes
try:
- from cStringIO import StringIO
+ from mimetools import choose_boundary
except ImportError:
- from StringIO import StringIO # pylint: disable-msg=W0404
+ from .packages.mimetools_choose_boundary import choose_boundary
+from io import BytesIO
+
+from .packages import six
+from .packages.six import b
writer = codecs.lookup('utf-8')[3]
@@ -35,37 +38,37 @@ def encode_multipart_formdata(fields, boundary=None):
If not specified, then a random boundary will be generated using
:func:`mimetools.choose_boundary`.
"""
- body = StringIO()
+ body = BytesIO()
if boundary is None:
- boundary = mimetools.choose_boundary()
+ boundary = choose_boundary()
- for fieldname, value in fields.iteritems():
- body.write('--%s\r\n' % (boundary))
+ for fieldname, value in six.iteritems(fields):
+ body.write(b('--%s\r\n' % (boundary)))
if isinstance(value, tuple):
filename, data = value
writer(body).write('Content-Disposition: form-data; name="%s"; '
'filename="%s"\r\n' % (fieldname, filename))
- body.write('Content-Type: %s\r\n\r\n' %
- (get_content_type(filename)))
+ body.write(b('Content-Type: %s\r\n\r\n' %
+ (get_content_type(filename))))
else:
data = value
writer(body).write('Content-Disposition: form-data; name="%s"\r\n'
% (fieldname))
- body.write('Content-Type: text/plain\r\n\r\n')
+ body.write(b'Content-Type: text/plain\r\n\r\n')
if isinstance(data, int):
data = str(data) # Backwards compatibility
- if isinstance(data, unicode):
+ if isinstance(data, six.text_type):
writer(body).write(data)
else:
body.write(data)
- body.write('\r\n')
+ body.write(b'\r\n')
- body.write('--%s--\r\n' % (boundary))
+ body.write(b('--%s--\r\n' % (boundary)))
- content_type = 'multipart/form-data; boundary=%s' % boundary
+ content_type = b('multipart/form-data; boundary=%s' % boundary)
return body.getvalue(), content_type
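
encode_multipart_formdata() now assembles its body in a BytesIO and returns bytes, so the same code path serves Python 2 and 3. A compressed sketch of the wire format it emits, using only the standard library (the fixed boundary and field names are illustrative stand-ins for choose_boundary() and real form data):

    import codecs
    from io import BytesIO

    writer = codecs.lookup('utf-8')[3]   # streaming UTF-8 encoder, as in the module above

    def encode_form(fields, boundary='illustrative-boundary'):
        body = BytesIO()
        for name, value in fields.items():
            body.write(('--%s\r\n' % boundary).encode('latin-1'))
            writer(body).write('Content-Disposition: form-data; name="%s"\r\n' % name)
            body.write(b'Content-Type: text/plain\r\n\r\n')
            writer(body).write(value)
            body.write(b'\r\n')
        body.write(('--%s--\r\n' % boundary).encode('latin-1'))
        content_type = 'multipart/form-data; boundary=%s' % boundary
        return body.getvalue(), content_type

    body, ctype = encode_form({'user': 'kennethreitz'})
    print(ctype)
    print(body.decode('utf-8'))
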
diff --git a/requests/packages/urllib3/packages/__init__.py b/requests/packages/urllib3/packages/__init__.py
new file mode 100644
index 0000000..37e8351
--- /dev/null
+++ b/requests/packages/urllib3/packages/__init__.py
@@ -0,0 +1,4 @@
+from __future__ import absolute_import
+
+from . import ssl_match_hostname
+
diff --git a/requests/packages/urllib3/packages/mimetools_choose_boundary/__init__.py b/requests/packages/urllib3/packages/mimetools_choose_boundary/__init__.py
new file mode 100644
index 0000000..a0109ab
--- /dev/null
+++ b/requests/packages/urllib3/packages/mimetools_choose_boundary/__init__.py
@@ -0,0 +1,47 @@
+"""The function mimetools.choose_boundary() from Python 2.7, which seems to
+have disappeared in Python 3 (although email.generator._make_boundary() might
+work as a replacement?).
+
+Tweaked to use lock from threading rather than thread.
+"""
+import os
+from threading import Lock
+_counter_lock = Lock()
+
+_counter = 0
+def _get_next_counter():
+ global _counter
+ with _counter_lock:
+ _counter += 1
+ return _counter
+
+_prefix = None
+
+def choose_boundary():
+ """Return a string usable as a multipart boundary.
+
+ The string chosen is unique within a single program run, and
+ incorporates the user id (if available), process id (if available),
+ and current time. So it's very unlikely the returned string appears
+ in message text, but there's no guarantee.
+
+ The boundary contains dots so you have to quote it in the header."""
+
+ global _prefix
+ import time
+ if _prefix is None:
+ import socket
+ try:
+ hostid = socket.gethostbyname(socket.gethostname())
+ except socket.gaierror:
+ hostid = '127.0.0.1'
+ try:
+ uid = repr(os.getuid())
+ except AttributeError:
+ uid = '1'
+ try:
+ pid = repr(os.getpid())
+ except AttributeError:
+ pid = '1'
+ _prefix = hostid + '.' + uid + '.' + pid
+ return "%s.%.3f.%d" % (_prefix, time.time(), _get_next_counter())
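
In other words, the vendored helper re-creates mimetools.choose_boundary() for Python 3: a host/uid/pid prefix plus a timestamp and a process-wide counter. A condensed sketch of the format it produces (the host id here is a hard-coded placeholder rather than a gethostbyname() lookup):

    import os
    import time
    from threading import Lock

    _counter = 0
    _counter_lock = Lock()

    def _next_counter():
        global _counter
        with _counter_lock:
            _counter += 1
            return _counter

    def choose_boundary_sketch():
        # hostid.uid.pid, then the current time and a unique per-process counter.
        uid = os.getuid() if hasattr(os, 'getuid') else 1
        prefix = '127.0.0.1.%d.%d' % (uid, os.getpid())
        return '%s.%.3f.%d' % (prefix, time.time(), _next_counter())

    print(choose_boundary_sketch())   # e.g. 127.0.0.1.1000.4242.1327359123.456.1
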
diff --git a/requests/packages/urllib3/packages/six.py b/requests/packages/urllib3/packages/six.py
new file mode 100644
index 0000000..a64f6fb
--- /dev/null
+++ b/requests/packages/urllib3/packages/six.py
@@ -0,0 +1,372 @@
+"""Utilities for writing code that runs on Python 2 and 3"""
+
+#Copyright (c) 2010-2011 Benjamin Peterson
+
+#Permission is hereby granted, free of charge, to any person obtaining a copy of
+#this software and associated documentation files (the "Software"), to deal in
+#the Software without restriction, including without limitation the rights to
+#use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
+#the Software, and to permit persons to whom the Software is furnished to do so,
+#subject to the following conditions:
+
+#The above copyright notice and this permission notice shall be included in all
+#copies or substantial portions of the Software.
+
+#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
+#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
+#FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
+#COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
+#IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
+#CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+import operator
+import sys
+import types
+
+__author__ = "Benjamin Peterson <benjamin@python.org>"
+__version__ = "1.1.0"
+
+
+# True if we are running on Python 3.
+PY3 = sys.version_info[0] == 3
+
+if PY3:
+ string_types = str,
+ integer_types = int,
+ class_types = type,
+ text_type = str
+ binary_type = bytes
+
+ MAXSIZE = sys.maxsize
+else:
+ string_types = basestring,
+ integer_types = (int, long)
+ class_types = (type, types.ClassType)
+ text_type = unicode
+ binary_type = str
+
+ # It's possible to have sizeof(long) != sizeof(Py_ssize_t).
+ class X(object):
+ def __len__(self):
+ return 1 << 31
+ try:
+ len(X())
+ except OverflowError:
+ # 32-bit
+ MAXSIZE = int((1 << 31) - 1)
+ else:
+ # 64-bit
+ MAXSIZE = int((1 << 63) - 1)
+ del X
+
+
+def _add_doc(func, doc):
+ """Add documentation to a function."""
+ func.__doc__ = doc
+
+
+def _import_module(name):
+ """Import module, returning the module after the last dot."""
+ __import__(name)
+ return sys.modules[name]
+
+
+class _LazyDescr(object):
+
+ def __init__(self, name):
+ self.name = name
+
+ def __get__(self, obj, tp):
+ result = self._resolve()
+ setattr(obj, self.name, result)
+ # This is a bit ugly, but it avoids running this again.
+ delattr(tp, self.name)
+ return result
+
+
+class MovedModule(_LazyDescr):
+
+ def __init__(self, name, old, new=None):
+ super(MovedModule, self).__init__(name)
+ if PY3:
+ if new is None:
+ new = name
+ self.mod = new
+ else:
+ self.mod = old
+
+ def _resolve(self):
+ return _import_module(self.mod)
+
+
+class MovedAttribute(_LazyDescr):
+
+ def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None):
+ super(MovedAttribute, self).__init__(name)
+ if PY3:
+ if new_mod is None:
+ new_mod = name
+ self.mod = new_mod
+ if new_attr is None:
+ if old_attr is None:
+ new_attr = name
+ else:
+ new_attr = old_attr
+ self.attr = new_attr
+ else:
+ self.mod = old_mod
+ if old_attr is None:
+ old_attr = name
+ self.attr = old_attr
+
+ def _resolve(self):
+ module = _import_module(self.mod)
+ return getattr(module, self.attr)
+
+
+
+class _MovedItems(types.ModuleType):
+ """Lazy loading of moved objects"""
+
+
+_moved_attributes = [
+ MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"),
+ MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"),
+ MovedAttribute("map", "itertools", "builtins", "imap", "map"),
+ MovedAttribute("reload_module", "__builtin__", "imp", "reload"),
+ MovedAttribute("reduce", "__builtin__", "functools"),
+ MovedAttribute("StringIO", "StringIO", "io"),
+ MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"),
+ MovedAttribute("zip", "itertools", "builtins", "izip", "zip"),
+
+ MovedModule("builtins", "__builtin__"),
+ MovedModule("configparser", "ConfigParser"),
+ MovedModule("copyreg", "copy_reg"),
+ MovedModule("http_cookiejar", "cookielib", "http.cookiejar"),
+ MovedModule("http_cookies", "Cookie", "http.cookies"),
+ MovedModule("html_entities", "htmlentitydefs", "html.entities"),
+ MovedModule("html_parser", "HTMLParser", "html.parser"),
+ MovedModule("http_client", "httplib", "http.client"),
+ MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"),
+ MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"),
+ MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"),
+ MovedModule("cPickle", "cPickle", "pickle"),
+ MovedModule("queue", "Queue"),
+ MovedModule("reprlib", "repr"),
+ MovedModule("socketserver", "SocketServer"),
+ MovedModule("tkinter", "Tkinter"),
+ MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"),
+ MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"),
+ MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"),
+ MovedModule("tkinter_tix", "Tix", "tkinter.tix"),
+ MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"),
+ MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"),
+ MovedModule("tkinter_colorchooser", "tkColorChooser",
+ "tkinter.colorchooser"),
+ MovedModule("tkinter_commondialog", "tkCommonDialog",
+ "tkinter.commondialog"),
+ MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"),
+ MovedModule("tkinter_font", "tkFont", "tkinter.font"),
+ MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"),
+ MovedModule("tkinter_tksimpledialog", "tkSimpleDialog",
+ "tkinter.simpledialog"),
+ MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"),
+ MovedModule("winreg", "_winreg"),
+]
+for attr in _moved_attributes:
+ setattr(_MovedItems, attr.name, attr)
+del attr
+
+moves = sys.modules["six.moves"] = _MovedItems("moves")
+
+
+def add_move(move):
+ """Add an item to six.moves."""
+ setattr(_MovedItems, move.name, move)
+
+
+def remove_move(name):
+ """Remove item from six.moves."""
+ try:
+ delattr(_MovedItems, name)
+ except AttributeError:
+ try:
+ del moves.__dict__[name]
+ except KeyError:
+ raise AttributeError("no such move, %r" % (name,))
+
+
+if PY3:
+ _meth_func = "__func__"
+ _meth_self = "__self__"
+
+ _func_code = "__code__"
+ _func_defaults = "__defaults__"
+
+ _iterkeys = "keys"
+ _itervalues = "values"
+ _iteritems = "items"
+else:
+ _meth_func = "im_func"
+ _meth_self = "im_self"
+
+ _func_code = "func_code"
+ _func_defaults = "func_defaults"
+
+ _iterkeys = "iterkeys"
+ _itervalues = "itervalues"
+ _iteritems = "iteritems"
+
+
+if PY3:
+ def get_unbound_function(unbound):
+ return unbound
+
+
+ advance_iterator = next
+
+ def callable(obj):
+ return any("__call__" in klass.__dict__ for klass in type(obj).__mro__)
+else:
+ def get_unbound_function(unbound):
+ return unbound.im_func
+
+
+ def advance_iterator(it):
+ return it.next()
+
+ callable = callable
+_add_doc(get_unbound_function,
+ """Get the function out of a possibly unbound function""")
+
+
+get_method_function = operator.attrgetter(_meth_func)
+get_method_self = operator.attrgetter(_meth_self)
+get_function_code = operator.attrgetter(_func_code)
+get_function_defaults = operator.attrgetter(_func_defaults)
+
+
+def iterkeys(d):
+ """Return an iterator over the keys of a dictionary."""
+ return getattr(d, _iterkeys)()
+
+def itervalues(d):
+ """Return an iterator over the values of a dictionary."""
+ return getattr(d, _itervalues)()
+
+def iteritems(d):
+ """Return an iterator over the (key, value) pairs of a dictionary."""
+ return getattr(d, _iteritems)()
+
+
+if PY3:
+ def b(s):
+ return s.encode("latin-1")
+ def u(s):
+ return s
+ if sys.version_info[1] <= 1:
+ def int2byte(i):
+ return bytes((i,))
+ else:
+ # This is about 2x faster than the implementation above on 3.2+
+ int2byte = operator.methodcaller("to_bytes", 1, "big")
+ import io
+ StringIO = io.StringIO
+ BytesIO = io.BytesIO
+else:
+ def b(s):
+ return s
+ def u(s):
+ return unicode(s, "unicode_escape")
+ int2byte = chr
+ import StringIO
+ StringIO = BytesIO = StringIO.StringIO
+_add_doc(b, """Byte literal""")
+_add_doc(u, """Text literal""")
+
+
+if PY3:
+ import builtins
+ exec_ = getattr(builtins, "exec")
+
+
+ def reraise(tp, value, tb=None):
+ if value.__traceback__ is not tb:
+ raise value.with_traceback(tb)
+ raise value
+
+
+ print_ = getattr(builtins, "print")
+ del builtins
+
+else:
+ def exec_(code, globs=None, locs=None):
+ """Execute code in a namespace."""
+ if globs is None:
+ frame = sys._getframe(1)
+ globs = frame.f_globals
+ if locs is None:
+ locs = frame.f_locals
+ del frame
+ elif locs is None:
+ locs = globs
+ exec("""exec code in globs, locs""")
+
+
+ exec_("""def reraise(tp, value, tb=None):
+ raise tp, value, tb
+""")
+
+
+ def print_(*args, **kwargs):
+ """The new-style print function."""
+ fp = kwargs.pop("file", sys.stdout)
+ if fp is None:
+ return
+ def write(data):
+ if not isinstance(data, basestring):
+ data = str(data)
+ fp.write(data)
+ want_unicode = False
+ sep = kwargs.pop("sep", None)
+ if sep is not None:
+ if isinstance(sep, unicode):
+ want_unicode = True
+ elif not isinstance(sep, str):
+ raise TypeError("sep must be None or a string")
+ end = kwargs.pop("end", None)
+ if end is not None:
+ if isinstance(end, unicode):
+ want_unicode = True
+ elif not isinstance(end, str):
+ raise TypeError("end must be None or a string")
+ if kwargs:
+ raise TypeError("invalid keyword arguments to print()")
+ if not want_unicode:
+ for arg in args:
+ if isinstance(arg, unicode):
+ want_unicode = True
+ break
+ if want_unicode:
+ newline = unicode("\n")
+ space = unicode(" ")
+ else:
+ newline = "\n"
+ space = " "
+ if sep is None:
+ sep = space
+ if end is None:
+ end = newline
+ for i, arg in enumerate(args):
+ if i:
+ write(sep)
+ write(arg)
+ write(end)
+
+_add_doc(reraise, """Reraise an exception.""")
+
+
+def with_metaclass(meta, base=object):
+ """Create a base class with a metaclass."""
+ return meta("NewBase", (base,), {})
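
six gives the vendored urllib3 one spelling for the 2/3 differences this patch touches: byte literals, dict iteration, and the text/binary type split. A short usage sketch, assuming the bundled copy is importable at the path added here (a standalone `six` install exposes the same names):

    from requests.packages.urllib3.packages import six
    from requests.packages.urllib3.packages.six import b, iteritems

    fields = {'name': 'value'}

    for key, val in iteritems(fields):        # dict.iteritems() on 2.x, dict.items() on 3.x
        print('%s=%s' % (key, val))

    raw = b('--boundary\r\n')                 # byte string on both major versions
    assert isinstance(raw, six.binary_type)

    assert isinstance(six.u('f\u00f8\u00f8'), six.text_type)
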
diff --git a/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py b/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
new file mode 100644
index 0000000..9560b04
--- /dev/null
+++ b/requests/packages/urllib3/packages/ssl_match_hostname/__init__.py
@@ -0,0 +1,61 @@
+"""The match_hostname() function from Python 3.2, essential when using SSL."""
+
+import re
+
+__version__ = '3.2.2'
+
+class CertificateError(ValueError):
+ pass
+
+def _dnsname_to_pat(dn):
+ pats = []
+ for frag in dn.split(r'.'):
+ if frag == '*':
+ # When '*' is a fragment by itself, it matches a non-empty dotless
+ # fragment.
+ pats.append('[^.]+')
+ else:
+ # Otherwise, '*' matches any dotless fragment.
+ frag = re.escape(frag)
+ pats.append(frag.replace(r'\*', '[^.]*'))
+ return re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE)
+
+def match_hostname(cert, hostname):
+ """Verify that *cert* (in decoded format as returned by
+ SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 rules
+ are mostly followed, but IP addresses are not accepted for *hostname*.
+
+ CertificateError is raised on failure. On success, the function
+ returns nothing.
+ """
+ if not cert:
+ raise ValueError("empty or no certificate")
+ dnsnames = []
+ san = cert.get('subjectAltName', ())
+ for key, value in san:
+ if key == 'DNS':
+ if _dnsname_to_pat(value).match(hostname):
+ return
+ dnsnames.append(value)
+ if not dnsnames:
+ # The subject is only checked when there is no dNSName entry
+ # in subjectAltName
+ for sub in cert.get('subject', ()):
+ for key, value in sub:
+ # XXX according to RFC 2818, the most specific Common Name
+ # must be used.
+ if key == 'commonName':
+ if _dnsname_to_pat(value).match(hostname):
+ return
+ dnsnames.append(value)
+ if len(dnsnames) > 1:
+ raise CertificateError("hostname %r "
+ "doesn't match either of %s"
+ % (hostname, ', '.join(map(repr, dnsnames))))
+ elif len(dnsnames) == 1:
+ raise CertificateError("hostname %r "
+ "doesn't match %r"
+ % (hostname, dnsnames[0]))
+ else:
+ raise CertificateError("no appropriate commonName or "
+ "subjectAltName fields were found")
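
match_hostname() compares the peer certificate's subjectAltName entries (falling back to commonName) against the requested host, honouring RFC 2818-style wildcards. A small driver using a hand-built cert dict in the shape SSLSocket.getpeercert() returns, assuming the vendored import path added above:

    from requests.packages.urllib3.packages.ssl_match_hostname import (
        CertificateError, match_hostname)

    cert = {'subjectAltName': (('DNS', '*.example.com'), ('DNS', 'example.com'))}

    match_hostname(cert, 'www.example.com')   # wildcard matches; returns None on success

    try:
        match_hostname(cert, 'www.attacker.net')
    except CertificateError as e:
        print(e)   # hostname 'www.attacker.net' doesn't match either of ...
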
diff --git a/requests/packages/urllib3/poolmanager.py b/requests/packages/urllib3/poolmanager.py
index c08e327..f194b2e 100644
--- a/requests/packages/urllib3/poolmanager.py
+++ b/requests/packages/urllib3/poolmanager.py
@@ -1,32 +1,27 @@
# urllib3/poolmanager.py
-# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
+import logging
+
from ._collections import RecentlyUsedContainer
-from .connectionpool import (
- HTTPConnectionPool, HTTPSConnectionPool,
- get_host, connection_from_url,
-)
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+from .connectionpool import get_host, connection_from_url, port_by_scheme
+from .exceptions import HostChangedError
+from .request import RequestMethods
__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
-from .request import RequestMethods
-from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
-
-
pool_classes_by_scheme = {
'http': HTTPConnectionPool,
'https': HTTPSConnectionPool,
}
-port_by_scheme = {
- 'http': 80,
- 'https': 443,
-}
+log = logging.getLogger(__name__)
class PoolManager(RequestMethods):
@@ -105,7 +100,12 @@ class PoolManager(RequestMethods):
:class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
"""
conn = self.connection_from_url(url)
- return conn.urlopen(method, url, assert_same_host=False, **kw)
+ try:
+ return conn.urlopen(method, url, **kw)
+
+ except HostChangedError as e:
+ kw['retries'] = e.retries # Persist retries countdown
+ return self.urlopen(method, e.url, **kw)
class ProxyManager(RequestMethods):
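
PoolManager.urlopen() no longer forces assert_same_host=False; it lets the per-host pool raise HostChangedError and re-dispatches the request to whichever pool owns the new host, carrying the remaining retry count along. A toy re-statement of that hand-off (the pool, manager, and URLs are simplified stand-ins, not the real classes):

    class HostChangedError(Exception):
        def __init__(self, url, retries):
            self.url, self.retries = url, retries

    class FakePool(object):
        """Stands in for an HTTPConnectionPool bound to one host."""
        def __init__(self, host):
            self.host = host
        def urlopen(self, method, url, retries=3):
            if 'redirect' in url:
                # Pretend the server redirected us to a foreign host.
                raise HostChangedError('http://other.example/final', retries - 1)
            return 'response from %s (retries left: %d)' % (self.host, retries)

    class Manager(object):
        def connection_from_url(self, url):
            host = url.split('://', 1)[-1].split('/', 1)[0]
            return FakePool(host)
        def urlopen(self, method, url, **kw):
            pool = self.connection_from_url(url)
            try:
                return pool.urlopen(method, url, **kw)
            except HostChangedError as e:
                kw['retries'] = e.retries        # persist the retries countdown
                return self.urlopen(method, e.url, **kw)

    print(Manager().urlopen('GET', 'http://first.example/redirect'))
    # -> response from other.example (retries left: 2)
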
diff --git a/requests/packages/urllib3/request.py b/requests/packages/urllib3/request.py
index a7e0b5d..5ea26a0 100644
--- a/requests/packages/urllib3/request.py
+++ b/requests/packages/urllib3/request.py
@@ -1,11 +1,13 @@
# urllib3/request.py
-# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
-
-from urllib import urlencode
+try:
+ from urllib.parse import urlencode
+except ImportError:
+ from urllib import urlencode
from .filepost import encode_multipart_formdata
diff --git a/requests/packages/urllib3/response.py b/requests/packages/urllib3/response.py
index 4cd15c1..e023970 100644
--- a/requests/packages/urllib3/response.py
+++ b/requests/packages/urllib3/response.py
@@ -1,5 +1,5 @@
# urllib3/response.py
-# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+# Copyright 2008-2012 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
#
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
@@ -8,21 +8,22 @@ import gzip
import logging
import zlib
+from io import BytesIO
-try:
- from cStringIO import StringIO
-except ImportError:
- from StringIO import StringIO # pylint: disable-msg=W0404
+from .exceptions import HTTPError
-from .exceptions import HTTPError
+try:
+ basestring = basestring
+except NameError: # Python 3
+ basestring = (str, bytes)
log = logging.getLogger(__name__)
def decode_gzip(data):
- gzipper = gzip.GzipFile(fileobj=StringIO(data))
+ gzipper = gzip.GzipFile(fileobj=BytesIO(data))
return gzipper.read()
@@ -71,7 +72,7 @@ class HTTPResponse(object):
self.strict = strict
self._decode_content = decode_content
- self._body = None
+ self._body = body if body and isinstance(body, basestring) else None
self._fp = None
self._original_response = original_response
@@ -81,9 +82,22 @@ class HTTPResponse(object):
if hasattr(body, 'read'):
self._fp = body
- if preload_content:
+ if preload_content and not self._body:
self._body = self.read(decode_content=decode_content)
+ def get_redirect_location(self):
+ """
+ Should we redirect and where to?
+
+ :returns: Truthy redirect location string if we got a redirect status
+ code and valid location. ``None`` if redirect status and no
+ location. ``False`` if not a redirect status code.
+ """
+ if self.status in [301, 302, 303, 307]:
+ return self.headers.get('location')
+
+ return False
+
def release_conn(self):
if not self._pool or not self._connection:
return
@@ -98,10 +112,9 @@ class HTTPResponse(object):
return self._body
if self._fp:
- return self.read(decode_content=self._decode_content,
- cache_content=True)
+ return self.read(cache_content=True)
- def read(self, amt=None, decode_content=True, cache_content=False):
+ def read(self, amt=None, decode_content=None, cache_content=False):
"""
Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
parameters: ``decode_content`` and ``cache_content``.
@@ -124,22 +137,22 @@ class HTTPResponse(object):
"""
content_encoding = self.headers.get('content-encoding')
decoder = self.CONTENT_DECODERS.get(content_encoding)
+ if decode_content is None:
+ decode_content = self._decode_content
- data = self._fp and self._fp.read(amt)
+ if self._fp is None:
+ return
try:
-
- if amt:
- return data
-
- if not decode_content or not decoder:
- if cache_content:
- self._body = data
-
- return data
+ if amt is None:
+ # cStringIO doesn't like amt=None
+ data = self._fp.read()
+ else:
+ return self._fp.read(amt)
try:
- data = decoder(data)
+ if decode_content and decoder:
+ data = decoder(data)
except IOError:
raise HTTPError("Received response with content-encoding: %s, but "
"failed to decode it." % content_encoding)
@@ -150,12 +163,11 @@ class HTTPResponse(object):
return data
finally:
-
if self._original_response and self._original_response.isclosed():
self.release_conn()
- @staticmethod
- def from_httplib(r, **response_kw):
+ @classmethod
+ def from_httplib(ResponseCls, r, **response_kw):
"""
Given an :class:`httplib.HTTPResponse` instance ``r``, return a
corresponding :class:`urllib3.response.HTTPResponse` object.
@@ -164,14 +176,17 @@ class HTTPResponse(object):
with ``original_response=r``.
"""
- return HTTPResponse(body=r,
- headers=dict(r.getheaders()),
- status=r.status,
- version=r.version,
- reason=r.reason,
- strict=r.strict,
- original_response=r,
- **response_kw)
+ # HTTPResponse objects in Python 3 don't have a .strict attribute
+ strict = getattr(r, 'strict', 0)
+ return ResponseCls(body=r,
+ # In Python 3, the header keys are returned capitalised
+ headers=dict((k.lower(), v) for k,v in r.getheaders()),
+ status=r.status,
+ version=r.version,
+ reason=r.reason,
+ strict=strict,
+ original_response=r,
+ **response_kw)
# Backwards-compatibility methods for httplib.HTTPResponse
def getheaders(self):
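
get_redirect_location() folds "is this a redirect, and to where?" into one call: a Location string for 301/302/303/307, None when a redirect status arrives without a Location header, and False otherwise. A minimal restatement of that contract:

    REDIRECT_STATUSES = (301, 302, 303, 307)

    def get_redirect_location(status, headers):
        """Truthy location, None (redirect without Location), or False (not a redirect)."""
        if status in REDIRECT_STATUSES:
            return headers.get('location')
        return False

    assert get_redirect_location(302, {'location': '/new'}) == '/new'
    assert get_redirect_location(301, {}) is None
    assert get_redirect_location(200, {}) is False

    # Caller-side pattern from the urlopen hunk earlier in this diff:
    #   redirect_location = redirect and response.get_redirect_location()
    #   if redirect_location: retry against the new URL with retries - 1
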
diff --git a/requests/sessions.py b/requests/sessions.py
index 247aa18..d9683b0 100644
--- a/requests/sessions.py
+++ b/requests/sessions.py
@@ -25,7 +25,7 @@ def merge_kwargs(local_kwarg, default_kwarg):
if default_kwarg is None:
return local_kwarg
- if isinstance(local_kwarg, basestring):
+ if isinstance(local_kwarg, str):
return local_kwarg
if local_kwarg is None:
@@ -40,7 +40,7 @@ def merge_kwargs(local_kwarg, default_kwarg):
kwargs.update(local_kwarg)
# Remove keys that are set to None.
- for (k,v) in local_kwarg.items():
+ for (k,v) in list(local_kwarg.items()):
if v is None:
del kwargs[k]
@@ -63,7 +63,8 @@ class Session(object):
proxies=None,
hooks=None,
params=None,
- config=None):
+ config=None,
+ verify=True):
self.headers = headers or {}
self.cookies = cookies or {}
@@ -73,8 +74,9 @@ class Session(object):
self.hooks = hooks or {}
self.params = params or {}
self.config = config or {}
+ self.verify = verify
- for (k, v) in defaults.items():
+ for (k, v) in list(defaults.items()):
self.config.setdefault(k, v)
self.poolmanager = PoolManager(
@@ -111,7 +113,8 @@ class Session(object):
hooks=None,
return_response=True,
config=None,
- prefetch=False):
+ prefetch=False,
+ verify=None):
"""Constructs and sends a :class:`Request <Request>`.
Returns :class:`Response <Response>` object.
@@ -123,13 +126,14 @@ class Session(object):
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
:param files: (optional) Dictionary of 'filename': file-like-objects for multipart encoding upload.
- :param auth: (optional) Auth typle to enable Basic/Digest/Custom HTTP Auth.
+ :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) Float describing the timeout of the request.
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param return_response: (optional) If False, an un-sent Request object will returned.
:param config: (optional) A configuration dictionary.
:param prefetch: (optional) if ``True``, the response content will be immediately downloaded.
+ :param verify: (optional) if ``True``, the SSL cert will be verified. A CA_BUNDLE path can also be provided.
"""
method = str(method).upper()
@@ -141,13 +145,17 @@ class Session(object):
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
+
+ if verify is None:
+ verify = self.verify
+
# use session's hooks as defaults
- for key, cb in self.hooks.iteritems():
+ for key, cb in list(self.hooks.items()):
hooks.setdefault(key, cb)
# Expand header values.
if headers:
- for k, v in headers.items() or {}:
+ for k, v in list(headers.items()) or {}:
headers[k] = header_expand(v)
args = dict(
@@ -164,6 +172,7 @@ class Session(object):
allow_redirects=allow_redirects,
proxies=proxies,
config=config,
+ verify=verify,
_poolmanager=self.poolmanager
)
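
Sessions now carry a verify default that individual requests inherit unless they pass their own value (True, False, or a CA bundle path). A hedged usage sketch; the hosts and bundle path are placeholders, network access is required to actually run it, and it assumes the convenience verbs forward keyword arguments to Session.request as usual:

    import requests

    # Session-wide default: verify certificates for every request made through it.
    s = requests.session(verify=True)
    r = s.get('https://github.com/')                                  # inherits verify=True

    # A single call can still opt out, or point at a custom CA bundle.
    r = s.get('https://self-signed.internal/', verify=False)
    r = s.get('https://internal.example/', verify='/etc/ssl/my-ca-bundle.pem')
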
diff --git a/requests/status_codes.py b/requests/status_codes.py
index fab8e95..da74286 100644
--- a/requests/status_codes.py
+++ b/requests/status_codes.py
@@ -79,7 +79,7 @@ _codes = {
codes = LookupDict(name='status_codes')
-for (code, titles) in _codes.items():
+for (code, titles) in list(_codes.items()):
for title in titles:
setattr(codes, title, code)
if not title.startswith('\\'):
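
Only the iteration changes here (items() already returns what the loop needs on Python 3); the LookupDict is still populated the same way, one attribute per status title:

    from requests.status_codes import codes

    print(codes.ok)            # 200
    print(codes.not_found)     # 404
    print(codes['forbidden'])  # 403, looked up through the LookupDict
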
diff --git a/requests/structures.py b/requests/structures.py
index 35a903f..3746754 100644
--- a/requests/structures.py
+++ b/requests/structures.py
@@ -18,7 +18,7 @@ class CaseInsensitiveDict(dict):
@property
def lower_keys(self):
if not hasattr(self, '_lower_keys') or not self._lower_keys:
- self._lower_keys = dict((k.lower(), k) for k in self.iterkeys())
+ self._lower_keys = dict((k.lower(), k) for k in list(self.keys()))
return self._lower_keys
def _clear_lower_keys(self):
@@ -63,4 +63,4 @@ class LookupDict(dict):
return self.__dict__.get(key, None)
def get(self, key, default=None):
- return self.__dict__.get(key, default) \ No newline at end of file
+ return self.__dict__.get(key, default)
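
lower_keys caches a lowercase-to-original key index so CaseInsensitiveDict can answer lookups regardless of header casing; the change just swaps iterkeys() for a Python 3-safe list(self.keys()). A standalone sketch of that indexing idea (a toy class, not the requests one):

    class LowerIndexDict(dict):
        """Toy dict with a cached lowercase-key index, like CaseInsensitiveDict."""
        @property
        def lower_keys(self):
            if not hasattr(self, '_lower_keys') or not self._lower_keys:
                self._lower_keys = dict((k.lower(), k) for k in list(self.keys()))
            return self._lower_keys

        def has_key_ci(self, key):
            return key.lower() in self.lower_keys

    d = LowerIndexDict({'Content-Type': 'application/json'})
    print(d.has_key_ci('content-type'))   # True
    print(d.lower_keys)                   # {'content-type': 'Content-Type'}
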
diff --git a/requests/utils.py b/requests/utils.py
index f31cad8..0e0f69e 100644
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -4,23 +4,35 @@
requests.utils
~~~~~~~~~~~~~~
-This module provides utlity functions that are used within Requests
+This module provides utility functions that are used within Requests
that are also useful for external consumption.
"""
import cgi
import codecs
-import cookielib
import os
import random
import re
import zlib
-import urllib
-from urllib2 import parse_http_list as _parse_list_header
+from .compat import parse_http_list as _parse_list_header
+from .compat import quote, unquote, cookielib, SimpleCookie, is_py2
+def dict_from_string(s):
+ """Returns a MultiDict with Cookies."""
+
+ cookies = dict()
+
+ c = SimpleCookie()
+ c.load(s)
+
+ for k,v in list(c.items()):
+ cookies.update({k: v.value})
+
+ return cookies
+
def guess_filename(obj):
"""Tries to guess the filename of the given object."""
name = getattr(obj, 'name', None)
@@ -132,16 +144,16 @@ def header_expand(headers):
collector = []
if isinstance(headers, dict):
- headers = headers.items()
+ headers = list(headers.items())
- elif isinstance(headers, basestring):
+ elif isinstance(headers, str):
return headers
for i, (value, params) in enumerate(headers):
_params = []
- for (p_k, p_v) in params.items():
+ for (p_k, p_v) in list(params.items()):
_params.append('%s=%s' % (p_k, p_v))
@@ -166,17 +178,11 @@ def header_expand(headers):
def randombytes(n):
"""Return n random bytes."""
- # Use /dev/urandom if it is available. Fall back to random module
- # if not. It might be worthwhile to extend this function to use
- # other platform-specific mechanisms for getting random bytes.
- if os.path.exists("/dev/urandom"):
- f = open("/dev/urandom")
- s = f.read(n)
- f.close()
- return s
- else:
+ if is_py2:
L = [chr(random.randrange(0, 256)) for i in range(n)]
- return "".join(L)
+ else:
+ L = [chr(random.randrange(0, 256)).encode('utf-8') for i in range(n)]
+ return b"".join(L)
def dict_from_cookiejar(cj):
@@ -187,9 +193,9 @@ def dict_from_cookiejar(cj):
cookie_dict = {}
- for _, cookies in cj._cookies.items():
- for _, cookies in cookies.items():
- for cookie in cookies.values():
+ for _, cookies in list(cj._cookies.items()):
+ for _, cookies in list(cookies.items()):
+ for cookie in list(cookies.values()):
# print cookie
cookie_dict[cookie.name] = cookie.value
@@ -221,7 +227,7 @@ def add_dict_to_cookiejar(cj, cookie_dict):
:param cookie_dict: Dict of key/values to insert into CookieJar.
"""
- for k, v in cookie_dict.items():
+ for k, v in list(cookie_dict.items()):
cookie = cookielib.Cookie(
version=0,
@@ -276,6 +282,9 @@ def get_encoding_from_headers(headers):
if 'charset' in params:
return params['charset'].strip("'\"")
+ if 'text' in content_type:
+ return 'ISO-8859-1'
+
def unicode_from_html(content):
"""Attempts to decode an HTML string into unicode.
@@ -287,7 +296,7 @@ def unicode_from_html(content):
for encoding in encodings:
try:
- return unicode(content, encoding)
+ return str(content, encoding)
except (UnicodeError, TypeError):
pass
@@ -334,13 +343,13 @@ def get_unicode_from_response(r):
if encoding:
try:
- return unicode(r.content, encoding)
+ return str(r.content, encoding)
except UnicodeError:
tried_encodings.append(encoding)
# Fall back:
try:
- return unicode(r.content, encoding, errors='replace')
+ return str(r.content, encoding, errors='replace')
except TypeError:
return r.content
@@ -354,28 +363,45 @@ def decode_gzip(content):
return zlib.decompress(content, 16 + zlib.MAX_WBITS)
-def stream_decode_gzip(iterator):
- """Stream decodes a gzip-encoded iterator"""
+def stream_decompress(iterator, mode='gzip'):
+ """
+ Stream decodes an iterator over compressed data
+
+ :param iterator: An iterator over compressed data
+ :param mode: 'gzip' or 'deflate'
+ :return: An iterator over decompressed data
+ """
+
+ if mode not in ['gzip', 'deflate']:
+ raise ValueError('stream_decompress mode must be gzip or deflate')
+
+ zlib_mode = 16 + zlib.MAX_WBITS if mode == 'gzip' else -zlib.MAX_WBITS
+ dec = zlib.decompressobj(zlib_mode)
try:
- dec = zlib.decompressobj(16 + zlib.MAX_WBITS)
for chunk in iterator:
rv = dec.decompress(chunk)
if rv:
yield rv
+ except zlib.error:
+ # If there was an error decompressing, just return the raw chunk
+ yield chunk
+ # Continue to return the rest of the raw data
+ for chunk in iterator:
+ yield chunk
+ else:
+ # Make sure everything has been returned from the decompression object
buf = dec.decompress('')
rv = buf + dec.flush()
if rv:
yield rv
- except zlib.error:
- pass
def requote_path(path):
"""Re-quote the given URL path component.
This function passes the given path through an unquote/quote cycle to
- ensure that it is fully and consistenty quoted.
+ ensure that it is fully and consistently quoted.
"""
- parts = path.split("/")
- parts = (urllib.quote(urllib.unquote(part), safe="") for part in parts)
- return "/".join(parts)
+ parts = path.split(b"/")
+ parts = (quote(unquote(part), safe=b"") for part in parts)
+ return b"/".join(parts)
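
stream_decompress() generalises the old gzip-only streaming decoder to gzip and deflate and, on a zlib error, passes the raw chunks through instead of silently dropping them. A self-contained sketch of the same approach, fed a gzip payload split into chunks:

    import zlib

    def stream_decompress(iterator, mode='gzip'):
        """Yield decompressed data from an iterator of compressed chunks."""
        if mode not in ('gzip', 'deflate'):
            raise ValueError('mode must be gzip or deflate')
        zlib_mode = 16 + zlib.MAX_WBITS if mode == 'gzip' else -zlib.MAX_WBITS
        dec = zlib.decompressobj(zlib_mode)
        try:
            for chunk in iterator:
                rv = dec.decompress(chunk)
                if rv:
                    yield rv
        except zlib.error:
            # Not actually compressed: hand back the raw chunk and the rest untouched.
            yield chunk
            for chunk in iterator:
                yield chunk
        else:
            # Flush whatever the decompressor is still holding.
            tail = dec.decompress(b'') + dec.flush()
            if tail:
                yield tail

    compressor = zlib.compressobj(9, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
    data = compressor.compress(b'hello world ' * 100) + compressor.flush()
    chunks = [data[i:i + 64] for i in range(0, len(data), 64)]
    print(b''.join(stream_decompress(iter(chunks))))
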
diff --git a/setup.py b/setup.py
index 53e770a..641463d 100755
--- a/setup.py
+++ b/setup.py
@@ -4,6 +4,7 @@
import os
import sys
import requests
+from requests.compat import is_py3
try:
from setuptools import setup
@@ -12,18 +13,21 @@ except ImportError:
-if sys.argv[-1] == "publish":
- os.system("python setup.py sdist upload")
+if sys.argv[-1] == 'publish':
+ os.system('python setup.py sdist upload')
sys.exit()
-if sys.argv[-1] == "test":
- os.system("python test_requests.py")
+if sys.argv[-1] == 'test':
+ os.system('python test_requests.py')
sys.exit()
-required = []
+required = ['certifi>=0.0.7',]
+
+if is_py3:
+ required.append('chardet2')
+else:
+ required.append('chardet>=1.0.0')
-if sys.version_info[:2] < (2,6):
- required.append('simplejson')
setup(
name='requests',
@@ -34,12 +38,17 @@ setup(
author='Kenneth Reitz',
author_email='me@kennethreitz.com',
url='http://python-requests.org',
- packages= [
+ packages=[
'requests',
'requests.packages',
'requests.packages.urllib3',
+ 'requests.packages.urllib3.packages',
+ 'requests.packages.urllib3.packages.ssl_match_hostname',
+ 'requests.packages.urllib3.packages.mimetools_choose_boundary',
'requests.packages.oreos'
],
+ package_data={'': ['LICENSE', 'NOTICE']},
+ include_package_data=True,
install_requires=required,
license='ISC',
classifiers=(
@@ -48,10 +57,9 @@ setup(
'Natural Language :: English',
'License :: OSI Approved :: ISC License (ISCL)',
'Programming Language :: Python',
- 'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
- # 'Programming Language :: Python :: 3.0',
- # 'Programming Language :: Python :: 3.1',
+ 'Programming Language :: Python :: 3.0',
+ 'Programming Language :: Python :: 3.1',
),
)
diff --git a/test_requests.py b/test_requests.py
index 61953a3..29de3ed 100755
--- a/test_requests.py
+++ b/test_requests.py
@@ -1,27 +1,29 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
-from __future__ import with_statement
+
+import io
+import json
import time
import os
+import sys
import unittest
import requests
-import envoy
+from requests.compat import str, bytes, StringIO
+# import envoy
from requests import HTTPError
+from requests.auth import HTTPBasicAuth, HTTPDigestAuth
+
-try:
- import omnijson as json
-except ImportError:
- import json
+if (sys.platform == 'win32') and ('HTTPBIN_URL' not in os.environ):
+ os.environ['HTTPBIN_URL'] = 'http://httpbin.org/'
# TODO: Detect an open port.
PORT = os.environ.get('HTTPBIN_PORT', '7077')
-
-HTTPBIN_URL = 'http://0.0.0.0:%s/' % (PORT)
-# HTTPBIN_URL = 'http://127.0.0.1:8000/'
+HTTPBIN_URL = os.environ.get('HTTPBIN_URL', 'http://0.0.0.0:%s/' % (PORT))
def httpbin(*suffix):
@@ -44,20 +46,10 @@ class RequestsTestSuite(unittest.TestCase):
global _httpbin
- if not _httpbin:
-
- c = envoy.connect('gunicorn httpbin:app --bind=0.0.0.0:%s' % (PORT))
-
- self.httpbin = c
+ if (not 'HTTPBIN_URL' in os.environ) and not _httpbin:
+ # c = envoy.connect('httpbin %s' % (PORT))
+ # time.sleep(1)
_httpbin = True
- time.sleep(1)
-
-
-
- def tearDown(self):
- """Teardown."""
- # self.httpbin.kill()
- pass
def test_entry_points(self):
@@ -80,6 +72,11 @@ class RequestsTestSuite(unittest.TestCase):
r = requests.get(httpbin('/get'))
self.assertEqual(r.status_code, 200)
+ def test_response_sent(self):
+ r = requests.get(httpbin('/get'))
+
+ self.assertTrue(r.request.sent)
+
def test_HTTP_302_ALLOW_REDIRECT_GET(self):
r = requests.get(httpbin('redirect', '1'))
self.assertEqual(r.status_code, 200)
@@ -94,7 +91,7 @@ class RequestsTestSuite(unittest.TestCase):
r = requests.get(httpbin('user-agent'), headers=heads)
- assert heads['User-agent'] in r.content
+ assert heads['User-agent'] in r.text
self.assertEqual(r.status_code, 200)
@@ -114,7 +111,7 @@ class RequestsTestSuite(unittest.TestCase):
}
r = requests.get(httpbin('user-agent'), headers=heads);
- self.assertTrue(heads['User-agent'] in r.content)
+ self.assertTrue(heads['User-agent'] in r.text)
heads = {
'user-agent':
@@ -122,7 +119,7 @@ class RequestsTestSuite(unittest.TestCase):
}
r = requests.get(httpbin('user-agent'), headers=heads);
- self.assertTrue(heads['user-agent'] in r.content)
+ self.assertTrue(heads['user-agent'] in r.text)
def test_HTTP_200_OK_HEAD(self):
@@ -135,21 +132,39 @@ class RequestsTestSuite(unittest.TestCase):
self.assertEqual(r.status_code, 200)
- def test_HTTP_200_OK_PATCH(self):
- r = requests.patch(httpbin('patch'))
- self.assertEqual(r.status_code, 200)
+ def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self):
+
+ for service in SERVICES:
+
+ auth = ('user', 'pass')
+ url = service('basic-auth', 'user', 'pass')
+
+ r = requests.get(url, auth=auth)
+ self.assertEqual(r.status_code, 200)
+
+ r = requests.get(url)
+ self.assertEqual(r.status_code, 401)
+
+
+ s = requests.session(auth=auth)
+ r = s.get(url)
+ self.assertEqual(r.status_code, 200)
def test_BASICAUTH_HTTP_200_OK_GET(self):
for service in SERVICES:
- auth = ('user', 'pass')
+ auth = HTTPBasicAuth('user', 'pass')
url = service('basic-auth', 'user', 'pass')
r = requests.get(url, auth=auth)
self.assertEqual(r.status_code, 200)
+ auth = ('user', 'pass')
+ r = requests.get(url, auth=auth)
+ self.assertEqual(r.status_code, 200)
+
r = requests.get(url)
self.assertEqual(r.status_code, 401)
@@ -163,7 +178,7 @@ class RequestsTestSuite(unittest.TestCase):
for service in SERVICES:
- auth = ('digest', 'user', 'pass')
+ auth = HTTPDigestAuth('user', 'pass')
url = service('digest-auth', 'auth', 'user', 'pass')
r = requests.get(url, auth=auth)
@@ -249,11 +264,31 @@ class RequestsTestSuite(unittest.TestCase):
r.raise_for_status()
+ def test_default_status_raising(self):
+ config = {'danger_mode': True}
+ args = [httpbin('status', '404')]
+ kwargs = dict(config=config)
+ self.assertRaises(HTTPError, requests.get, *args, **kwargs)
+
+ r = requests.get(httpbin('status', '200'))
+ self.assertEqual(r.status_code, 200)
+
+
def test_decompress_gzip(self):
r = requests.get(httpbin('gzip'))
r.content.decode('ascii')
+ def test_response_has_unicode_url(self):
+
+ for service in SERVICES:
+
+ url = service('get')
+
+ response = requests.get(url)
+
+ assert isinstance(response.url, str)
+
def test_unicode_get(self):
@@ -261,20 +296,20 @@ class RequestsTestSuite(unittest.TestCase):
url = service('/get')
- requests.get(url, params={'foo': u'føø'})
- requests.get(url, params={u'føø': u'føø'})
+ requests.get(url, params={'foo': 'føø'})
+ requests.get(url, params={'føø': 'føø'})
requests.get(url, params={'føø': 'føø'})
- requests.get(url, params={'foo': u'foo'})
- requests.get(service('ø'), params={'foo': u'foo'})
+ requests.get(url, params={'foo': 'foo'})
+ requests.get(service('ø'), params={'foo': 'foo'})
def test_httpauth_recursion(self):
- http_auth = ('user', 'BADpass')
+ http_auth = HTTPBasicAuth('user', 'BADpass')
for service in SERVICES:
r = requests.get(service('basic-auth', 'user', 'pass'), auth=http_auth)
- self.assertEquals(r.status_code, 401)
+ self.assertEqual(r.status_code, 401)
def test_urlencoded_post_data(self):
@@ -283,14 +318,14 @@ class RequestsTestSuite(unittest.TestCase):
r = requests.post(service('post'), data=dict(test='fooaowpeuf'))
- self.assertEquals(r.status_code, 200)
- self.assertEquals(r.headers['content-type'], 'application/json')
- self.assertEquals(r.url, service('post'))
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.headers['content-type'], 'application/json')
+ self.assertEqual(r.url, service('post'))
- rbody = json.loads(r.content)
+ rbody = json.loads(r.text)
- self.assertEquals(rbody.get('form'), dict(test='fooaowpeuf'))
- self.assertEquals(rbody.get('data'), '')
+ self.assertEqual(rbody.get('form'), dict(test='fooaowpeuf'))
+ self.assertEqual(rbody.get('data'), '')
def test_nonurlencoded_post_data(self):
@@ -299,15 +334,16 @@ class RequestsTestSuite(unittest.TestCase):
r = requests.post(service('post'), data='fooaowpeuf')
- self.assertEquals(r.status_code, 200)
- self.assertEquals(r.headers['content-type'], 'application/json')
- self.assertEquals(r.url, service('post'))
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.headers['content-type'], 'application/json')
+ self.assertEqual(r.url, service('post'))
- rbody = json.loads(r.content)
+ rbody = json.loads(r.text)
# Body wasn't valid url encoded data, so the server returns None as
# "form" and the raw body as "data".
- self.assertEquals(rbody.get('form'), {})
- self.assertEquals(rbody.get('data'), 'fooaowpeuf')
+
+ assert rbody.get('form') in (None, {})
+ self.assertEqual(rbody.get('data'), 'fooaowpeuf')
def test_urlencoded_post_querystring(self):
@@ -316,13 +352,13 @@ class RequestsTestSuite(unittest.TestCase):
r = requests.post(service('post'), params=dict(test='fooaowpeuf'))
- self.assertEquals(r.status_code, 200)
- self.assertEquals(r.headers['content-type'], 'application/json')
- self.assertEquals(r.url, service('post?test=fooaowpeuf'))
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.headers['content-type'], 'application/json')
+ self.assertEqual(r.url, service('post?test=fooaowpeuf'))
- rbody = json.loads(r.content)
- self.assertEquals(rbody.get('form'), {}) # No form supplied
- self.assertEquals(rbody.get('data'), '')
+ rbody = json.loads(r.text)
+ self.assertEqual(rbody.get('form'), {}) # No form supplied
+ self.assertEqual(rbody.get('data'), '')
def test_urlencoded_post_query_and_data(self):
@@ -334,13 +370,13 @@ class RequestsTestSuite(unittest.TestCase):
params=dict(test='fooaowpeuf'),
data=dict(test2="foobar"))
- self.assertEquals(r.status_code, 200)
- self.assertEquals(r.headers['content-type'], 'application/json')
- self.assertEquals(r.url, service('post?test=fooaowpeuf'))
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.headers['content-type'], 'application/json')
+ self.assertEqual(r.url, service('post?test=fooaowpeuf'))
- rbody = json.loads(r.content)
- self.assertEquals(rbody.get('form'), dict(test2='foobar'))
- self.assertEquals(rbody.get('data'), '')
+ rbody = json.loads(r.text)
+ self.assertEqual(rbody.get('form'), dict(test2='foobar'))
+ self.assertEqual(rbody.get('data'), '')
def test_nonurlencoded_postdata(self):
@@ -349,13 +385,13 @@ class RequestsTestSuite(unittest.TestCase):
r = requests.post(service('post'), data="foobar")
- self.assertEquals(r.status_code, 200)
- self.assertEquals(r.headers['content-type'], 'application/json')
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.headers['content-type'], 'application/json')
- rbody = json.loads(r.content)
+ rbody = json.loads(r.text)
- self.assertEquals(rbody.get('form'), {})
- self.assertEquals(rbody.get('data'), 'foobar')
+ assert rbody.get('form') in (None, {})
+ self.assertEqual(rbody.get('data'), 'foobar')
# def test_idna(self):
@@ -368,8 +404,8 @@ class RequestsTestSuite(unittest.TestCase):
for service in SERVICES:
r = requests.get(service('get'), params=dict(test=['foo','baz']))
- self.assertEquals(r.status_code, 200)
- self.assertEquals(r.url, service('get?test=foo&test=baz'))
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.url, service('get?test=foo&test=baz'))
def test_urlencoded_post_querystring_multivalued(self):
@@ -377,13 +413,13 @@ class RequestsTestSuite(unittest.TestCase):
for service in SERVICES:
r = requests.post(service('post'), params=dict(test=['foo','baz']))
- self.assertEquals(r.status_code, 200)
- self.assertEquals(r.headers['content-type'], 'application/json')
- self.assertEquals(r.url, service('post?test=foo&test=baz'))
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.headers['content-type'], 'application/json')
+ self.assertEqual(r.url, service('post?test=foo&test=baz'))
- rbody = json.loads(r.content)
- self.assertEquals(rbody.get('form'), {}) # No form supplied
- self.assertEquals(rbody.get('data'), '')
+ rbody = json.loads(r.text)
+ self.assertEqual(rbody.get('form'), {}) # No form supplied
+ self.assertEqual(rbody.get('data'), '')
def test_urlencoded_post_query_multivalued_and_data(self):
@@ -395,12 +431,16 @@ class RequestsTestSuite(unittest.TestCase):
params=dict(test=['foo','baz']),
data=dict(test2="foobar",test3=['foo','baz']))
- self.assertEquals(r.status_code, 200)
- self.assertEquals(r.headers['content-type'], 'application/json')
- self.assertEquals(r.url, service('post?test=foo&test=baz'))
- rbody = json.loads(r.content)
- self.assertEquals(rbody.get('form'), dict(test2='foobar',test3='foo'))
- self.assertEquals(rbody.get('data'), '')
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(r.headers['content-type'], 'application/json')
+ self.assertEqual(r.url, service('post?test=foo&test=baz'))
+
+ # print(r.text)
+ # print('-----------------------')
+
+ rbody = json.loads(r.text)
+ self.assertEqual(rbody.get('form'), dict(test2='foobar',test3=['foo','baz']))
+ self.assertEqual(rbody.get('data'), '')
def test_GET_no_redirect(self):
@@ -408,8 +448,8 @@ class RequestsTestSuite(unittest.TestCase):
for service in SERVICES:
r = requests.get(service('redirect', '3'), allow_redirects=False)
- self.assertEquals(r.status_code, 302)
- self.assertEquals(len(r.history), 0)
+ self.assertEqual(r.status_code, 302)
+ self.assertEqual(len(r.history), 0)
def test_HEAD_no_redirect(self):
@@ -417,8 +457,8 @@ class RequestsTestSuite(unittest.TestCase):
for service in SERVICES:
r = requests.head(service('redirect', '3'), allow_redirects=False)
- self.assertEquals(r.status_code, 302)
- self.assertEquals(len(r.history), 0)
+ self.assertEqual(r.status_code, 302)
+ self.assertEqual(len(r.history), 0)
def test_redirect_history(self):
@@ -426,8 +466,8 @@ class RequestsTestSuite(unittest.TestCase):
for service in SERVICES:
r = requests.get(service('redirect', '3'))
- self.assertEquals(r.status_code, 200)
- self.assertEquals(len(r.history), 3)
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(len(r.history), 3)
def test_relative_redirect_history(self):
@@ -435,8 +475,8 @@ class RequestsTestSuite(unittest.TestCase):
for service in SERVICES:
r = requests.get(service('relative-redirect', '3'))
- self.assertEquals(r.status_code, 200)
- self.assertEquals(len(r.history), 3)
+ self.assertEqual(r.status_code, 200)
+ self.assertEqual(len(r.history), 3)
def test_session_HTTP_200_OK_GET(self):
@@ -455,17 +495,76 @@ class RequestsTestSuite(unittest.TestCase):
# Make 2 requests from Session object, should send header both times
r1 = s.get(httpbin('user-agent'))
- assert heads['User-agent'] in r1.content
+ assert heads['User-agent'] in r1.text
r2 = s.get(httpbin('user-agent'))
- assert heads['User-agent'] in r2.content
+ assert heads['User-agent'] in r2.text
new_heads = {'User-agent': 'blah'}
r3 = s.get(httpbin('user-agent'), headers=new_heads)
- assert new_heads['User-agent'] in r3.content
+ assert new_heads['User-agent'] in r3.text
self.assertEqual(r2.status_code, 200)
+ def test_single_hook(self):
+
+ def add_foo_header(args):
+ if not args.get('headers'):
+ args['headers'] = {}
+
+ args['headers'].update({
+ 'X-Foo': 'foo'
+ })
+
+ return args
+
+ for service in SERVICES:
+ url = service('headers')
+
+ response = requests.get(
+ url = url,
+ hooks = {
+ 'args': add_foo_header
+ }
+ )
+
+ assert 'foo' in response.text
+
+ def test_multiple_hooks(self):
+
+ def add_foo_header(args):
+ if not args.get('headers'):
+ args['headers'] = {}
+
+ args['headers'].update({
+ 'X-Foo': 'foo'
+ })
+
+ return args
+
+ def add_bar_header(args):
+ if not args.get('headers'):
+ args['headers'] = {}
+
+ args['headers'].update({
+ 'X-Bar': 'bar'
+ })
+
+ return args
+
+ for service in SERVICES:
+ url = service('headers')
+
+ response = requests.get(
+ url = url,
+ hooks = {
+ 'args': [add_foo_header, add_bar_header]
+ }
+ )
+
+ assert 'foo' in response.text
+ assert 'bar' in response.text
+
def test_session_persistent_cookies(self):
s = requests.session()
@@ -476,35 +575,35 @@ class RequestsTestSuite(unittest.TestCase):
r = s.get(httpbin('cookies'))
# Those cookies persist transparently.
- c = json.loads(r.content).get('cookies')
+ c = json.loads(r.text).get('cookies')
assert c == _c
# Double check.
r = s.get(httpbin('cookies'), cookies={})
- c = json.loads(r.content).get('cookies')
+ c = json.loads(r.text).get('cookies')
assert c == _c
# Remove a cookie by setting it's value to None.
r = s.get(httpbin('cookies'), cookies={'bessie': None})
- c = json.loads(r.content).get('cookies')
+ c = json.loads(r.text).get('cookies')
del _c['bessie']
assert c == _c
# Test session-level cookies.
s = requests.session(cookies=_c)
r = s.get(httpbin('cookies'))
- c = json.loads(r.content).get('cookies')
+ c = json.loads(r.text).get('cookies')
assert c == _c
# Have the server set a cookie.
r = s.get(httpbin('cookies', 'set', 'k', 'v'), allow_redirects=True)
- c = json.loads(r.content).get('cookies')
+ c = json.loads(r.text).get('cookies')
assert 'k' in c
# And server-set cookie persistience.
r = s.get(httpbin('cookies'))
- c = json.loads(r.content).get('cookies')
+ c = json.loads(r.text).get('cookies')
assert 'k' in c
@@ -519,23 +618,23 @@ class RequestsTestSuite(unittest.TestCase):
# Make 2 requests from Session object, should send header both times
r1 = s.get(httpbin('get'))
- assert params['a'] in r1.content
+ assert params['a'] in r1.text
params2 = {'b': 'b_test'}
r2 = s.get(httpbin('get'), params=params2)
- assert params['a'] in r2.content
- assert params2['b'] in r2.content
+ assert params['a'] in r2.text
+ assert params2['b'] in r2.text
params3 = {'b': 'b_test', 'a': None, 'c': 'c_test'}
r3 = s.get(httpbin('get'), params=params3)
- assert not params['a'] in r3.content
- assert params3['b'] in r3.content
- assert params3['c'] in r3.content
+ assert not params['a'] in r3.text
+ assert params3['b'] in r3.text
+ assert params3['c'] in r3.text
def test_invalid_content(self):
# WARNING: if you're using a terrible DNS provider (comcast),
@@ -553,6 +652,70 @@ class RequestsTestSuite(unittest.TestCase):
r = requests.get(hah, allow_redirects=False, config=config)
assert r.content == None
+ def test_cached_response(self):
+
+ r1 = requests.get(httpbin('get'), prefetch=False)
+ assert not r1._content
+ assert r1.content
+ assert r1.text
+
+ r2 = requests.get(httpbin('get'), prefetch=True)
+ assert r2._content
+ assert r2.content
+ assert r2.text
+
+ def test_iter_lines(self):
+
+ lines = (0, 2, 10, 100)
+
+ for i in lines:
+ r = requests.get(httpbin('stream', str(i)), prefetch=False)
+ lines = list(r.iter_lines())
+ len_lines = len(lines)
+
+ self.assertEqual(i, len_lines)
+
+ # Test 'dangling' fragment in responses that do not terminate in
+ # a newline.
+ quote = (
+ '''Why will he not upon our fair request\n'''
+ '''Untent his person and share the air with us?'''
+ )
+
+ # Make a request and monkey-patch its contents
+ r = requests.get(httpbin('get'))
+ r.raw = StringIO(quote)
+
+ # Make sure iter_lines doesn't chop the trailing bit
+ lines = '\n'.join(r.iter_lines())
+ self.assertEqual(lines, quote)
+
+ def test_safe_mode(self):
+
+ safe = requests.session(config=dict(safe_mode=True))
+
+ # Safe mode creates empty responses for failed requests.
+ # Iterating on these responses should produce empty sequences
+ r = safe.get('http://_/')
+ self.assertEqual(list(r.iter_lines()), [])
+ assert isinstance(r.error, requests.exceptions.ConnectionError)
+
+ r = safe.get('http://_/')
+ self.assertEqual(list(r.iter_content()), [])
+ assert isinstance(r.error, requests.exceptions.ConnectionError)
+
+ # When not in safe mode, should raise Timeout exception
+ self.assertRaises(
+ requests.exceptions.Timeout,
+ requests.get,
+ httpbin('stream', '1000'), timeout=0.0001)
+
+ # In safe mode, should return a blank response
+ r = requests.get(httpbin('stream', '1000'), timeout=0.0001,
+ config=dict(safe_mode=True))
+ assert r.content is None
+ assert isinstance(r.error, requests.exceptions.Timeout)
+
if __name__ == '__main__':
unittest.main()
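
Since the suite now prefers a hosted httpbin whenever HTTPBIN_URL is set (and always on Windows), one way to run it without a local gunicorn is to export that variable first. A small Python driver to that effect, pointing at the same public instance the suite defaults to on Windows:

    import os
    import subprocess
    import sys

    # Point the tests at a hosted httpbin instead of spawning one locally.
    os.environ['HTTPBIN_URL'] = 'http://httpbin.org/'
    subprocess.call([sys.executable, 'test_requests.py'])
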