author    SVN-Git Migration <python-modules-team@lists.alioth.debian.org>  2015-10-08 13:41:21 -0700
committer SVN-Git Migration <python-modules-team@lists.alioth.debian.org>  2015-10-08 13:41:21 -0700
commit    e75853fc04102c7f72f2e955b63f9692c472f64a (patch)
tree      9847c33530102dcf8f42bba6a562a7df34d651e6
parent    365ef510aa3581a79709673e078401724601fc71 (diff)
Imported Upstream version 0.10.8
-rw-r--r--  HISTORY.rst                                    61
-rw-r--r--  PKG-INFO                                      118
-rw-r--r--  README.rst                                     54
-rw-r--r--  requests.egg-info/PKG-INFO                    118
-rw-r--r--  requests.egg-info/SOURCES.txt                   1
-rw-r--r--  requests/__init__.py                            5
-rw-r--r--  requests/api.py                                 2
-rw-r--r--  requests/async.py                              36
-rw-r--r--  requests/auth.py                                4
-rw-r--r--  requests/compat.py                              5
-rw-r--r--  requests/defaults.py                            8
-rw-r--r--  requests/exceptions.py                          3
-rw-r--r--  requests/models.py                            232
-rw-r--r--  requests/packages/oreos/monkeys.py              5
-rw-r--r--  requests/packages/oreos/structures.py          10
-rw-r--r--  requests/packages/urllib3/__init__.py           2
-rw-r--r--  requests/packages/urllib3/connectionpool.py    55
-rw-r--r--  requests/packages/urllib3/exceptions.py        21
-rw-r--r--  requests/packages/urllib3/poolmanager.py       10
-rw-r--r--  requests/packages/urllib3/response.py           7
-rw-r--r--  requests/sessions.py                           32
-rw-r--r--  requests/utils.py                             140
-rwxr-xr-x  setup.py                                       22
-rwxr-xr-x  test_requests.py                              721
24 files changed, 591 insertions, 1081 deletions
diff --git a/HISTORY.rst b/HISTORY.rst
index bf824bd..c05a71e 100644
--- a/HISTORY.rst
+++ b/HISTORY.rst
@@ -1,6 +1,59 @@
History
-------
+0.10.8 (2012-03-09)
++++++++++++++++++++
+
+* Generate chunked ValueError fix
+* Proxy configuration by environment variables
+* Simplification of iter_lines.
+* New `trust_env` configuration for disabling system/environment hints.
+* Suppress cookie errors.
+
+0.10.7 (2012-03-07)
++++++++++++++++++++
+
+* `encode_uri` = False
+
+0.10.6 (2012-02-25)
++++++++++++++++++++
+
+* Allow '=' in cookies.
+
+0.10.5 (2012-02-25)
++++++++++++++++++++
+
+* Response body with 0 content-length fix.
+* New async.imap.
+* Don't fail on netrc.
+
+
+0.10.4 (2012-02-20)
++++++++++++++++++++
+
+* Honor netrc.
+
+0.10.3 (2012-02-20)
++++++++++++++++++++
+
+* HEAD requests don't follow redirects anymore.
+* raise_for_status() doesn't raise for 3xx anymore.
+* Make Session objects picklable.
+* ValueError for invalid schema URLs.
+
+0.10.2 (2012-01-15)
++++++++++++++++++++
+
+* Vastly improved URL quoting.
+* Additional allowed cookie key values.
+* Attempted fix for "Too many open files" Error
+* Replace unicode errors on first pass, no need for second pass.
+* Append '/' to bare-domain urls before query insertion.
+* Exceptions now inherit from RuntimeError.
+* Binary uploads + auth fix.
+* Bugfixes.
+
+
0.10.1 (2012-01-23)
+++++++++++++++++++
@@ -27,7 +80,7 @@ History
0.9.2 (2012-01-18)
++++++++++++++++++
-* Asyncronous async.send method.
+* Asynchronous async.send method.
* Support for proper chunk streams with boundaries.
* session argument for Session classes.
* Print entire hook tracebacks, not just exception instance.
@@ -41,7 +94,7 @@ History
++++++++++++++++++
* danger_mode for automatic Response.raise_for_status()
-* Response.iter_lines refator
+* Response.iter_lines refactor
0.9.0 (2011-12-28)
++++++++++++++++++
@@ -98,7 +151,7 @@ History
0.8.2 (2011-11-19)
++++++++++++++++++
-* New unicode decoding system, based on overridable `Response.encoding`.
+* New Unicode decoding system, based on overridable `Response.encoding`.
* Proper URL slash-quote handling.
* Cookies with ``[``, ``]``, and ``_`` allowed.
@@ -138,7 +191,7 @@ History
0.7.4 (2011-10-26)
++++++++++++++++++
-* Sesion Hooks fix.
+* Session Hooks fix.
0.7.3 (2011-10-23)
++++++++++++++++++
diff --git a/PKG-INFO b/PKG-INFO
index 5943191..677b300 100644
--- a/PKG-INFO
+++ b/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.0
Name: requests
-Version: 0.10.1
+Version: 0.10.8
Summary: Python HTTP for Humans.
Home-page: http://python-requests.org
Author: Kenneth Reitz
@@ -9,6 +9,8 @@ License: ISC
Description: Requests: HTTP for Humans
=========================
+ .. image:: https://secure.travis-ci.org/kennethreitz/requests.png?branch=develop
+
Requests is an ISC Licensed HTTP library, written in Python, for human
beings.
@@ -27,7 +29,7 @@ Description: Requests: HTTP for Humans
204
>>> r.headers['content-type']
'application/json'
- >>> r.content
+ >>> r.text
...
See `the same code, without Requests <https://gist.github.com/973705>`_.
@@ -43,46 +45,16 @@ Description: Requests: HTTP for Humans
Features
--------
- - Browser standard SSL verification.
- - Extremely simple HEAD, GET, POST, PUT, PATCH, DELETE Requests.
- - Gevent support for Asyncronous Requests.
- - Sessions with cookie persistence.
- - Basic, Digest, and Custom Authentication support.
- - Automatic form-encoding of dictionaries
- - A simple dictionary interface for request/response cookies.
- - Multipart file uploads.
- - Automatc decoding of Unicode, gzip, and deflate responses.
- - Full support for unicode URLs and domain names.
-
-
- Usage
- -----
-
- It couldn't be simpler::
-
- >>> import requests
- >>> r = requests.get('http://google.com')
-
-
- HTTPS? Basic Authentication? ::
-
- >>> r = requests.get('https://httpbin.ep.io/basic-auth/user/pass')
- >>> r.status_code
- 401
-
-
- Uh oh, we're not authorized! Let's add authentication. ::
-
- >>> r = requests.get('https://httpbin.ep.io/basic-auth/user/pass', auth=('user', 'pass'))
-
- >>> r.status_code
- 200
-
- >>> r.headers['content-type']
- 'application/json'
-
- >>> r.content
- '{"authenticated": true, "user": "user"}'
+ - International Domains and URLs
+ - Keep-Alive & Connection Pooling
+ - Sessions with Cookie Persistence
+ - Browser-style SSL Verification
+ - Basic/Digest Authentication
+ - Elegant Key/Value Cookies
+ - Automatic Decompression
+ - Unicode Response Bodies
+ - Multipart File Uploads
+ - Connection Timeouts
Installation
@@ -115,6 +87,59 @@ Description: Requests: HTTP for Humans
History
-------
+ 0.10.8 (2012-03-09)
+ +++++++++++++++++++
+
+ * Generate chunked ValueError fix
+ * Proxy configuration by environment variables
+ * Simplification of iter_lines.
+ * New `trust_env` configuration for disabling system/environment hints.
+ * Suppress cookie errors.
+
+ 0.10.7 (2012-03-07)
+ +++++++++++++++++++
+
+ * `encode_uri` = False
+
+ 0.10.6 (2012-02-25)
+ +++++++++++++++++++
+
+ * Allow '=' in cookies.
+
+ 0.10.5 (2012-02-25)
+ +++++++++++++++++++
+
+ * Response body with 0 content-length fix.
+ * New async.imap.
+ * Don't fail on netrc.
+
+
+ 0.10.4 (2012-02-20)
+ +++++++++++++++++++
+
+ * Honor netrc.
+
+ 0.10.3 (2012-02-20)
+ +++++++++++++++++++
+
+ * HEAD requests don't follow redirects anymore.
+ * raise_for_status() doesn't raise for 3xx anymore.
+ * Make Session objects picklable.
+ * ValueError for invalid schema URLs.
+
+ 0.10.2 (2012-01-15)
+ +++++++++++++++++++
+
+ * Vastly improved URL quoting.
+ * Additional allowed cookie key values.
+ * Attempted fix for "Too many open files" Error
+ * Replace unicode errors on first pass, no need for second pass.
+ * Append '/' to bare-domain urls before query insertion.
+ * Exceptions now inherit from RuntimeError.
+ * Binary uploads + auth fix.
+ * Bugfixes.
+
+
0.10.1 (2012-01-23)
+++++++++++++++++++
@@ -141,7 +166,7 @@ Description: Requests: HTTP for Humans
0.9.2 (2012-01-18)
++++++++++++++++++
- * Asyncronous async.send method.
+ * Asynchronous async.send method.
* Support for proper chunk streams with boundaries.
* session argument for Session classes.
* Print entire hook tracebacks, not just exception instance.
@@ -155,7 +180,7 @@ Description: Requests: HTTP for Humans
++++++++++++++++++
* danger_mode for automatic Response.raise_for_status()
- * Response.iter_lines refator
+ * Response.iter_lines refactor
0.9.0 (2011-12-28)
++++++++++++++++++
@@ -212,7 +237,7 @@ Description: Requests: HTTP for Humans
0.8.2 (2011-11-19)
++++++++++++++++++
- * New unicode decoding system, based on overridable `Response.encoding`.
+ * New Unicode decoding system, based on overridable `Response.encoding`.
* Proper URL slash-quote handling.
* Cookies with ``[``, ``]``, and ``_`` allowed.
@@ -252,7 +277,7 @@ Description: Requests: HTTP for Humans
0.7.4 (2011-10-26)
++++++++++++++++++
- * Sesion Hooks fix.
+ * Session Hooks fix.
0.7.3 (2011-10-23)
++++++++++++++++++
@@ -485,5 +510,6 @@ Classifier: License :: OSI Approved :: ISC License (ISCL)
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.0
Classifier: Programming Language :: Python :: 3.1
diff --git a/README.rst b/README.rst
index bc6c291..ad88b42 100644
--- a/README.rst
+++ b/README.rst
@@ -1,6 +1,8 @@
Requests: HTTP for Humans
=========================
+.. image:: https://secure.travis-ci.org/kennethreitz/requests.png?branch=develop
+
Requests is an ISC Licensed HTTP library, written in Python, for human
beings.
@@ -19,7 +21,7 @@ Things shouldn't be this way. Not in Python.
204
>>> r.headers['content-type']
'application/json'
- >>> r.content
+ >>> r.text
...
See `the same code, without Requests <https://gist.github.com/973705>`_.
@@ -35,46 +37,16 @@ hacks for you.
Features
--------
-- Browser standard SSL verification.
-- Extremely simple HEAD, GET, POST, PUT, PATCH, DELETE Requests.
-- Gevent support for Asyncronous Requests.
-- Sessions with cookie persistence.
-- Basic, Digest, and Custom Authentication support.
-- Automatic form-encoding of dictionaries
-- A simple dictionary interface for request/response cookies.
-- Multipart file uploads.
-- Automatc decoding of Unicode, gzip, and deflate responses.
-- Full support for unicode URLs and domain names.
-
-
-Usage
------
-
-It couldn't be simpler::
-
- >>> import requests
- >>> r = requests.get('http://google.com')
-
-
-HTTPS? Basic Authentication? ::
-
- >>> r = requests.get('https://httpbin.ep.io/basic-auth/user/pass')
- >>> r.status_code
- 401
-
-
-Uh oh, we're not authorized! Let's add authentication. ::
-
- >>> r = requests.get('https://httpbin.ep.io/basic-auth/user/pass', auth=('user', 'pass'))
-
- >>> r.status_code
- 200
-
- >>> r.headers['content-type']
- 'application/json'
-
- >>> r.content
- '{"authenticated": true, "user": "user"}'
+- International Domains and URLs
+- Keep-Alive & Connection Pooling
+- Sessions with Cookie Persistence
+- Browser-style SSL Verification
+- Basic/Digest Authentication
+- Elegant Key/Value Cookies
+- Automatic Decompression
+- Unicode Response Bodies
+- Multipart File Uploads
+- Connection Timeouts
Installation
diff --git a/requests.egg-info/PKG-INFO b/requests.egg-info/PKG-INFO
index 5943191..677b300 100644
--- a/requests.egg-info/PKG-INFO
+++ b/requests.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
Metadata-Version: 1.0
Name: requests
-Version: 0.10.1
+Version: 0.10.8
Summary: Python HTTP for Humans.
Home-page: http://python-requests.org
Author: Kenneth Reitz
@@ -9,6 +9,8 @@ License: ISC
Description: Requests: HTTP for Humans
=========================
+ .. image:: https://secure.travis-ci.org/kennethreitz/requests.png?branch=develop
+
Requests is an ISC Licensed HTTP library, written in Python, for human
beings.
@@ -27,7 +29,7 @@ Description: Requests: HTTP for Humans
204
>>> r.headers['content-type']
'application/json'
- >>> r.content
+ >>> r.text
...
See `the same code, without Requests <https://gist.github.com/973705>`_.
@@ -43,46 +45,16 @@ Description: Requests: HTTP for Humans
Features
--------
- - Browser standard SSL verification.
- - Extremely simple HEAD, GET, POST, PUT, PATCH, DELETE Requests.
- - Gevent support for Asyncronous Requests.
- - Sessions with cookie persistence.
- - Basic, Digest, and Custom Authentication support.
- - Automatic form-encoding of dictionaries
- - A simple dictionary interface for request/response cookies.
- - Multipart file uploads.
- - Automatc decoding of Unicode, gzip, and deflate responses.
- - Full support for unicode URLs and domain names.
-
-
- Usage
- -----
-
- It couldn't be simpler::
-
- >>> import requests
- >>> r = requests.get('http://google.com')
-
-
- HTTPS? Basic Authentication? ::
-
- >>> r = requests.get('https://httpbin.ep.io/basic-auth/user/pass')
- >>> r.status_code
- 401
-
-
- Uh oh, we're not authorized! Let's add authentication. ::
-
- >>> r = requests.get('https://httpbin.ep.io/basic-auth/user/pass', auth=('user', 'pass'))
-
- >>> r.status_code
- 200
-
- >>> r.headers['content-type']
- 'application/json'
-
- >>> r.content
- '{"authenticated": true, "user": "user"}'
+ - International Domains and URLs
+ - Keep-Alive & Connection Pooling
+ - Sessions with Cookie Persistence
+ - Browser-style SSL Verification
+ - Basic/Digest Authentication
+ - Elegant Key/Value Cookies
+ - Automatic Decompression
+ - Unicode Response Bodies
+ - Multipart File Uploads
+ - Connection Timeouts
Installation
@@ -115,6 +87,59 @@ Description: Requests: HTTP for Humans
History
-------
+ 0.10.8 (2012-03-09)
+ +++++++++++++++++++
+
+ * Generate chunked ValueError fix
+ * Proxy configuration by environment variables
+ * Simplification of iter_lines.
+ * New `trust_env` configuration for disabling system/environment hints.
+ * Suppress cookie errors.
+
+ 0.10.7 (2012-03-07)
+ +++++++++++++++++++
+
+ * `encode_uri` = False
+
+ 0.10.6 (2012-02-25)
+ +++++++++++++++++++
+
+ * Allow '=' in cookies.
+
+ 0.10.5 (2012-02-25)
+ +++++++++++++++++++
+
+ * Response body with 0 content-length fix.
+ * New async.imap.
+ * Don't fail on netrc.
+
+
+ 0.10.4 (2012-02-20)
+ +++++++++++++++++++
+
+ * Honor netrc.
+
+ 0.10.3 (2012-02-20)
+ +++++++++++++++++++
+
+ * HEAD requests don't follow redirects anymore.
+ * raise_for_status() doesn't raise for 3xx anymore.
+ * Make Session objects picklable.
+ * ValueError for invalid schema URLs.
+
+ 0.10.2 (2012-01-15)
+ +++++++++++++++++++
+
+ * Vastly improved URL quoting.
+ * Additional allowed cookie key values.
+ * Attempted fix for "Too many open files" Error
+ * Replace unicode errors on first pass, no need for second pass.
+ * Append '/' to bare-domain urls before query insertion.
+ * Exceptions now inherit from RuntimeError.
+ * Binary uploads + auth fix.
+ * Bugfixes.
+
+
0.10.1 (2012-01-23)
+++++++++++++++++++
@@ -141,7 +166,7 @@ Description: Requests: HTTP for Humans
0.9.2 (2012-01-18)
++++++++++++++++++
- * Asyncronous async.send method.
+ * Asynchronous async.send method.
* Support for proper chunk streams with boundaries.
* session argument for Session classes.
* Print entire hook tracebacks, not just exception instance.
@@ -155,7 +180,7 @@ Description: Requests: HTTP for Humans
++++++++++++++++++
* danger_mode for automatic Response.raise_for_status()
- * Response.iter_lines refator
+ * Response.iter_lines refactor
0.9.0 (2011-12-28)
++++++++++++++++++
@@ -212,7 +237,7 @@ Description: Requests: HTTP for Humans
0.8.2 (2011-11-19)
++++++++++++++++++
- * New unicode decoding system, based on overridable `Response.encoding`.
+ * New Unicode decoding system, based on overridable `Response.encoding`.
* Proper URL slash-quote handling.
* Cookies with ``[``, ``]``, and ``_`` allowed.
@@ -252,7 +277,7 @@ Description: Requests: HTTP for Humans
0.7.4 (2011-10-26)
++++++++++++++++++
- * Sesion Hooks fix.
+ * Session Hooks fix.
0.7.3 (2011-10-23)
++++++++++++++++++
@@ -485,5 +510,6 @@ Classifier: License :: OSI Approved :: ISC License (ISCL)
Classifier: Programming Language :: Python
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
+Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.0
Classifier: Programming Language :: Python :: 3.1
diff --git a/requests.egg-info/SOURCES.txt b/requests.egg-info/SOURCES.txt
index 804f541..40e762b 100644
--- a/requests.egg-info/SOURCES.txt
+++ b/requests.egg-info/SOURCES.txt
@@ -4,7 +4,6 @@ MANIFEST.in
NOTICE
README.rst
setup.py
-test_requests.py
requests/__init__.py
requests/api.py
requests/async.py
diff --git a/requests/__init__.py b/requests/__init__.py
index 48fb389..73d81f6 100644
--- a/requests/__init__.py
+++ b/requests/__init__.py
@@ -15,14 +15,13 @@ requests
"""
__title__ = 'requests'
-__version__ = '0.10.1'
-__build__ = 0x001001
+__version__ = '0.10.8'
+__build__ = 0x001008
__author__ = 'Kenneth Reitz'
__license__ = 'ISC'
__copyright__ = 'Copyright 2012 Kenneth Reitz'
-
from . import utils
from .models import Request, Response
from .api import request, get, head, post, patch, put, delete, options
diff --git a/requests/api.py b/requests/api.py
index b7d4158..b079eed 100644
--- a/requests/api.py
+++ b/requests/api.py
@@ -69,7 +69,7 @@ def head(url, **kwargs):
:param **kwargs: Optional arguments that ``request`` takes.
"""
- kwargs.setdefault('allow_redirects', True)
+ kwargs.setdefault('allow_redirects', False)
return request('head', url, **kwargs)
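
The hunk above makes ``head()`` stop following redirects by default, matching the 0.10.3 changelog entry "HEAD requests don't follow redirects anymore". A minimal sketch of the new default and of opting back in, using an illustrative httpbin endpoint::

    import requests

    # HEAD no longer follows redirects by default.
    r = requests.head('http://httpbin.org/redirect/1')
    assert r.status_code == 302

    # Opt back in per call to restore the old behaviour.
    r = requests.head('http://httpbin.org/redirect/1', allow_redirects=True)
    assert r.status_code == 200
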
diff --git a/requests/async.py b/requests/async.py
index 9488447..f2dad69 100644
--- a/requests/async.py
+++ b/requests/async.py
@@ -23,7 +23,7 @@ from . import api
__all__ = (
- 'map',
+ 'map', 'imap',
'get', 'options', 'head', 'post', 'put', 'patch', 'delete', 'request'
)
@@ -46,15 +46,15 @@ def patched(f):
return wrapped
-def send(r, pool=None):
- """Sends the request object using the specified pool. If a pool isn't
+def send(r, pool=None, prefetch=False):
+ """Sends the request object using the specified pool. If a pool isn't
specified this method blocks. Pools are useful because you can specify size
and can hence limit concurrency."""
if pool != None:
- return pool.spawn(r.send)
+ return pool.spawn(r.send, prefetch=prefetch)
- return gevent.spawn(r.send)
+ return gevent.spawn(r.send, prefetch=prefetch)
# Patched requests.api functions.
@@ -79,10 +79,28 @@ def map(requests, prefetch=True, size=None):
requests = list(requests)
pool = Pool(size) if size else None
- jobs = [send(r, pool) for r in requests]
+ jobs = [send(r, pool, prefetch=prefetch) for r in requests]
gevent.joinall(jobs)
- if prefetch:
- [r.response.content for r in requests]
+ return [r.response for r in requests]
- return [r.response for r in requests] \ No newline at end of file
+
+def imap(requests, prefetch=True, size=2):
+ """Concurrently converts a generator object of Requests to
+ a generator of Responses.
+
+ :param requests: a generator of Request objects.
+ :param prefetch: If False, the content will not be downloaded immediately.
+ :param size: Specifies the number of requests to make at a time. default is 2
+ """
+
+ pool = Pool(size)
+
+ def send(r):
+ r.send(prefetch)
+ return r.response
+
+ for r in pool.imap_unordered(send, requests):
+ yield r
+
+ pool.join() \ No newline at end of file
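
``async.imap`` (new in 0.10.5) complements ``async.map``: instead of joining all jobs and returning a list, it yields each Response as it completes through a bounded gevent pool. A usage sketch, assuming gevent is installed and using illustrative httpbin URLs::

    from requests import async

    urls = ['http://httpbin.org/get', 'http://httpbin.org/headers']

    # map: build unsent Requests, send them all, block until every job is done.
    responses = async.map([async.get(u) for u in urls])

    # imap: a generator of Responses, with at most `size` requests in flight.
    for response in async.imap((async.get(u) for u in urls), size=2):
        print(response.status_code)
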
diff --git a/requests/auth.py b/requests/auth.py
index 183731b..2e2bebc 100644
--- a/requests/auth.py
+++ b/requests/auth.py
@@ -7,8 +7,6 @@ requests.auth
This module contains the authentication handlers for Requests.
"""
-from __future__ import unicode_literals
-
import time
import hashlib
@@ -21,7 +19,7 @@ from .utils import randombytes, parse_dict_header
def _basic_auth_str(username, password):
"""Returns a Basic Auth string."""
- return 'Basic ' + b64encode(("%s:%s" % (username, password)).encode('utf-8')).strip().decode('utf-8')
+ return 'Basic ' + b64encode(('%s:%s' % (username, password)).encode('latin1')).strip().decode('latin1')
class AuthBase(object):
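
The ``_basic_auth_str`` change swaps UTF-8 for Latin-1 when building the Authorization header, which matches how most servers decode RFC 2617 credentials. A standalone sketch of the same construction::

    from base64 import b64encode

    def basic_auth_header(username, password):
        # Mirrors _basic_auth_str above: Latin-1, not UTF-8.
        creds = ('%s:%s' % (username, password)).encode('latin1')
        return 'Basic ' + b64encode(creds).strip().decode('latin1')

    print(basic_auth_header('user', 'pass'))  # 'Basic dXNlcjpwYXNz'
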
diff --git a/requests/compat.py b/requests/compat.py
index 224bfd0..fec7a01 100644
--- a/requests/compat.py
+++ b/requests/compat.py
@@ -86,8 +86,10 @@ if is_py2:
from .packages.oreos.monkeys import SimpleCookie
from StringIO import StringIO
- str = unicode
bytes = str
+ str = unicode
+ basestring = basestring
+
elif is_py3:
@@ -99,4 +101,5 @@ elif is_py3:
str = str
bytes = bytes
+ basestring = (str,bytes)
diff --git a/requests/defaults.py b/requests/defaults.py
index 424d373..9af9773 100644
--- a/requests/defaults.py
+++ b/requests/defaults.py
@@ -17,8 +17,12 @@ Configurations:
:safe_mode: If true, Requests will catch all errors.
:pool_maxsize: The maximum size of an HTTP connection pool.
:pool_connections: The number of active HTTP connection pools to use.
+:encode_uri: If true, URIs will automatically be percent-encoded.
+:trust_env: If true, the surrounding environment will be trusted (environ, netrc).
"""
+SCHEMAS = ['http', 'https']
+
from . import __version__
defaults = dict()
@@ -38,3 +42,7 @@ defaults['max_retries'] = 0
defaults['danger_mode'] = False
defaults['safe_mode'] = False
defaults['keep_alive'] = True
+defaults['encode_uri'] = True
+defaults['trust_env'] = True
+
+
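
``encode_uri`` and ``trust_env`` are ordinary config keys, so they can be set per request or per session. A sketch of disabling the new environment hints (HTTP_PROXY/HTTPS_PROXY, ~/.netrc, CA-bundle variables), with an illustrative URL::

    import requests

    # trust_env=False ignores proxy variables, netrc credentials and
    # REQUESTS_CA_BUNDLE/CURL_CA_BUNDLE lookups for this request.
    r = requests.get('http://httpbin.org/get',
                     config={'trust_env': False})
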
diff --git a/requests/exceptions.py b/requests/exceptions.py
index c7b98e6..d5b2ab1 100644
--- a/requests/exceptions.py
+++ b/requests/exceptions.py
@@ -8,12 +8,13 @@ This module contains the set of Requests' exceptions.
"""
-class RequestException(Exception):
+class RequestException(RuntimeError):
"""There was an ambiguous exception that occurred while handling your
request."""
class HTTPError(RequestException):
"""An HTTP error occurred."""
+ response = None
class ConnectionError(RequestException):
"""A Connection error occurred."""
diff --git a/requests/models.py b/requests/models.py
index c200896..753e83a 100644
--- a/requests/models.py
+++ b/requests/models.py
@@ -21,14 +21,16 @@ from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
from .packages.urllib3 import connectionpool, poolmanager
from .packages.urllib3.filepost import encode_multipart_formdata
+from .defaults import SCHEMAS
from .exceptions import (
ConnectionError, HTTPError, RequestException, Timeout, TooManyRedirects,
URLRequired, SSLError)
from .utils import (
- get_encoding_from_headers, stream_decode_response_unicode,
- stream_decompress, guess_filename, requote_path, dict_from_string)
-
-from .compat import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, str, bytes, SimpleCookie, is_py3, is_py2
+ get_encoding_from_headers, stream_untransfer, guess_filename, requote_uri,
+ dict_from_string, stream_decode_response_unicode, get_netrc_auth)
+from .compat import (
+ urlparse, urlunparse, urljoin, urlsplit, urlencode, str, bytes,
+ SimpleCookie, is_py2)
# Import chardet if it is available.
try:
@@ -39,7 +41,6 @@ except ImportError:
REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved)
-
class Request(object):
"""The :class:`Request <Request>` object. It carries out all functionality of
Requests. Recommended interface is with the Requests functions.
@@ -64,16 +65,14 @@ class Request(object):
verify=None,
session=None):
+ #: Dictionary of configurations for this request.
+ self.config = dict(config or [])
+
#: Float describes the timeout of the request.
# (Use socket.setdefaulttimeout() as fallback)
self.timeout = timeout
#: Request URL.
-
- # if isinstance(url, str):
- # url = url.encode('utf-8')
- # print(dir(url))
-
self.url = url
#: Dictionary of HTTP Headers to attach to the :class:`Request <Request>`.
@@ -103,6 +102,14 @@ class Request(object):
# Dictionary mapping protocol to the URL of the proxy (e.g. {'http': 'foo.bar:3128'})
self.proxies = dict(proxies or [])
+ # If no proxies are given, allow configuration by environment variables
+ # HTTP_PROXY and HTTPS_PROXY.
+ if not self.proxies and self.config.get('trust_env'):
+ if 'HTTP_PROXY' in os.environ:
+ self.proxies['http'] = os.environ['HTTP_PROXY']
+ if 'HTTPS_PROXY' in os.environ:
+ self.proxies['https'] = os.environ['HTTPS_PROXY']
+
self.data, self._enc_data = self._encode_params(data)
self.params, self._enc_params = self._encode_params(params)
@@ -116,9 +123,6 @@ class Request(object):
#: CookieJar to attach to :class:`Request <Request>`.
self.cookies = dict(cookies or [])
- #: Dictionary of configurations for this request.
- self.config = dict(config or [])
-
#: True if Request has been sent.
self.sent = False
@@ -152,15 +156,9 @@ class Request(object):
self.headers = headers
self._poolmanager = _poolmanager
- # Pre-request hook.
- r = dispatch_hook('pre_request', hooks, self)
- self.__dict__.update(r.__dict__)
-
-
def __repr__(self):
return '<Request [%s]>' % (self.method)
-
def _build_response(self, resp):
"""Build internal :class:`Response <Response>` object
from given response.
@@ -200,26 +198,31 @@ class Request(object):
# Save original response for later.
response.raw = resp
- response.url = self.full_url
+ if isinstance(self.full_url, bytes):
+ response.url = self.full_url.decode('utf-8')
+ else:
+ response.url = self.full_url
return response
history = []
r = build(resp)
- cookies = self.cookies
+
self.cookies.update(r.cookies)
if r.status_code in REDIRECT_STATI and not self.redirect:
- while (
- ('location' in r.headers) and
- ((r.status_code is codes.see_other) or (self.allow_redirects))
- ):
+ while (('location' in r.headers) and
+ ((r.status_code is codes.see_other) or (self.allow_redirects))):
+ r.content # Consume socket so it can be released
if not len(history) < self.config.get('max_redirects'):
raise TooManyRedirects()
+ # Release the connection back into the pool.
+ r.raw.release_conn()
+
history.append(r)
url = r.headers['location']
@@ -232,7 +235,10 @@ class Request(object):
# Facilitate non-RFC2616-compliant 'location' headers
# (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
if not urlparse(url).netloc:
- url = urljoin(r.url, url)
+ url = urljoin(r.url,
+ # Compliant with RFC3986, we percent
+ # encode the url.
+ requote_uri(url))
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
if r.status_code is codes.see_other:
@@ -254,18 +260,17 @@ class Request(object):
method=method,
params=self.session.params,
auth=self.auth,
- cookies=cookies,
+ cookies=self.cookies,
redirect=True,
config=self.config,
timeout=self.timeout,
_poolmanager=self._poolmanager,
- proxies = self.proxies,
- verify = self.verify,
- session = self.session
+ proxies=self.proxies,
+ verify=self.verify,
+ session=self.session
)
request.send()
- cookies.update(request.response.cookies)
r = request.response
self.cookies.update(r.cookies)
@@ -275,7 +280,6 @@ class Request(object):
self.response.request = self
self.response.cookies.update(self.cookies)
-
@staticmethod
def _encode_params(data):
"""Encode parameters in a piece of data.
@@ -288,10 +292,12 @@ class Request(object):
returns it twice.
"""
+ if isinstance(data, bytes):
+ return data, data
+
if hasattr(data, '__iter__') and not isinstance(data, str):
data = dict(data)
-
if hasattr(data, 'items'):
result = []
for k, vs in list(data.items()):
@@ -314,30 +320,44 @@ class Request(object):
# Support for unicode domain names and paths.
scheme, netloc, path, params, query, fragment = urlparse(url)
-
if not scheme:
raise ValueError("Invalid URL %r: No schema supplied" % url)
+ if not scheme in SCHEMAS:
+ raise ValueError("Invalid scheme %r" % scheme)
+
netloc = netloc.encode('idna').decode('utf-8')
+ if not path:
+ path = '/'
+
+
if is_py2:
+ if isinstance(scheme, str):
+ scheme = scheme.encode('utf-8')
+ if isinstance(netloc, str):
+ netloc = netloc.encode('utf-8')
if isinstance(path, str):
path = path.encode('utf-8')
+ if isinstance(params, str):
+ params = params.encode('utf-8')
+ if isinstance(query, str):
+ query = query.encode('utf-8')
+ if isinstance(fragment, str):
+ fragment = fragment.encode('utf-8')
- path = requote_path(path)
-
- # print([ scheme, netloc, path, params, query, fragment ])
- # print('---------------------')
-
- url = (urlunparse([ scheme, netloc, path, params, query, fragment ]))
+ url = (urlunparse([scheme, netloc, path, params, query, fragment]))
if self._enc_params:
if urlparse(url).query:
- return '%s&%s' % (url, self._enc_params)
+ url = '%s&%s' % (url, self._enc_params)
else:
- return '%s?%s' % (url, self._enc_params)
- else:
- return url
+ url = '%s?%s' % (url, self._enc_params)
+
+ if self.config.get('encode_uri', True):
+ url = requote_uri(url)
+
+ return url
@property
def path_url(self):
@@ -355,9 +375,6 @@ class Request(object):
if not path:
path = '/'
- # if is_py3:
- path = quote(path.encode('utf-8'))
-
url.append(path)
query = p.query
@@ -365,19 +382,15 @@ class Request(object):
url.append('?')
url.append(query)
- # print(url)
-
return ''.join(url)
-
def register_hook(self, event, hook):
"""Properly register a hook."""
return self.hooks[event].append(hook)
-
def send(self, anyway=False, prefetch=False):
- """Sends the request. Returns True of successful, false if not.
+ """Sends the request. Returns True of successful, False if not.
If there was an HTTPError during transmission,
self.response.status_code will contain the HTTPError code.
@@ -435,6 +448,10 @@ class Request(object):
if (content_type) and (not 'content-type' in self.headers):
self.headers['Content-Type'] = content_type
+ # Use .netrc auth if none was provided.
+ if not self.auth and self.config.get('trust_env'):
+ self.auth = get_netrc_auth(url)
+
if self.auth:
if isinstance(self.auth, tuple) and len(self.auth) == 2:
# special-case basic HTTP auth
@@ -472,13 +489,12 @@ class Request(object):
if self.verify is not True:
cert_loc = self.verify
-
# Look for configuration.
- if not cert_loc:
+ if not cert_loc and self.config.get('trust_env'):
cert_loc = os.environ.get('REQUESTS_CA_BUNDLE')
# Curl compatibility.
- if not cert_loc:
+ if not cert_loc and self.config.get('trust_env'):
cert_loc = os.environ.get('CURL_CA_BUNDLE')
# Use the awesome certifi list.
@@ -509,6 +525,10 @@ class Request(object):
# Attach Cookie header to request.
self.headers['Cookie'] = cookie_header
+ # Pre-request hook.
+ r = dispatch_hook('pre_request', self.hooks, self)
+ self.__dict__.update(r.__dict__)
+
try:
# The inner try .. except re-raises certain exceptions as
# internal exception types; the outer suppresses exceptions
@@ -523,7 +543,7 @@ class Request(object):
redirect=False,
assert_same_host=False,
preload_content=False,
- decode_content=True,
+ decode_content=False,
retries=self.config.get('max_retries', 0),
timeout=self.timeout,
)
@@ -613,7 +633,6 @@ class Response(object):
#: Dictionary of configurations for this request.
self.config = {}
-
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
@@ -633,7 +652,6 @@ class Response(object):
return False
return True
-
def iter_content(self, chunk_size=10 * 1024, decode_unicode=False):
"""Iterates over the response data. This avoids reading the content
at once into memory for large responses. The chunk size is the number
@@ -660,74 +678,64 @@ class Response(object):
pending_bytes = resp.chunk_left
while pending_bytes:
chunk = fp.read(min(chunk_size, pending_bytes))
- pending_bytes-=len(chunk)
+ pending_bytes -= len(chunk)
yield chunk
- fp.read(2) # throw away crlf
+ fp.read(2) # throw away crlf
while 1:
#XXX correct line size? (httplib has 64kb, seems insane)
pending_bytes = fp.readline(40).strip()
+ if not len(pending_bytes):
+ # No content, like a HEAD request. Break out.
+ break
pending_bytes = int(pending_bytes, 16)
if pending_bytes == 0:
break
while pending_bytes:
chunk = fp.read(min(chunk_size, pending_bytes))
- pending_bytes-=len(chunk)
+ pending_bytes -= len(chunk)
yield chunk
- fp.read(2) # throw away crlf
+ fp.read(2) # throw away crlf
self._content_consumed = True
fp.close()
-
if getattr(getattr(self.raw, '_original_response', None), 'chunked', False):
gen = generate_chunked()
else:
gen = generate()
- if 'gzip' in self.headers.get('content-encoding', ''):
- gen = stream_decompress(gen, mode='gzip')
- elif 'deflate' in self.headers.get('content-encoding', ''):
- gen = stream_decompress(gen, mode='deflate')
+ gen = stream_untransfer(gen, self)
if decode_unicode:
gen = stream_decode_response_unicode(gen, self)
return gen
-
def iter_lines(self, chunk_size=10 * 1024, decode_unicode=None):
"""Iterates over the response data, one line at a time. This
avoids reading the content at once into memory for large
responses.
"""
- #TODO: why rstrip by default
pending = None
- for chunk in self.iter_content(chunk_size, decode_unicode=decode_unicode):
+ for chunk in self.iter_content(
+ chunk_size=chunk_size,
+ decode_unicode=decode_unicode):
if pending is not None:
chunk = pending + chunk
- lines = chunk.splitlines(True)
+ lines = chunk.splitlines()
- for line in lines[:-1]:
- yield line.rstrip()
-
- # Save the last part of the chunk for next iteration, to keep full line together
- # lines may be empty for the last chunk of a chunked response
-
- if lines:
- pending = lines[-1]
- #if pending is a complete line, give it baack
- if pending[-1] == '\n':
- yield pending.rstrip()
- pending = None
+ if lines[-1][-1] == chunk[-1]:
+ pending = lines.pop()
else:
pending = None
- # Yield the last line
- if pending is not None:
- yield pending.rstrip()
+ for line in lines:
+ yield line
+ if pending is not None:
+ yield pending
@property
def content(self):
@@ -740,13 +748,26 @@ class Response(object):
raise RuntimeError(
'The content for this response was already consumed')
- self._content = self.raw.read()
+ if self.status_code is 0:
+ self._content = None
+ else:
+ self._content = bytes().join(self.iter_content()) or bytes()
+
except AttributeError:
self._content = None
self._content_consumed = True
return self._content
+ def _detected_encoding(self):
+ try:
+ detected = chardet.detect(self.content) or {}
+ return detected.get('encoding')
+
+ # Trust that chardet isn't available or something went terribly wrong.
+ except Exception:
+ pass
+
@property
def text(self):
@@ -762,43 +783,34 @@ class Response(object):
# Fallback to auto-detected encoding if chardet is available.
if self.encoding is None:
- try:
- detected = chardet.detect(self.content) or {}
- encoding = detected.get('encoding')
-
- # Trust that chardet isn't available or something went terribly wrong.
- except Exception:
- pass
+ encoding = self._detected_encoding()
# Decode unicode from given encoding.
try:
- content = str(self.content, encoding)
+ content = str(self.content, encoding, errors='replace')
except (UnicodeError, TypeError):
pass
- # Try to fall back:
- if not content:
- try:
- content = str(content, encoding, errors='replace')
- except (UnicodeError, TypeError):
- pass
-
return content
-
- def raise_for_status(self):
+ def raise_for_status(self, allow_redirects=True):
"""Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred."""
if self.error:
raise self.error
- if (self.status_code >= 300) and (self.status_code < 400):
- raise HTTPError('%s Redirection' % self.status_code)
+ if (self.status_code >= 300) and (self.status_code < 400) and not allow_redirects:
+ http_error = HTTPError('%s Redirection' % self.status_code)
+ http_error.response = self
+ raise http_error
elif (self.status_code >= 400) and (self.status_code < 500):
- raise HTTPError('%s Client Error' % self.status_code)
-
- elif (self.status_code >= 500) and (self.status_code < 600):
- raise HTTPError('%s Server Error' % self.status_code)
+ http_error = HTTPError('%s Client Error' % self.status_code)
+ http_error.response = self
+ raise http_error
+ elif (self.status_code >= 500) and (self.status_code < 600):
+ http_error = HTTPError('%s Server Error' % self.status_code)
+ http_error.response = self
+ raise http_error
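
Taken together with the 0.10.3 changelog entries, ``raise_for_status()`` no longer raises for 3xx by default; passing ``allow_redirects=False`` restores the strict behaviour. Sketch, with an illustrative redirecting URL::

    import requests

    r = requests.get('http://httpbin.org/redirect/1', allow_redirects=False)

    r.raise_for_status()                       # 302: passes silently now
    r.raise_for_status(allow_redirects=False)  # raises HTTPError('302 Redirection')
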
diff --git a/requests/packages/oreos/monkeys.py b/requests/packages/oreos/monkeys.py
index 2269e30..72ce68d 100644
--- a/requests/packages/oreos/monkeys.py
+++ b/requests/packages/oreos/monkeys.py
@@ -249,10 +249,13 @@ class CookieError(Exception):
# quoted with a preceding '\' slash.
#
# These are taken from RFC2068 and RFC2109.
+# _RFC2965Forbidden is the list of forbidden chars we accept anyway
# _LegalChars is the list of chars which don't require "'s
# _Translator hash-table for fast quoting
#
-_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~[]_"
+_RFC2965Forbidden = "[]:{}="
+_LegalChars = ( string.ascii_letters + string.digits +
+ "!#$%&'*+-.^_`|~_" + _RFC2965Forbidden )
_Translator = {
'\000' : '\\000', '\001' : '\\001', '\002' : '\\002',
'\003' : '\\003', '\004' : '\\004', '\005' : '\\005',
diff --git a/requests/packages/oreos/structures.py b/requests/packages/oreos/structures.py
index 063d5f9..8329277 100644
--- a/requests/packages/oreos/structures.py
+++ b/requests/packages/oreos/structures.py
@@ -1,8 +1,8 @@
# -*- coding: utf-8 -*-
"""
-oreos.sructures
-~~~~~~~~~~~~~~~
+oreos.structures
+~~~~~~~~~~~~~~~~
The plastic blue packaging.
@@ -362,7 +362,7 @@ class MultiDict(TypeConversionDict):
"""
try:
return dict.pop(self, key)[0]
- except KeyError, e:
+ except KeyError as e:
if default is not _missing:
return default
raise KeyError(str(e))
@@ -372,7 +372,7 @@ class MultiDict(TypeConversionDict):
try:
item = dict.popitem(self)
return (item[0], item[1][0])
- except KeyError, e:
+ except KeyError as e:
raise KeyError(str(e))
def poplist(self, key):
@@ -389,7 +389,7 @@ class MultiDict(TypeConversionDict):
"""Pop a ``(key, list)`` tuple from the dict."""
try:
return dict.popitem(self)
- except KeyError, e:
+ except KeyError as e:
raise KeyError(str(e))
def __copy__(self):
diff --git a/requests/packages/urllib3/__init__.py b/requests/packages/urllib3/__init__.py
index 5f70c56..2e9c663 100644
--- a/requests/packages/urllib3/__init__.py
+++ b/requests/packages/urllib3/__init__.py
@@ -10,7 +10,7 @@ urllib3 - Thread-safe connection pooling and re-using.
__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
__license__ = 'MIT'
-__version__ = '1.1'
+__version__ = '1.2.2'
from .connectionpool import (
diff --git a/requests/packages/urllib3/connectionpool.py b/requests/packages/urllib3/connectionpool.py
index 52b1802..39e652e 100644
--- a/requests/packages/urllib3/connectionpool.py
+++ b/requests/packages/urllib3/connectionpool.py
@@ -7,6 +7,7 @@
import logging
import socket
+from base64 import b64encode
from socket import error as SocketError, timeout as SocketTimeout
try:
@@ -16,33 +17,45 @@ except ImportError: # Doesn't exist on OSX and other platforms
poll = False
try: # Python 3
- from http.client import HTTPConnection, HTTPSConnection, HTTPException
+ from http.client import HTTPConnection, HTTPException
from http.client import HTTP_PORT, HTTPS_PORT
except ImportError:
- from httplib import HTTPConnection, HTTPSConnection, HTTPException
+ from httplib import HTTPConnection, HTTPException
from httplib import HTTP_PORT, HTTPS_PORT
try: # Python 3
- from queue import Queue, Empty, Full
+ from queue import LifoQueue, Empty, Full
except ImportError:
- from Queue import Queue, Empty, Full
+ from Queue import LifoQueue, Empty, Full
+
try: # Compiled with SSL?
+ HTTPSConnection = object
+ BaseSSLError = None
+ ssl = None
+
+ try: # Python 3
+ from http.client import HTTPSConnection
+ except ImportError:
+ from httplib import HTTPSConnection
+
import ssl
BaseSSLError = ssl.SSLError
+
except ImportError:
- ssl = None
- BaseSSLError = None
+ pass
from .packages.ssl_match_hostname import match_hostname, CertificateError
from .request import RequestMethods
from .response import HTTPResponse
-from .exceptions import (SSLError,
+from .exceptions import (
+ EmptyPoolError,
+ HostChangedError,
+ LocationParseError,
MaxRetryError,
+ SSLError,
TimeoutError,
- HostChangedError,
- EmptyPoolError,
)
from .packages.ssl_match_hostname import match_hostname, CertificateError
@@ -103,6 +116,7 @@ class ConnectionPool(object):
"""
scheme = None
+ QueueCls = LifoQueue
def __init__(self, host, port=None):
self.host = host
@@ -156,11 +170,11 @@ class HTTPConnectionPool(ConnectionPool, RequestMethods):
def __init__(self, host, port=None, strict=False, timeout=None, maxsize=1,
block=False, headers=None):
- self.host = host
- self.port = port
+ super(HTTPConnectionPool, self).__init__(host, port)
+
self.strict = strict
self.timeout = timeout
- self.pool = Queue(maxsize)
+ self.pool = self.QueueCls(maxsize)
self.block = block
self.headers = headers or {}
@@ -468,7 +482,11 @@ class HTTPSConnectionPool(HTTPConnectionPool):
log.info("Starting new HTTPS connection (%d): %s"
% (self.num_connections, self.host))
- if not ssl:
+ if not ssl: # Platform-specific: Python compiled without +ssl
+ if not HTTPSConnection or HTTPSConnection is object:
+ raise SSLError("Can't connect to HTTPS URL because the SSL "
+ "module is not available.")
+
return HTTPSConnection(host=self.host, port=self.port)
connection = VerifiedHTTPSConnection(host=self.host, port=self.port)
@@ -526,7 +544,7 @@ def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
if basic_auth:
headers['authorization'] = 'Basic ' + \
- basic_auth.encode('base64').strip()
+ b64encode(six.b(basic_auth)).decode('utf-8')
return headers
@@ -542,10 +560,12 @@ def get_host(url):
>>> get_host('google.com:80')
('http', 'google.com', 80)
"""
+
# This code is actually similar to urlparse.urlsplit, but much
# simplified for our needs.
port = None
scheme = 'http'
+
if '://' in url:
scheme, url = url.split('://', 1)
if '/' in url:
@@ -554,7 +574,12 @@ def get_host(url):
_auth, url = url.split('@', 1)
if ':' in url:
url, port = url.split(':', 1)
+
+ if not port.isdigit():
+ raise LocationParseError("Failed to parse: %s")
+
port = int(port)
+
return scheme, url, port
@@ -592,7 +617,7 @@ def is_connection_dropped(conn):
:param conn:
``HTTPConnection`` object.
"""
- if not poll:
+ if not poll: # Platform-specific
return select([conn.sock], [], [], 0.0)[0]
# This version is better on platforms that support it.
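
``make_headers`` now builds the Basic auth value with ``b64encode`` instead of the Python-2-only ``str.encode('base64')``. A sketch of the helper's output::

    from requests.packages.urllib3.connectionpool import make_headers

    headers = make_headers(keep_alive=True, basic_auth='user:pass')
    # {'connection': 'keep-alive', 'authorization': 'Basic dXNlcjpwYXNz'}
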
diff --git a/requests/packages/urllib3/exceptions.py b/requests/packages/urllib3/exceptions.py
index 0bffeb4..15c9699 100644
--- a/requests/packages/urllib3/exceptions.py
+++ b/requests/packages/urllib3/exceptions.py
@@ -4,6 +4,7 @@
# This module is part of urllib3 and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
## Base Exceptions
class HTTPError(Exception):
@@ -27,18 +28,20 @@ class SSLError(HTTPError):
class MaxRetryError(PoolError):
"Raised when the maximum number of retries is exceeded."
+
def __init__(self, pool, url):
- PoolError.__init__(self, pool,
- "Max retries exceeded with url: %s" % url)
+ message = "Max retries exceeded with url: %s" % url
+ PoolError.__init__(self, pool, message)
self.url = url
class HostChangedError(PoolError):
"Raised when an existing pool gets a request for a foreign host."
+
def __init__(self, pool, url, retries=3):
- PoolError.__init__(self, pool,
- "Tried to open a foreign host with url: %s" % url)
+ message = "Tried to open a foreign host with url: %s" % url
+ PoolError.__init__(self, pool, message)
self.url = url
self.retries = retries
@@ -52,3 +55,13 @@ class TimeoutError(PoolError):
class EmptyPoolError(PoolError):
"Raised when a pool runs out of connections and no more are allowed."
pass
+
+
+class LocationParseError(ValueError, HTTPError):
+ "Raised when get_host or similar fails to parse the URL input."
+
+ def __init__(self, location):
+ message = "Failed to parse: %s" % location
+ super(LocationParseError, self).__init__(message)
+
+ self.location = location
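
Because ``LocationParseError`` subclasses both ``ValueError`` and ``HTTPError``, callers catching either still work. A sketch of how ``get_host`` above surfaces it for a malformed port::

    from requests.packages.urllib3.connectionpool import get_host
    from requests.packages.urllib3.exceptions import LocationParseError

    try:
        get_host('http://example.com:notaport/')
    except LocationParseError as e:
        print(e.location)  # the location that failed to parse
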
diff --git a/requests/packages/urllib3/poolmanager.py b/requests/packages/urllib3/poolmanager.py
index f194b2e..d42f35b 100644
--- a/requests/packages/urllib3/poolmanager.py
+++ b/requests/packages/urllib3/poolmanager.py
@@ -117,9 +117,19 @@ class ProxyManager(RequestMethods):
def __init__(self, proxy_pool):
self.proxy_pool = proxy_pool
+ def _set_proxy_headers(self, headers=None):
+ headers = headers or {}
+
+ # Same headers as curl passes for --proxy1.0
+ headers['Accept'] = '*/*'
+ headers['Proxy-Connection'] = 'Keep-Alive'
+
+ return headers
+
def urlopen(self, method, url, **kw):
"Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
kw['assert_same_host'] = False
+ kw['headers'] = self._set_proxy_headers(kw.get('headers'))
return self.proxy_pool.urlopen(method, url, **kw)
diff --git a/requests/packages/urllib3/response.py b/requests/packages/urllib3/response.py
index e023970..4dd431e 100644
--- a/requests/packages/urllib3/response.py
+++ b/requests/packages/urllib3/response.py
@@ -11,12 +11,7 @@ import zlib
from io import BytesIO
from .exceptions import HTTPError
-
-
-try:
- basestring = basestring
-except NameError: # Python 3
- basestring = (str, bytes)
+from .packages.six import string_types as basestring
log = logging.getLogger(__name__)
diff --git a/requests/sessions.py b/requests/sessions.py
index d9683b0..87320d6 100644
--- a/requests/sessions.py
+++ b/requests/sessions.py
@@ -40,7 +40,7 @@ def merge_kwargs(local_kwarg, default_kwarg):
kwargs.update(local_kwarg)
# Remove keys that are set to None.
- for (k,v) in list(local_kwarg.items()):
+ for (k, v) in list(local_kwarg.items()):
if v is None:
del kwargs[k]
@@ -52,7 +52,7 @@ class Session(object):
__attrs__ = [
'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks',
- 'params', 'config']
+ 'params', 'config', 'verify']
def __init__(self,
@@ -64,6 +64,7 @@ class Session(object):
hooks=None,
params=None,
config=None,
+ prefetch=False,
verify=True):
self.headers = headers or {}
@@ -74,15 +75,13 @@ class Session(object):
self.hooks = hooks or {}
self.params = params or {}
self.config = config or {}
+ self.prefetch = prefetch
self.verify = verify
for (k, v) in list(defaults.items()):
self.config.setdefault(k, v)
- self.poolmanager = PoolManager(
- num_pools=self.config.get('pool_connections'),
- maxsize=self.config.get('pool_maxsize')
- )
+ self.init_poolmanager()
# Set up a CookieJar to be used by default
self.cookies = {}
@@ -91,6 +90,12 @@ class Session(object):
if cookies is not None:
self.cookies.update(cookies)
+ def init_poolmanager(self):
+ self.poolmanager = PoolManager(
+ num_pools=self.config.get('pool_connections'),
+ maxsize=self.config.get('pool_maxsize')
+ )
+
def __repr__(self):
return '<requests-client at 0x%x>' % (id(self))
@@ -145,9 +150,7 @@ class Session(object):
headers = {} if headers is None else headers
params = {} if params is None else params
hooks = {} if hooks is None else hooks
-
- if verify is None:
- verify = self.verify
+ prefetch = self.prefetch or prefetch
# use session's hooks as defaults
for key, cb in list(self.hooks.items()):
@@ -235,7 +238,7 @@ class Session(object):
:param **kwargs: Optional arguments that ``request`` takes.
"""
- kwargs.setdefault('allow_redirects', True)
+ kwargs.setdefault('allow_redirects', False)
return self.request('head', url, **kwargs)
@@ -281,6 +284,15 @@ class Session(object):
return self.request('delete', url, **kwargs)
+ def __getstate__(self):
+ return dict((attr, getattr(self, attr, None)) for attr in self.__attrs__)
+
+ def __setstate__(self, state):
+ for attr, value in state.items():
+ setattr(self, attr, value)
+
+ self.init_poolmanager()
+
def session(**kwargs):
"""Returns a :class:`Session` for context-management."""
diff --git a/requests/utils.py b/requests/utils.py
index 0e0f69e..6952a99 100644
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -14,10 +14,47 @@ import codecs
import os
import random
import re
+import traceback
import zlib
+from netrc import netrc, NetrcParseError
from .compat import parse_http_list as _parse_list_header
-from .compat import quote, unquote, cookielib, SimpleCookie, is_py2
+from .compat import quote, cookielib, SimpleCookie, is_py2, urlparse
+from .compat import basestring, bytes, str
+
+
+NETRC_FILES = ('.netrc', '_netrc')
+
+
+def get_netrc_auth(url):
+ """Returns the Requests tuple auth for a given url from netrc."""
+
+ locations = (os.path.expanduser('~/{0}'.format(f)) for f in NETRC_FILES)
+ netrc_path = None
+
+ for loc in locations:
+ if os.path.exists(loc) and not netrc_path:
+ netrc_path = loc
+
+ # Abort early if there isn't one.
+ if netrc_path is None:
+ return netrc_path
+
+ ri = urlparse(url)
+
+ # Strip port numbers from netloc
+ host = ri.netloc.split(':')[0]
+
+ try:
+ _netrc = netrc(netrc_path).authenticators(host)
+ if _netrc:
+ # Return with login / password
+ login_i = (0 if _netrc[0] else 1)
+ return (_netrc[login_i], _netrc[2])
+ except (NetrcParseError, IOError):
+ # If there was a parsing error or a permissions issue reading the file,
+ # we'll just skip netrc auth
+ pass
def dict_from_string(s):
@@ -25,20 +62,26 @@ def dict_from_string(s):
cookies = dict()
- c = SimpleCookie()
- c.load(s)
+ try:
+ c = SimpleCookie()
+ c.load(s)
- for k,v in list(c.items()):
- cookies.update({k: v.value})
+ for k, v in list(c.items()):
+ cookies.update({k: v.value})
+ # This stuff is not to be trusted.
+ except Exception:
+ pass
return cookies
+
def guess_filename(obj):
"""Tries to guess the filename of the given object."""
name = getattr(obj, 'name', None)
if name and name[0] != '<' and name[-1] != '>':
return name
+
# From mitsuhiko/werkzeug (used with permission).
def parse_list_header(value):
"""Parse lists as described by RFC 2068 Section 2.
@@ -145,8 +188,14 @@ def header_expand(headers):
if isinstance(headers, dict):
headers = list(headers.items())
-
+ elif isinstance(headers, basestring):
+ return headers
elif isinstance(headers, str):
+ # As discussed in https://github.com/kennethreitz/requests/issues/400
+ # latin-1 is the most conservative encoding used on the web. Anyone
+ # who needs more can encode to a byte-string before calling
+ return headers.encode("latin-1")
+ elif headers is None:
return headers
for i, (value, params) in enumerate(headers):
@@ -164,10 +213,9 @@ def header_expand(headers):
collector.append('; '.join(_params))
- if not len(headers) == i+1:
+ if not len(headers) == i + 1:
collector.append(', ')
-
# Remove trailing separators.
if collector[-1] in (', ', '; '):
del collector[-1]
@@ -175,7 +223,6 @@ def header_expand(headers):
return ''.join(collector)
-
def randombytes(n):
"""Return n random bytes."""
if is_py2:
@@ -286,23 +333,6 @@ def get_encoding_from_headers(headers):
return 'ISO-8859-1'
-def unicode_from_html(content):
- """Attempts to decode an HTML string into unicode.
- If unsuccessful, the original content is returned.
- """
-
- encodings = get_encodings_from_content(content)
-
- for encoding in encodings:
-
- try:
- return str(content, encoding)
- except (UnicodeError, TypeError):
- pass
-
- return content
-
-
def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator."""
@@ -354,15 +384,6 @@ def get_unicode_from_response(r):
return r.content
-def decode_gzip(content):
- """Return gzip-decoded string.
-
- :param content: bytestring to gzip-decode.
- """
-
- return zlib.decompress(content, 16 + zlib.MAX_WBITS)
-
-
def stream_decompress(iterator, mode='gzip'):
"""
Stream decodes an iterator over compressed data
@@ -390,18 +411,53 @@ def stream_decompress(iterator, mode='gzip'):
yield chunk
else:
# Make sure everything has been returned from the decompression object
- buf = dec.decompress('')
+ buf = dec.decompress(bytes())
rv = buf + dec.flush()
if rv:
yield rv
-def requote_path(path):
- """Re-quote the given URL path component.
+def stream_untransfer(gen, resp):
+ if 'gzip' in resp.headers.get('content-encoding', ''):
+ gen = stream_decompress(gen, mode='gzip')
+ elif 'deflate' in resp.headers.get('content-encoding', ''):
+ gen = stream_decompress(gen, mode='deflate')
+
+ return gen
- This function passes the given path through an unquote/quote cycle to
+
+# The unreserved URI characters (RFC 3986)
+UNRESERVED_SET = frozenset(
+ "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
+ + "0123456789-._~")
+
+
+def unquote_unreserved(uri):
+ """Un-escape any percent-escape sequences in a URI that are unreserved
+ characters.
+ This leaves all reserved, illegal and non-ASCII bytes encoded.
+ """
+ parts = uri.split('%')
+ for i in range(1, len(parts)):
+ h = parts[i][0:2]
+ if len(h) == 2:
+ c = chr(int(h, 16))
+ if c in UNRESERVED_SET:
+ parts[i] = c + parts[i][2:]
+ else:
+ parts[i] = '%' + parts[i]
+ else:
+ parts[i] = '%' + parts[i]
+ return ''.join(parts)
+
+
+def requote_uri(uri):
+ """Re-quote the given URI.
+
+ This function passes the given URI through an unquote/quote cycle to
ensure that it is fully and consistently quoted.
"""
- parts = path.split(b"/")
- parts = (quote(unquote(part), safe=b"") for part in parts)
- return b"/".join(parts)
+ # Unquote only the unreserved characters
+ # Then quote only illegal characters (do not quote reserved, unreserved,
+ # or '%')
+ return quote(unquote_unreserved(uri), safe="!#$%&'()*+,/:;=?@[]~")
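
``requote_uri`` replaces the path-only ``requote_path``: ``unquote_unreserved`` first decodes only the percent-escapes of unreserved characters, then ``quote`` re-escapes anything illegal while leaving reserved characters and '%' alone, so already-quoted URLs survive a round trip unchanged. Sketch::

    from requests.utils import requote_uri

    # Unreserved escapes are normalized; reserved ones are preserved.
    requote_uri('http://example.com/%7Euser/?q=a%2Fb')
    # -> 'http://example.com/~user/?q=a%2Fb'
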
diff --git a/setup.py b/setup.py
index 641463d..849ab83 100755
--- a/setup.py
+++ b/setup.py
@@ -4,7 +4,7 @@
import os
import sys
import requests
-from requests.compat import is_py3
+from requests.compat import is_py3, is_py2
try:
from setuptools import setup
@@ -22,11 +22,20 @@ if sys.argv[-1] == 'test':
sys.exit()
required = ['certifi>=0.0.7',]
+packages = [
+ 'requests',
+ 'requests.packages',
+ 'requests.packages.urllib3',
+ 'requests.packages.urllib3.packages',
+ 'requests.packages.urllib3.packages.ssl_match_hostname',
+ 'requests.packages.urllib3.packages.mimetools_choose_boundary',
+]
if is_py3:
required.append('chardet2')
else:
required.append('chardet>=1.0.0')
+ packages.append('requests.packages.oreos')
setup(
@@ -38,15 +47,7 @@ setup(
author='Kenneth Reitz',
author_email='me@kennethreitz.com',
url='http://python-requests.org',
- packages=[
- 'requests',
- 'requests.packages',
- 'requests.packages.urllib3',
- 'requests.packages.urllib3.packages',
- 'requests.packages.urllib3.packages.ssl_match_hostname',
- 'requests.packages.urllib3.packages.mimetools_choose_boundary',
- 'requests.packages.oreos'
- ],
+ packages=packages,
package_data={'': ['LICENSE', 'NOTICE']},
include_package_data=True,
install_requires=required,
@@ -59,6 +60,7 @@ setup(
'Programming Language :: Python',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
+ 'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.0',
'Programming Language :: Python :: 3.1',
),
diff --git a/test_requests.py b/test_requests.py
deleted file mode 100755
index 29de3ed..0000000
--- a/test_requests.py
+++ /dev/null
@@ -1,721 +0,0 @@
-#!/usr/bin/env python
-# -*- coding: utf-8 -*-
-
-
-
-import io
-import json
-import time
-import os
-import sys
-import unittest
-
-import requests
-from requests.compat import str, bytes, StringIO
-# import envoy
-from requests import HTTPError
-from requests.auth import HTTPBasicAuth, HTTPDigestAuth
-
-
-
-if (sys.platform == 'win32') and ('HTTPBIN_URL' not in os.environ):
- os.environ['HTTPBIN_URL'] = 'http://httpbin.org/'
-
-# TODO: Detect an open port.
-PORT = os.environ.get('HTTPBIN_PORT', '7077')
-HTTPBIN_URL = os.environ.get('HTTPBIN_URL', 'http://0.0.0.0:%s/' % (PORT))
-
-
-def httpbin(*suffix):
- """Returns url for HTTPBIN resource."""
-
- return HTTPBIN_URL + '/'.join(suffix)
-
-
-SERVICES = (httpbin, )
-
-_httpbin = False
-
-class RequestsTestSuite(unittest.TestCase):
- """Requests test cases."""
-
- # It goes to eleven.
- _multiprocess_can_split_ = True
-
- def setUp(self):
-
- global _httpbin
-
- if (not 'HTTPBIN_URL' in os.environ) and not _httpbin:
- # c = envoy.connect('httpbin %s' % (PORT))
- # time.sleep(1)
- _httpbin = True
-
-
- def test_entry_points(self):
-
- requests.session
- requests.session().get
- requests.session().head
- requests.get
- requests.head
- requests.put
- requests.patch
- requests.post
-
-
-
- def test_invalid_url(self):
- self.assertRaises(ValueError, requests.get, 'hiwpefhipowhefopw')
-
- def test_HTTP_200_OK_GET(self):
- r = requests.get(httpbin('/get'))
- self.assertEqual(r.status_code, 200)
-
- def test_response_sent(self):
- r = requests.get(httpbin('/get'))
-
- self.assertTrue(r.request.sent)
-
- def test_HTTP_302_ALLOW_REDIRECT_GET(self):
- r = requests.get(httpbin('redirect', '1'))
- self.assertEqual(r.status_code, 200)
-
- def test_HTTP_302_GET(self):
- r = requests.get(httpbin('redirect', '1'), allow_redirects=False)
- self.assertEqual(r.status_code, 302)
-
-
- def test_HTTP_200_OK_GET_WITH_PARAMS(self):
- heads = {'User-agent': 'Mozilla/5.0'}
-
- r = requests.get(httpbin('user-agent'), headers=heads)
-
- assert heads['User-agent'] in r.text
- self.assertEqual(r.status_code, 200)
-
-
- def test_HTTP_200_OK_GET_WITH_MIXED_PARAMS(self):
- heads = {'User-agent': 'Mozilla/5.0'}
-
- r = requests.get(httpbin('get') + '?test=true', params={'q': 'test'}, headers=heads)
- self.assertEqual(r.status_code, 200)
-
-
- def test_user_agent_transfers(self):
- """Issue XX"""
-
- heads = {
- 'User-agent':
- 'Mozilla/5.0 (github.com/kennethreitz/requests)'
- }
-
- r = requests.get(httpbin('user-agent'), headers=heads);
- self.assertTrue(heads['User-agent'] in r.text)
-
- heads = {
- 'user-agent':
- 'Mozilla/5.0 (github.com/kennethreitz/requests)'
- }
-
- r = requests.get(httpbin('user-agent'), headers=heads);
- self.assertTrue(heads['user-agent'] in r.text)
-
-
- def test_HTTP_200_OK_HEAD(self):
- r = requests.head(httpbin('/get'))
- self.assertEqual(r.status_code, 200)
-
-
- def test_HTTP_200_OK_PUT(self):
- r = requests.put(httpbin('put'))
- self.assertEqual(r.status_code, 200)
-
-
- def test_BASICAUTH_TUPLE_HTTP_200_OK_GET(self):
-
- for service in SERVICES:
-
- auth = ('user', 'pass')
- url = service('basic-auth', 'user', 'pass')
-
- r = requests.get(url, auth=auth)
- self.assertEqual(r.status_code, 200)
-
- r = requests.get(url)
- self.assertEqual(r.status_code, 401)
-
-
- s = requests.session(auth=auth)
- r = s.get(url)
- self.assertEqual(r.status_code, 200)
-
-
- def test_BASICAUTH_HTTP_200_OK_GET(self):
-
- for service in SERVICES:
-
- auth = HTTPBasicAuth('user', 'pass')
- url = service('basic-auth', 'user', 'pass')
-
- r = requests.get(url, auth=auth)
- self.assertEqual(r.status_code, 200)
-
- auth = ('user', 'pass')
- r = requests.get(url, auth=auth)
- self.assertEqual(r.status_code, 200)
-
- r = requests.get(url)
- self.assertEqual(r.status_code, 401)
-
-
- s = requests.session(auth=auth)
- r = s.get(url)
- self.assertEqual(r.status_code, 200)
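- # (As shown above, a plain ('user', 'pass') tuple is accepted as
- # shorthand for HTTPBasicAuth.)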
-
-
- def test_DIGESTAUTH_HTTP_200_OK_GET(self):
-
- for service in SERVICES:
-
- auth = HTTPDigestAuth('user', 'pass')
- url = service('digest-auth', 'auth', 'user', 'pass')
-
- r = requests.get(url, auth=auth)
- self.assertEqual(r.status_code, 200)
-
- r = requests.get(url)
- self.assertEqual(r.status_code, 401)
-
-
- s = requests.session(auth=auth)
- r = s.get(url)
- self.assertEqual(r.status_code, 200)
-
- def test_POSTBIN_GET_POST_FILES(self):
-
- for service in SERVICES:
-
- url = service('post')
- post = requests.post(url).raise_for_status()
-
- post = requests.post(url, data={'some': 'data'})
- self.assertEqual(post.status_code, 200)
-
- post2 = requests.post(url, files={'some': open('test_requests.py')})
- self.assertEqual(post2.status_code, 200)
-
- post3 = requests.post(url, data='[{"some": "json"}]')
- self.assertEqual(post3.status_code, 200)
-
-
- def test_POSTBIN_GET_POST_FILES_WITH_PARAMS(self):
-
- for service in SERVICES:
-
- url = service('post')
- post = requests.post(url,
- files={'some': open('test_requests.py')},
- data={'some': 'data'})
-
- self.assertEqual(post.status_code, 200)
-
-
- def test_POSTBIN_GET_POST_FILES_WITH_HEADERS(self):
-
- for service in SERVICES:
-
- url = service('post')
-
- post2 = requests.post(url,
- files={'some': open('test_requests.py')},
- headers={'User-Agent': 'requests-tests'})
-
- self.assertEqual(post2.status_code, 200)
-
-
- def test_nonzero_evaluation(self):
-
- for service in SERVICES:
-
- r = requests.get(service('status', '500'))
- self.assertEqual(bool(r), False)
-
- r = requests.get(service('/get'))
- self.assertEqual(bool(r), True)
-
-
- def test_request_ok_set(self):
-
- for service in SERVICES:
-
- r = requests.get(service('status', '404'))
- # print r.status_code
- # r.raise_for_status()
- self.assertEqual(r.ok, False)
-
-
- def test_status_raising(self):
- r = requests.get(httpbin('status', '404'))
- self.assertRaises(HTTPError, r.raise_for_status)
-
- r = requests.get(httpbin('status', '200'))
- self.assertFalse(r.error)
- r.raise_for_status()
-
-
- def test_default_status_raising(self):
- config = {'danger_mode': True}
- args = [httpbin('status', '404')]
- kwargs = dict(config=config)
- self.assertRaises(HTTPError, requests.get, *args, **kwargs)
-
- r = requests.get(httpbin('status', '200'))
- self.assertEqual(r.status_code, 200)
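- # (danger_mode makes every response run raise_for_status()
- # automatically, which is why the 404 above raises HTTPError while
- # the 200 passes through untouched.)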
-
-
- def test_decompress_gzip(self):
-
- r = requests.get(httpbin('gzip'))
- r.content.decode('ascii')
-
- def test_response_has_unicode_url(self):
-
- for service in SERVICES:
-
- url = service('get')
-
- response = requests.get(url)
-
- assert isinstance(response.url, str)
-
-
- def test_unicode_get(self):
-
- for service in SERVICES:
-
- url = service('/get')
-
- requests.get(url, params={'foo': 'føø'})
- requests.get(url, params={'føø': 'føø'})
- requests.get(url, params={'føø': 'føø'})
- requests.get(url, params={'foo': 'foo'})
- requests.get(service('ø'), params={'foo': 'foo'})
-
-
- def test_httpauth_recursion(self):
-
- http_auth = HTTPBasicAuth('user', 'BADpass')
-
- for service in SERVICES:
- r = requests.get(service('basic-auth', 'user', 'pass'), auth=http_auth)
- self.assertEqual(r.status_code, 401)
-
-
- def test_urlencoded_post_data(self):
-
- for service in SERVICES:
-
- r = requests.post(service('post'), data=dict(test='fooaowpeuf'))
-
- self.assertEqual(r.status_code, 200)
- self.assertEqual(r.headers['content-type'], 'application/json')
- self.assertEqual(r.url, service('post'))
-
- rbody = json.loads(r.text)
-
- self.assertEqual(rbody.get('form'), dict(test='fooaowpeuf'))
- self.assertEqual(rbody.get('data'), '')
-
-
- def test_nonurlencoded_post_data(self):
-
- for service in SERVICES:
-
- r = requests.post(service('post'), data='fooaowpeuf')
-
- self.assertEqual(r.status_code, 200)
- self.assertEqual(r.headers['content-type'], 'application/json')
- self.assertEqual(r.url, service('post'))
-
- rbody = json.loads(r.text)
- # Body wasn't valid url-encoded data, so the server returns None as
- # "form" and the raw body as "data".
-
- assert rbody.get('form') in (None, {})
- self.assertEqual(rbody.get('data'), 'fooaowpeuf')
-
-
- def test_urlencoded_post_querystring(self):
-
- for service in SERVICES:
-
- r = requests.post(service('post'), params=dict(test='fooaowpeuf'))
-
- self.assertEqual(r.status_code, 200)
- self.assertEqual(r.headers['content-type'], 'application/json')
- self.assertEqual(r.url, service('post?test=fooaowpeuf'))
-
- rbody = json.loads(r.text)
- self.assertEqual(rbody.get('form'), {}) # No form supplied
- self.assertEqual(rbody.get('data'), '')
-
-
- def test_urlencoded_post_query_and_data(self):
-
- for service in SERVICES:
-
- r = requests.post(
- service('post'),
- params=dict(test='fooaowpeuf'),
- data=dict(test2="foobar"))
-
- self.assertEqual(r.status_code, 200)
- self.assertEqual(r.headers['content-type'], 'application/json')
- self.assertEqual(r.url, service('post?test=fooaowpeuf'))
-
- rbody = json.loads(r.text)
- self.assertEqual(rbody.get('form'), dict(test2='foobar'))
- self.assertEqual(rbody.get('data'), '')
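- # (i.e. params always land in the query string, while data is sent
- # as the form body; the two do not mix.)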
-
-
- def test_nonurlencoded_postdata(self):
-
- for service in SERVICES:
-
- r = requests.post(service('post'), data="foobar")
-
- self.assertEqual(r.status_code, 200)
- self.assertEqual(r.headers['content-type'], 'application/json')
-
- rbody = json.loads(r.text)
-
- assert rbody.get('form') in (None, {})
- self.assertEqual(rbody.get('data'), 'foobar')
-
-
- # def test_idna(self):
- # r = requests.get(u'http://➡.ws/httpbin')
- # assert 'httpbin' in r.url
-
-
- def test_urlencoded_get_query_multivalued_param(self):
-
- for service in SERVICES:
-
- r = requests.get(service('get'), params=dict(test=['foo','baz']))
- self.assertEqual(r.status_code, 200)
- self.assertEqual(r.url, service('get?test=foo&test=baz'))
-
-
- def test_urlencoded_post_querystring_multivalued(self):
-
- for service in SERVICES:
-
- r = requests.post(service('post'), params=dict(test=['foo','baz']))
- self.assertEqual(r.status_code, 200)
- self.assertEqual(r.headers['content-type'], 'application/json')
- self.assertEqual(r.url, service('post?test=foo&test=baz'))
-
- rbody = json.loads(r.text)
- self.assertEqual(rbody.get('form'), {}) # No form supplied
- self.assertEqual(rbody.get('data'), '')
-
-
- def test_urlencoded_post_query_multivalued_and_data(self):
-
- for service in SERVICES:
-
- r = requests.post(
- service('post'),
- params=dict(test=['foo','baz']),
- data=dict(test2="foobar",test3=['foo','baz']))
-
- self.assertEqual(r.status_code, 200)
- self.assertEqual(r.headers['content-type'], 'application/json')
- self.assertEqual(r.url, service('post?test=foo&test=baz'))
-
- # print(r.text)
- # print('-----------------------')
-
- rbody = json.loads(r.text)
- self.assertEqual(rbody.get('form'), dict(test2='foobar',test3=['foo','baz']))
- self.assertEqual(rbody.get('data'), '')
-
-
- def test_GET_no_redirect(self):
-
- for service in SERVICES:
-
- r = requests.get(service('redirect', '3'), allow_redirects=False)
- self.assertEqual(r.status_code, 302)
- self.assertEqual(len(r.history), 0)
-
-
- def test_HEAD_no_redirect(self):
-
- for service in SERVICES:
-
- r = requests.head(service('redirect', '3'), allow_redirects=False)
- self.assertEqual(r.status_code, 302)
- self.assertEqual(len(r.history), 0)
-
-
- def test_redirect_history(self):
-
- for service in SERVICES:
-
- r = requests.get(service('redirect', '3'))
- self.assertEqual(r.status_code, 200)
- self.assertEqual(len(r.history), 3)
-
-
- def test_relative_redirect_history(self):
-
- for service in SERVICES:
-
- r = requests.get(service('relative-redirect', '3'))
- self.assertEqual(r.status_code, 200)
- self.assertEqual(len(r.history), 3)
-
-
- def test_session_HTTP_200_OK_GET(self):
-
- s = requests.session()
- r = s.get(httpbin('/get'))
- self.assertEqual(r.status_code, 200)
-
-
- def test_session_persistent_headers(self):
-
- heads = {'User-agent': 'Mozilla/5.0'}
-
- s = requests.session()
- s.headers = heads
-
- # Make 2 requests from the Session object; the header should be sent both times
- r1 = s.get(httpbin('user-agent'))
- assert heads['User-agent'] in r1.text
-
- r2 = s.get(httpbin('user-agent'))
- assert heads['User-agent'] in r2.text
-
- new_heads = {'User-agent': 'blah'}
- r3 = s.get(httpbin('user-agent'), headers=new_heads)
- assert new_heads['User-agent'] in r3.text
-
- self.assertEqual(r2.status_code, 200)
-
- def test_single_hook(self):
-
- def add_foo_header(args):
- if not args.get('headers'):
- args['headers'] = {}
-
- args['headers'].update({
- 'X-Foo': 'foo'
- })
-
- return args
-
- for service in SERVICES:
- url = service('headers')
-
- response = requests.get(
- url=url,
- hooks={
- 'args': add_foo_header
- }
- )
-
- assert 'foo' in response.text
-
- def test_multiple_hooks(self):
-
- def add_foo_header(args):
- if not args.get('headers'):
- args['headers'] = {}
-
- args['headers'].update({
- 'X-Foo': 'foo'
- })
-
- return args
-
- def add_bar_header(args):
- if not args.get('headers'):
- args['headers'] = {}
-
- args['headers'].update({
- 'X-Bar': 'bar'
- })
-
- return args
-
- for service in SERVICES:
- url = service('headers')
-
- response = requests.get(
- url=url,
- hooks={
- 'args': [add_foo_header, add_bar_header]
- }
- )
-
- assert 'foo' in response.text
- assert 'bar' in response.text
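- # ('args' hooks receive the keyword arguments of the pending request,
- # so mutating args['headers'] injects extra headers; passing a list
- # registers several hooks for the same event, as both assertions show.)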
-
- def test_session_persistent_cookies(self):
-
- s = requests.session()
-
- # Internally dispatched cookies are sent.
- _c = {'kenneth': 'reitz', 'bessie': 'monke'}
- r = s.get(httpbin('cookies'), cookies=_c)
- r = s.get(httpbin('cookies'))
-
- # Those cookies persist transparently.
- c = json.loads(r.text).get('cookies')
- assert c == _c
-
- # Double check.
- r = s.get(httpbin('cookies'), cookies={})
- c = json.loads(r.text).get('cookies')
- assert c == _c
-
- # Remove a cookie by setting its value to None.
- r = s.get(httpbin('cookies'), cookies={'bessie': None})
- c = json.loads(r.text).get('cookies')
- del _c['bessie']
- assert c == _c
-
- # Test session-level cookies.
- s = requests.session(cookies=_c)
- r = s.get(httpbin('cookies'))
- c = json.loads(r.text).get('cookies')
- assert c == _c
-
- # Have the server set a cookie.
- r = s.get(httpbin('cookies', 'set', 'k', 'v'), allow_redirects=True)
- c = json.loads(r.text).get('cookies')
-
- assert 'k' in c
-
- # And server-set cookie persistence.
- r = s.get(httpbin('cookies'))
- c = json.loads(r.text).get('cookies')
-
- assert 'k' in c
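- # (Summarizing the above: per-request cookies are captured by the
- # session, setting a cookie to None removes it, and cookies set by
- # the server via Set-Cookie persist on the session too.)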
-
-
-
- def test_session_persistent_params(self):
-
- params = {'a': 'a_test'}
-
- s = requests.session()
- s.params = params
-
- # Make 2 requests from the Session object; the params should be sent both times
- r1 = s.get(httpbin('get'))
- assert params['a'] in r1.text
-
-
- params2 = {'b': 'b_test'}
-
- r2 = s.get(httpbin('get'), params=params2)
- assert params['a'] in r2.text
- assert params2['b'] in r2.text
-
-
- params3 = {'b': 'b_test', 'a': None, 'c': 'c_test'}
-
- r3 = s.get(httpbin('get'), params=params3)
-
- assert params['a'] not in r3.text
- assert params3['b'] in r3.text
- assert params3['c'] in r3.text
-
- def test_invalid_content(self):
- # WARNING: if your DNS provider hijacks lookups for nonexistent
- # domains (as e.g. Comcast's resolvers do), this will fail.
- try:
- hah = 'http://somedomainthatclearlydoesntexistg.com'
- r = requests.get(hah, allow_redirects=False)
- except requests.ConnectionError:
- pass # \o/
- else:
- assert False
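- # (The else branch runs only when no exception was raised, so the
- # bare `assert False` fails the test if the bogus hostname somehow
- # resolves.)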
-
-
- config = {'safe_mode': True}
- r = requests.get(hah, allow_redirects=False, config=config)
- assert r.content is None
-
- def test_cached_response(self):
-
- r1 = requests.get(httpbin('get'), prefetch=False)
- assert not r1._content
- assert r1.content
- assert r1.text
-
- r2 = requests.get(httpbin('get'), prefetch=True)
- assert r2._content
- assert r2.content
- assert r2.text
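- # (With prefetch=False the body is read lazily, on first access to
- # .content, and then cached in ._content; prefetch=True reads and
- # caches it up front.)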
-
- def test_iter_lines(self):
-
- counts = (0, 2, 10, 100)
-
- for i in counts:
- r = requests.get(httpbin('stream', str(i)), prefetch=False)
- lines = list(r.iter_lines())
-
- self.assertEqual(i, len(lines))
-
- # Test 'dangling' fragment in responses that do not terminate in
- # a newline.
- quote = (
- '''Why will he not upon our fair request\n'''
- '''Untent his person and share the air with us?'''
- )
-
- # Make a request and monkey-patch its contents
- r = requests.get(httpbin('get'))
- r.raw = StringIO(quote)
-
- # Make sure iter_lines doesn't chop the trailing bit
- lines = '\n'.join(r.iter_lines())
- self.assertEqual(lines, quote)
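- # e.g. a raw body of 'a\nb' should yield ['a', 'b'] from
- # iter_lines(), not drop the trailing, newline-less 'b'.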
-
- def test_safe_mode(self):
-
- safe = requests.session(config=dict(safe_mode=True))
-
- # Safe mode creates empty responses for failed requests.
- # Iterating on these responses should produce empty sequences
- r = safe.get('http://_/')
- self.assertEqual(list(r.iter_lines()), [])
- assert isinstance(r.error, requests.exceptions.ConnectionError)
-
- r = safe.get('http://_/')
- self.assertEqual(list(r.iter_content()), [])
- assert isinstance(r.error, requests.exceptions.ConnectionError)
-
- # When not in safe mode, should raise Timeout exception
- self.assertRaises(
- requests.exceptions.Timeout,
- requests.get,
- httpbin('stream', '1000'), timeout=0.0001)
-
- # In safe mode, should return a blank response
- r = requests.get(httpbin('stream', '1000'), timeout=0.0001,
- config=dict(safe_mode=True))
- assert r.content is None
- assert isinstance(r.error, requests.exceptions.Timeout)
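- # (In safe mode the exception is swallowed and stashed on r.error,
- # leaving a blank response instead of a raised exception.)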
-
-
-if __name__ == '__main__':
- unittest.main()