aboutsummaryrefslogtreecommitdiff
path: root/requests
diff options
context:
space:
mode:
authorSVN-Git Migration <python-modules-team@lists.alioth.debian.org>2015-10-08 13:41:19 -0700
committerSVN-Git Migration <python-modules-team@lists.alioth.debian.org>2015-10-08 13:41:19 -0700
commit1c0a691ebf468d42b7c0d6b0e9daf0b2ff82cc20 (patch)
treee228f79dfbc25cdacb33ce72b76732aec43d29ba /requests
parentbf12eaaa5428798962777e05fd98be024e0ce27c (diff)
downloadpython-requests-1c0a691ebf468d42b7c0d6b0e9daf0b2ff82cc20.tar
python-requests-1c0a691ebf468d42b7c0d6b0e9daf0b2ff82cc20.tar.gz
Imported Upstream version 0.8.2
Diffstat (limited to 'requests')
-rw-r--r--requests/__init__.py33
-rw-r--r--requests/api.py116
-rw-r--r--requests/async.py47
-rw-r--r--requests/auth.py146
-rw-r--r--requests/config.py68
-rw-r--r--requests/core.py29
-rw-r--r--requests/defaults.py42
-rw-r--r--requests/exceptions.py18
-rw-r--r--requests/hooks.py2
-rw-r--r--requests/models.py666
-rw-r--r--requests/monkeys.py148
-rw-r--r--requests/packages/__init__.py2
-rw-r--r--requests/packages/oreos/__init__.py3
-rw-r--r--requests/packages/oreos/core.py24
-rw-r--r--requests/packages/oreos/monkeys.py770
-rw-r--r--requests/packages/oreos/structures.py399
-rw-r--r--requests/packages/poster/__init__.py34
-rw-r--r--requests/packages/poster/encode.py414
-rw-r--r--requests/packages/poster/streaminghttp.py199
-rw-r--r--requests/packages/urllib3/__init__.py48
-rw-r--r--requests/packages/urllib3/_collections.py131
-rw-r--r--requests/packages/urllib3/connectionpool.py525
-rw-r--r--requests/packages/urllib3/exceptions.py35
-rw-r--r--requests/packages/urllib3/filepost.py71
-rw-r--r--requests/packages/urllib3/poolmanager.py128
-rw-r--r--requests/packages/urllib3/request.py145
-rw-r--r--requests/packages/urllib3/response.py181
-rw-r--r--requests/sessions.py275
-rw-r--r--requests/status_codes.py7
-rw-r--r--requests/structures.py3
-rw-r--r--requests/utils.py137
31 files changed, 3452 insertions, 1394 deletions
diff --git a/requests/__init__.py b/requests/__init__.py
index 15a5050..9d2319a 100644
--- a/requests/__init__.py
+++ b/requests/__init__.py
@@ -1,4 +1,33 @@
# -*- coding: utf-8 -*-
-from core import *
-from core import __version__
+# __
+# /__) _ _ _ _ _/ _
+# / ( (- (/ (/ (- _) / _)
+# /
+
+"""
+requests
+~~~~~~~~
+
+:copyright: (c) 2011 by Kenneth Reitz.
+:license: ISC, see LICENSE for more details.
+
+"""
+
+__title__ = 'requests'
+__version__ = '0.8.2'
+__build__ = 0x000802
+__author__ = 'Kenneth Reitz'
+__license__ = 'ISC'
+__copyright__ = 'Copyright 2011 Kenneth Reitz'
+
+
+from . import utils
+from .models import Request, Response
+from .api import request, get, head, post, patch, put, delete, options
+from .sessions import session, Session
+from .status_codes import codes
+from .exceptions import (
+ RequestException, Timeout, URLRequired,
+ TooManyRedirects, HTTPError, ConnectionError
+)
diff --git a/requests/api.py b/requests/api.py
index 1b847b7..9e0c96f 100644
--- a/requests/api.py
+++ b/requests/api.py
@@ -4,26 +4,30 @@
requests.api
~~~~~~~~~~~~
-This module impliments the Requests API.
+This module implements the Requests API.
:copyright: (c) 2011 by Kenneth Reitz.
:license: ISC, see LICENSE for more details.
"""
-import config
-from .models import Request, Response, AuthObject
-from .status_codes import codes
-from .hooks import dispatch_hook
-from .utils import cookiejar_from_dict, header_expand
+from .sessions import session
-__all__ = ('request', 'get', 'head', 'post', 'patch', 'put', 'delete')
-
def request(method, url,
- params=None, data=None, headers=None, cookies=None, files=None, auth=None,
- timeout=None, allow_redirects=False, proxies=None, hooks=None, return_response=True):
-
+ params=None,
+ data=None,
+ headers=None,
+ cookies=None,
+ files=None,
+ auth=None,
+ timeout=None,
+ allow_redirects=False,
+ proxies=None,
+ hooks=None,
+ return_response=True,
+ prefetch=False,
+ config=None):
"""Constructs and sends a :class:`Request <Request>`.
Returns :class:`Response <Response>` object.
@@ -33,76 +37,56 @@ def request(method, url,
:param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
:param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
:param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
- :param files: (optional) Dictionary of 'filename': file-like-objects for multipart encoding upload.
- :param auth: (optional) AuthObject to enable Basic HTTP Auth.
+ :param files: (optional) Dictionary of 'name': file-like-objects (or {'name': ('filename', fileobj)}) for multipart encoding upload.
+ :param auth: (optional) Auth typle to enable Basic/Digest/Custom HTTP Auth.
:param timeout: (optional) Float describing the timeout of the request.
:param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
:param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
:param return_response: (optional) If False, an un-sent Request object will returned.
+ :param config: (optional) A configuration dictionary.
"""
- method = str(method).upper()
-
- if cookies is None:
- cookies = {}
-
- cookies = cookiejar_from_dict(cookies)
-
- # Expand header values
- if headers:
- for k, v in headers.items() or {}:
- headers[k] = header_expand(v)
-
- args = dict(
- method = method,
- url = url,
- data = data,
- params = params,
- headers = headers,
- cookiejar = cookies,
- files = files,
- auth = auth,
- hooks = hooks,
- timeout = timeout or config.settings.timeout,
- allow_redirects = allow_redirects,
- proxies = proxies or config.settings.proxies,
+ s = session()
+ return s.request(
+ method=method,
+ url=url,
+ params=params,
+ data=data,
+ headers=headers,
+ cookies=cookies,
+ files=files,
+ auth=auth,
+ timeout=timeout,
+ allow_redirects=allow_redirects,
+ proxies=proxies,
+ hooks=hooks,
+ return_response=return_response,
+ config=config,
+ prefetch=prefetch
)
- # Arguments manipulation hook.
- args = dispatch_hook('args', hooks, args)
-
- r = Request(**args)
-
- # Pre-request hook.
- r = dispatch_hook('pre_request', hooks, r)
- # Don't send if asked nicely.
- if not return_response:
- return r
- # Send the HTTP Request.
- r.send()
-
- # Post-request hook.
- r = dispatch_hook('post_request', hooks, r)
-
- # Response manipulation hook.
- r.response = dispatch_hook('response', hooks, r.response)
+def get(url, **kwargs):
+ """Sends a GET request. Returns :class:`Response` object.
- return r.response
+ :param url: URL for the new :class:`Request` object.
+ :param **kwargs: Optional arguments that ``request`` takes.
+ """
+ kwargs.setdefault('allow_redirects', True)
+ return request('get', url, **kwargs)
-def get(url, **kwargs):
- """Sends a GET request. Returns :class:`Response` object.
+def options(url, **kwargs):
+ """Sends a OPTIONS request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
:param **kwargs: Optional arguments that ``request`` takes.
"""
-
kwargs.setdefault('allow_redirects', True)
- return request('GET', url, **kwargs)
+ return request('options', url, **kwargs)
def head(url, **kwargs):
@@ -113,10 +97,10 @@ def head(url, **kwargs):
"""
kwargs.setdefault('allow_redirects', True)
- return request('HEAD', url, **kwargs)
+ return request('head', url, **kwargs)
-def post(url, data='', **kwargs):
+def post(url, data=None, **kwargs):
"""Sends a POST request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
@@ -127,7 +111,7 @@ def post(url, data='', **kwargs):
return request('post', url, data=data, **kwargs)
-def put(url, data='', **kwargs):
+def put(url, data=None, **kwargs):
"""Sends a PUT request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
@@ -138,7 +122,7 @@ def put(url, data='', **kwargs):
return request('put', url, data=data, **kwargs)
-def patch(url, data='', **kwargs):
+def patch(url, data=None, **kwargs):
"""Sends a PATCH request. Returns :class:`Response` object.
:param url: URL for the new :class:`Request` object.
@@ -146,7 +130,7 @@ def patch(url, data='', **kwargs):
:param **kwargs: Optional arguments that ``request`` takes.
"""
- return request('patch', url, **kwargs)
+ return request('patch', url, data=data, **kwargs)
def delete(url, **kwargs):
diff --git a/requests/async.py b/requests/async.py
index db25f6a..8bafb1e 100644
--- a/requests/async.py
+++ b/requests/async.py
@@ -12,6 +12,7 @@ by gevent. All API methods return a ``Request`` instance (as opposed to
try:
import gevent
from gevent import monkey as curious_george
+ from gevent.pool import Pool
except ImportError:
raise RuntimeError('Gevent is required for requests.async.')
@@ -19,25 +20,28 @@ except ImportError:
curious_george.patch_all(thread=False)
from . import api
-from .hooks import dispatch_hook
__all__ = (
'map',
- 'get', 'head', 'post', 'put', 'patch', 'delete', 'request'
+ 'get', 'options', 'head', 'post', 'put', 'patch', 'delete', 'request'
)
-def _patched(f):
+def patched(f):
"""Patches a given API function to not send."""
def wrapped(*args, **kwargs):
- return f(*args, return_response=False, **kwargs)
+
+ kwargs['return_response'] = False
+ kwargs['prefetch'] = True
+
+ return f(*args, **kwargs)
return wrapped
-def _send(r, pools=None):
+def send(r, pools=None):
"""Sends a given Request object."""
if pools:
@@ -45,34 +49,35 @@ def _send(r, pools=None):
r.send()
- # Post-request hook.
- r = dispatch_hook('post_request', r.hooks, r)
-
- # Response manipulation hook.
- r.response = dispatch_hook('response', r.hooks, r.response)
-
return r.response
# Patched requests.api functions.
-get = _patched(api.get)
-head = _patched(api.head)
-post = _patched(api.post)
-put = _patched(api.put)
-patch = _patched(api.patch)
-delete = _patched(api.delete)
-request = _patched(api.request)
+get = patched(api.get)
+options = patched(api.options)
+head = patched(api.head)
+post = patched(api.post)
+put = patched(api.put)
+patch = patched(api.patch)
+delete = patched(api.delete)
+request = patched(api.request)
-def map(requests, prefetch=True):
+def map(requests, prefetch=True, size=None):
"""Concurrently converts a list of Requests to Responses.
:param requests: a collection of Request objects.
:param prefetch: If False, the content will not be downloaded immediately.
+ :param size: Specifies the number of requests to make at a time. If None, no throttling occurs.
"""
- jobs = [gevent.spawn(_send, r) for r in requests]
- gevent.joinall(jobs)
+ if size:
+ pool = Pool(size)
+ pool.map(send, requests)
+ pool.join()
+ else:
+ jobs = [gevent.spawn(send, r) for r in requests]
+ gevent.joinall(jobs)
if prefetch:
[r.response.content for r in requests]
diff --git a/requests/auth.py b/requests/auth.py
new file mode 100644
index 0000000..aabeb86
--- /dev/null
+++ b/requests/auth.py
@@ -0,0 +1,146 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.auth
+~~~~~~~~~~~~~
+
+This module contains the authentication handlers for Requests.
+"""
+
+import time
+import hashlib
+
+from base64 import b64encode
+from urlparse import urlparse
+
+from .utils import randombytes, parse_dict_header
+
+
+def http_basic(r, username, password):
+ """Attaches HTTP Basic Authentication to the given Request object.
+ Arguments should be considered non-positional.
+
+ """
+ username = str(username)
+ password = str(password)
+
+ auth_s = b64encode('%s:%s' % (username, password))
+ r.headers['Authorization'] = ('Basic %s' % auth_s)
+
+ return r
+
+
+def http_digest(r, username, password):
+ """Attaches HTTP Digest Authentication to the given Request object.
+ Arguments should be considered non-positional.
+ """
+
+ def handle_401(r):
+ """Takes the given response and tries digest-auth, if needed."""
+
+ s_auth = r.headers.get('www-authenticate', '')
+
+ if 'digest' in s_auth.lower():
+
+ last_nonce = ''
+ nonce_count = 0
+
+ chal = parse_dict_header(s_auth.replace('Digest ', ''))
+
+ realm = chal['realm']
+ nonce = chal['nonce']
+ qop = chal.get('qop')
+ algorithm = chal.get('algorithm', 'MD5')
+ opaque = chal.get('opaque', None)
+
+ algorithm = algorithm.upper()
+ # lambdas assume digest modules are imported at the top level
+ if algorithm == 'MD5':
+ H = lambda x: hashlib.md5(x).hexdigest()
+ elif algorithm == 'SHA':
+ H = lambda x: hashlib.sha1(x).hexdigest()
+ # XXX MD5-sess
+ KD = lambda s, d: H("%s:%s" % (s, d))
+
+ if H is None:
+ return None
+
+ # XXX not implemented yet
+ entdig = None
+ p_parsed = urlparse(r.request.url)
+ path = p_parsed.path + p_parsed.query
+
+ A1 = "%s:%s:%s" % (username, realm, password)
+ A2 = "%s:%s" % (r.request.method, path)
+
+ if qop == 'auth':
+ if nonce == last_nonce:
+ nonce_count += 1
+ else:
+ nonce_count = 1
+ last_nonce = nonce
+
+ ncvalue = '%08x' % nonce_count
+ cnonce = (hashlib.sha1("%s:%s:%s:%s" % (
+ nonce_count, nonce, time.ctime(), randombytes(8)))
+ .hexdigest()[:16]
+ )
+ noncebit = "%s:%s:%s:%s:%s" % (nonce, ncvalue, cnonce, qop, H(A2))
+ respdig = KD(H(A1), noncebit)
+ elif qop is None:
+ respdig = KD(H(A1), "%s:%s" % (nonce, H(A2)))
+ else:
+ # XXX handle auth-int.
+ return None
+
+ # XXX should the partial digests be encoded too?
+ base = 'username="%s", realm="%s", nonce="%s", uri="%s", ' \
+ 'response="%s"' % (username, realm, nonce, path, respdig)
+ if opaque:
+ base += ', opaque="%s"' % opaque
+ if entdig:
+ base += ', digest="%s"' % entdig
+ base += ', algorithm="%s"' % algorithm
+ if qop:
+ base += ', qop=auth, nc=%s, cnonce="%s"' % (ncvalue, cnonce)
+
+
+ r.request.headers['Authorization'] = 'Digest %s' % (base)
+ r.request.send(anyway=True)
+ _r = r.request.response
+ _r.history.append(r)
+
+ return _r
+
+ return r
+
+ r.hooks['response'] = handle_401
+ return r
+
+
+def dispatch(t):
+ """Given an auth tuple, return an expanded version."""
+
+ if not t:
+ return t
+ else:
+ t = list(t)
+
+ # Make sure they're passing in something.
+ assert len(t) >= 2
+
+ # If only two items are passed in, assume HTTPBasic.
+ if (len(t) == 2):
+ t.insert(0, 'basic')
+
+ # Allow built-in string referenced auths.
+ if isinstance(t[0], basestring):
+ if t[0] in ('basic', 'forced_basic'):
+ t[0] = http_basic
+ elif t[0] in ('digest',):
+ t[0] = http_digest
+
+ # Return a custom callable.
+ return (t[0], tuple(t[1:]))
+
+
diff --git a/requests/config.py b/requests/config.py
deleted file mode 100644
index 794109c..0000000
--- a/requests/config.py
+++ /dev/null
@@ -1,68 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.config
-~~~~~~~~~~~~~~~
-
-This module provides the Requests settings feature set.
-
-"""
-
-class Settings(object):
- _singleton = {}
-
- # attributes with defaults
- __attrs__ = []
-
- def __init__(self, **kwargs):
- super(Settings, self).__init__()
-
- self.__dict__ = self._singleton
-
-
- def __call__(self, *args, **kwargs):
- # new instance of class to call
- r = self.__class__()
-
- # cache previous settings for __exit__
- r.__cache = self.__dict__.copy()
- map(self.__cache.setdefault, self.__attrs__)
-
- # set new settings
- self.__dict__.update(*args, **kwargs)
-
- return r
-
-
- def __enter__(self):
- pass
-
-
- def __exit__(self, *args):
-
- # restore cached copy
- self.__dict__.update(self.__cache.copy())
- del self.__cache
-
-
- def __getattribute__(self, key):
- if key in object.__getattribute__(self, '__attrs__'):
- try:
- return object.__getattribute__(self, key)
- except AttributeError:
- return None
- return object.__getattribute__(self, key)
-
-
-settings = Settings()
-
-settings.base_headers = {'User-Agent': 'python-requests.org'}
-settings.accept_gzip = True
-settings.proxies = None
-settings.verbose = None
-settings.timeout = None
-settings.max_redirects = 30
-settings.decode_unicode = True
-
-#: Use socket.setdefaulttimeout() as fallback?
-settings.timeout_fallback = True
diff --git a/requests/core.py b/requests/core.py
deleted file mode 100644
index de05cf9..0000000
--- a/requests/core.py
+++ /dev/null
@@ -1,29 +0,0 @@
-# -*- coding: utf-8 -*-
-
-"""
-requests.core
-~~~~~~~~~~~~~
-
-This module implements the main Requests system.
-
-:copyright: (c) 2011 by Kenneth Reitz.
-:license: ISC, see LICENSE for more details.
-
-"""
-
-__title__ = 'requests'
-__version__ = '0.6.4'
-__build__ = 0x000604
-__author__ = 'Kenneth Reitz'
-__license__ = 'ISC'
-__copyright__ = 'Copyright 2011 Kenneth Reitz'
-
-
-from models import HTTPError, Request, Response
-from api import *
-from exceptions import *
-from sessions import session
-from status_codes import codes
-from config import settings
-
-import utils
diff --git a/requests/defaults.py b/requests/defaults.py
new file mode 100644
index 0000000..7a5a3fb
--- /dev/null
+++ b/requests/defaults.py
@@ -0,0 +1,42 @@
+# -*- coding: utf-8 -*-
+
+"""
+requests.defaults
+~~~~~~~~~~~~~~~~~
+
+This module provides the Requests configuration defaults.
+
+Configurations:
+
+:base_headers: Default HTTP headers.
+:verbose: Stream to write request logging to.
+:timeout: Seconds until request timeout.
+:max_redirects: Maximum njumber of redirects allowed within a request.
+:decode_unicode: Decode unicode responses automatically?
+:keep_alive: Reuse HTTP Connections?
+:max_retries: The number of times a request should be retried in the event of a connection failure.
+:safe_mode: If true, Requests will catch all errors.
+:pool_maxsize: The maximium size of an HTTP connection pool.
+:pool_connections: The number of active HTTP connection pools to use.
+
+"""
+
+from . import __version__
+
+defaults = dict()
+
+
+defaults['base_headers'] = {
+ 'User-Agent': 'python-requests/%s' % __version__,
+ 'Accept-Encoding': ', '.join(('identity', 'deflate', 'compress', 'gzip')),
+ 'Accept': '*/*'
+}
+
+defaults['verbose'] = None
+defaults['max_redirects'] = 30
+defaults['decode_unicode'] = True
+defaults['pool_connections'] = 10
+defaults['pool_maxsize'] = 10
+defaults['max_retries'] = 0
+defaults['safe_mode'] = False
+defaults['keep_alive'] = True
diff --git a/requests/exceptions.py b/requests/exceptions.py
index c08c614..d20a95c 100644
--- a/requests/exceptions.py
+++ b/requests/exceptions.py
@@ -2,25 +2,27 @@
"""
requests.exceptions
-~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~
+
+This module contains the set of Requests' exceptions.
"""
class RequestException(Exception):
- """There was an ambiguous exception that occured while handling your
+ """There was an ambiguous exception that occurred while handling your
request."""
-class AuthenticationError(RequestException):
- """The authentication credentials provided were invalid."""
-
+class HTTPError(RequestException):
+ """An HTTP error occured."""
+
+class ConnectionError(RequestException):
+ """A Connection error occured."""
+
class Timeout(RequestException):
"""The request timed out."""
class URLRequired(RequestException):
"""A valid URL is required to make a request."""
-class InvalidMethod(RequestException):
- """An inappropriate method was attempted."""
-
class TooManyRedirects(RequestException):
"""Too many redirects."""
diff --git a/requests/hooks.py b/requests/hooks.py
index 2938029..f9cf480 100644
--- a/requests/hooks.py
+++ b/requests/hooks.py
@@ -26,7 +26,7 @@ import warnings
def dispatch_hook(key, hooks, hook_data):
- """Dipatches a hook dictionary on a given peice of data."""
+ """Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or dict()
diff --git a/requests/models.py b/requests/models.py
index 9a8f5f9..0be3e89 100644
--- a/requests/models.py
+++ b/requests/models.py
@@ -4,27 +4,30 @@
requests.models
~~~~~~~~~~~~~~~
+This module contains the primary objects that power Requests.
"""
import urllib
-import urllib2
-import socket
-import codecs
import zlib
-
-from urllib2 import HTTPError
-from urlparse import urlparse, urlunparse, urljoin
+from urlparse import urlparse, urlunparse, urljoin, urlsplit
from datetime import datetime
-from .config import settings
-from .monkeys import Request as _Request, HTTPBasicAuthHandler, HTTPForcedBasicAuthHandler, HTTPDigestAuthHandler, HTTPRedirectHandler
+from .auth import dispatch as auth_dispatch
+from .hooks import dispatch_hook
from .structures import CaseInsensitiveDict
-from .packages.poster.encode import multipart_encode
-from .packages.poster.streaminghttp import register_openers, get_handlers
-from .utils import dict_from_cookiejar, get_unicode_from_response, stream_decode_response_unicode, decode_gzip, stream_decode_gzip
from .status_codes import codes
-from .exceptions import RequestException, AuthenticationError, Timeout, URLRequired, InvalidMethod, TooManyRedirects
+from .packages import oreos
+from .packages.urllib3.exceptions import MaxRetryError
+from .packages.urllib3.exceptions import SSLError as _SSLError
+from .packages.urllib3.exceptions import HTTPError as _HTTPError
+from .packages.urllib3 import connectionpool, poolmanager
+from .packages.urllib3.filepost import encode_multipart_formdata
+from .exceptions import (
+ Timeout, URLRequired, TooManyRedirects, HTTPError, ConnectionError)
+from .utils import (
+ get_encoding_from_headers, stream_decode_response_unicode,
+ decode_gzip, stream_decode_gzip, guess_filename, requote_path)
REDIRECT_STATI = (codes.moved, codes.found, codes.other, codes.temporary_moved)
@@ -37,19 +40,31 @@ class Request(object):
"""
def __init__(self,
- url=None, headers=dict(), files=None, method=None, data=dict(),
- params=dict(), auth=None, cookiejar=None, timeout=None, redirect=False,
- allow_redirects=False, proxies=None, hooks=None):
-
- #: Float describ the timeout of the request.
+ url=None,
+ headers=dict(),
+ files=None,
+ method=None,
+ data=dict(),
+ params=dict(),
+ auth=None,
+ cookies=None,
+ timeout=None,
+ redirect=False,
+ allow_redirects=False,
+ proxies=None,
+ hooks=None,
+ config=None,
+ _poolmanager=None):
+
+ #: Float describes the timeout of the request.
# (Use socket.setdefaulttimeout() as fallback)
self.timeout = timeout
#: Request URL.
self.url = url
- #: Dictonary of HTTP Headers to attach to the :class:`Request <Request>`.
- self.headers = headers
+ #: Dictionary of HTTP Headers to attach to the :class:`Request <Request>`.
+ self.headers = dict(headers or [])
#: Dictionary of files to multipart upload (``{filename: content}``).
self.files = files
@@ -64,6 +79,7 @@ class Request(object):
#: Dictionary or byte of querystring data to attach to the
#: :class:`Request <Request>`.
self.params = None
+ self.params = dict(params or [])
#: True if :class:`Request <Request>` is part of a redirect chain (disables history
#: and HTTPError storage).
@@ -73,7 +89,7 @@ class Request(object):
self.allow_redirects = allow_redirects
# Dictionary mapping protocol to the URL of the proxy (e.g. {'http': 'foo.bar:3128'})
- self.proxies = proxies
+ self.proxies = dict(proxies or [])
self.data, self._enc_data = self._encode_params(data)
self.params, self._enc_params = self._encode_params(params)
@@ -82,16 +98,15 @@ class Request(object):
#: content and metadata of HTTP Response, once :attr:`sent <send>`.
self.response = Response()
- if isinstance(auth, (list, tuple)):
- auth = AuthObject(*auth)
- if not auth:
- auth = auth_manager.get_auth(self.url)
-
- #: :class:`AuthObject` to attach to :class:`Request <Request>`.
- self.auth = auth
+ #: Authentication tuple to attach to :class:`Request <Request>`.
+ self._auth = auth
+ self.auth = auth_dispatch(auth)
#: CookieJar to attach to :class:`Request <Request>`.
- self.cookiejar = cookiejar
+ self.cookies = dict(cookies or [])
+
+ #: Dictionary of configurations for this request.
+ self.config = dict(config or [])
#: True if Request has been sent.
self.sent = False
@@ -99,116 +114,79 @@ class Request(object):
#: Event-handling hooks.
self.hooks = hooks
- # Header manipulation and defaults.
-
- if settings.accept_gzip:
- settings.base_headers.update({'Accept-Encoding': 'gzip'})
+ #: Session.
+ self.session = None
if headers:
headers = CaseInsensitiveDict(self.headers)
else:
headers = CaseInsensitiveDict()
- for (k, v) in settings.base_headers.items():
+ for (k, v) in self.config.get('base_headers', {}).items():
if k not in headers:
headers[k] = v
self.headers = headers
+ self._poolmanager = _poolmanager
+
+ # Pre-request hook.
+ r = dispatch_hook('pre_request', hooks, self)
+ self.__dict__.update(r.__dict__)
def __repr__(self):
return '<Request [%s]>' % (self.method)
- def _checks(self):
- """Deterministic checks for consistency."""
-
- if not self.url:
- raise URLRequired
-
-
- def _get_opener(self):
- """Creates appropriate opener object for urllib2."""
-
- _handlers = []
-
- if self.cookiejar is not None:
- _handlers.append(urllib2.HTTPCookieProcessor(self.cookiejar))
-
- if self.auth:
- if not isinstance(self.auth.handler,
- (urllib2.AbstractBasicAuthHandler,
- urllib2.AbstractDigestAuthHandler)):
-
- # TODO: REMOVE THIS COMPLETELY
- auth_manager.add_password(
- self.auth.realm, self.url,
- self.auth.username,
- self.auth.password)
-
- self.auth.handler = self.auth.handler(auth_manager)
- auth_manager.add_auth(self.url, self.auth)
-
- _handlers.append(self.auth.handler)
-
- if self.proxies:
- _handlers.append(urllib2.ProxyHandler(self.proxies))
-
- _handlers.append(HTTPRedirectHandler)
-
- if not _handlers:
- return urllib2.urlopen
-
- if self.data or self.files:
- _handlers.extend(get_handlers())
-
- opener = urllib2.build_opener(*_handlers)
-
- if self.headers:
- # Allow default headers in the opener to be overloaded
- normal_keys = [k.capitalize() for k in self.headers]
- for key, val in opener.addheaders[:]:
- if key not in normal_keys:
- continue
- # Remove it, we have a value to take its place
- opener.addheaders.remove((key, val))
-
- return opener.open
-
-
def _build_response(self, resp, is_error=False):
"""Build internal :class:`Response <Response>` object
from given response.
"""
-
def build(resp):
response = Response()
- response.status_code = getattr(resp, 'code', None)
- try:
- response.headers = CaseInsensitiveDict(getattr(resp.info(), 'dict', None))
- response.raw = resp
+ # Pass settings over.
+ response.config = self.config
- if self.cookiejar:
- response.cookies = dict_from_cookiejar(self.cookiejar)
+ if resp:
+ # Fallback to None if there's no staus_code, for whatever reason.
+ response.status_code = getattr(resp, 'status', None)
- except AttributeError:
- pass
+ # Make headers case-insensitive.
+ response.headers = CaseInsensitiveDict(getattr(resp, 'headers', None))
+
+ # Set encoding.
+ response.encoding = get_encoding_from_headers(response.headers)
+
+ # Start off with our local cookies.
+ cookies = self.cookies or dict()
+
+ # Add new cookies from the server.
+ if 'set-cookie' in response.headers:
+ cookie_header = response.headers['set-cookie']
+ cookies = oreos.dict_from_string(cookie_header)
+
+ # Save cookies in Response.
+ response.cookies = cookies
+
+ # Save original resopnse for later.
+ response.raw = resp
if is_error:
response.error = resp
- response.url = getattr(resp, 'url', None)
+ response.url = self.full_url
return response
-
history = []
r = build(resp)
+ cookies = self.cookies
+ self.cookies.update(r.cookies)
if r.status_code in REDIRECT_STATI and not self.redirect:
@@ -217,9 +195,7 @@ class Request(object):
((r.status_code is codes.see_other) or (self.allow_redirects))
):
- r.raw.close()
-
- if not len(history) < settings.max_redirects:
+ if not len(history) < self.config.get('max_redirects'):
raise TooManyRedirects()
history.append(r)
@@ -234,7 +210,7 @@ class Request(object):
# Facilitate non-RFC2616-compliant 'location' headers
# (e.g. '/path/to/resource' instead of 'http://domain.tld/path/to/resource')
if not urlparse(url).netloc:
- url = urljoin(r.url, urllib.quote(urllib.unquote(url)))
+ url = urljoin(r.url, url)
# http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.3.4
if r.status_code is codes.see_other:
@@ -242,18 +218,38 @@ class Request(object):
else:
method = self.method
+ # Remove the cookie headers that were sent.
+ headers = self.headers
+ try:
+ del headers['Cookie']
+ except KeyError:
+ pass
+
request = Request(
- url, self.headers, self.files, method,
- self.data, self.params, self.auth, self.cookiejar,
- redirect=True
+ url=url,
+ headers=headers,
+ files=self.files,
+ method=method,
+ params=self.session.params,
+ auth=self._auth,
+ cookies=cookies,
+ redirect=True,
+ config=self.config,
+ timeout=self.timeout,
+ _poolmanager=self._poolmanager,
+ proxies = self.proxies,
)
+
request.send()
+ cookies.update(request.response.cookies)
r = request.response
+ self.cookies.update(r.cookies)
r.history = history
self.response = r
self.response.request = self
+ self.response.cookies.update(self.cookies)
@staticmethod
@@ -268,6 +264,9 @@ class Request(object):
returns it twice.
"""
+ if hasattr(data, '__iter__'):
+ data = dict(data)
+
if hasattr(data, 'items'):
result = []
for k, vs in data.items():
@@ -278,28 +277,63 @@ class Request(object):
else:
return data, data
-
- def _build_url(self):
+ @property
+ def full_url(self):
"""Build the actual URL to use."""
+ if not self.url:
+ raise URLRequired()
+
# Support for unicode domain names and paths.
scheme, netloc, path, params, query, fragment = urlparse(self.url)
+
+ if not scheme:
+ raise ValueError()
+
netloc = netloc.encode('idna')
+
if isinstance(path, unicode):
path = path.encode('utf-8')
- path = urllib.quote(urllib.unquote(path))
- self.url = str(urlunparse([ scheme, netloc, path, params, query, fragment ]))
+
+ path = requote_path(path)
+
+ url = str(urlunparse([ scheme, netloc, path, params, query, fragment ]))
if self._enc_params:
- if urlparse(self.url).query:
- return '%s&%s' % (self.url, self._enc_params)
+ if urlparse(url).query:
+ return '%s&%s' % (url, self._enc_params)
else:
- return '%s?%s' % (self.url, self._enc_params)
+ return '%s?%s' % (url, self._enc_params)
else:
- return self.url
+ return url
+
+ @property
+ def path_url(self):
+ """Build the path URL to use."""
+
+ url = []
+
+ p = urlsplit(self.full_url)
+
+ # Proxies use full URLs.
+ if p.scheme in self.proxies:
+ return self.full_url
+
+ path = p.path
+ if not path:
+ path = '/'
+ url.append(path)
+
+ query = p.query
+ if query:
+ url.append('?')
+ url.append(query)
+
+ return ''.join(url)
- def send(self, anyway=False):
+
+ def send(self, anyway=False, prefetch=False):
"""Sends the request. Returns True of successful, false if not.
If there was an HTTPError during transmission,
self.response.status_code will contain the HTTPError code.
@@ -310,80 +344,134 @@ class Request(object):
already been sent.
"""
- self._checks()
-
# Logging
- if settings.verbose:
- settings.verbose.write('%s %s %s\n' % (
+ if self.config.get('verbose'):
+ self.config.get('verbose').write('%s %s %s\n' % (
datetime.now().isoformat(), self.method, self.url
))
+ # Build the URL
+ url = self.full_url
+
+ # Nottin' on you.
+ body = None
+ content_type = None
- url = self._build_url()
- if self.method in ('GET', 'HEAD', 'DELETE'):
- req = _Request(url, method=self.method)
+ # Multi-part file uploads.
+ if self.files:
+ if not isinstance(self.data, basestring):
+
+ try:
+ fields = self.data.copy()
+ except AttributeError:
+ fields = dict(self.data)
+
+ for (k, v) in self.files.items():
+ # support for explicit filename
+ if isinstance(v, (tuple, list)):
+ fn, fp = v
+ else:
+ fn = guess_filename(v) or k
+ fp = v
+ fields.update({k: (fn, fp.read())})
+
+ (body, content_type) = encode_multipart_formdata(fields)
+ else:
+ pass
+ # TODO: Conflict?
else:
+ if self.data:
- if self.files:
- register_openers()
+ body = self._enc_data
+ if isinstance(self.data, basestring):
+ content_type = None
+ else:
+ content_type = 'application/x-www-form-urlencoded'
- if self.data:
- self.files.update(self.data)
+ # Add content-type if it wasn't explicitly provided.
+ if (content_type) and (not 'content-type' in self.headers):
+ self.headers['Content-Type'] = content_type
- datagen, headers = multipart_encode(self.files)
- req = _Request(url, data=datagen, headers=headers, method=self.method)
- else:
- req = _Request(url, data=self._enc_data, method=self.method)
+ if self.auth:
+ auth_func, auth_args = self.auth
- if self.headers:
- for k,v in self.headers.iteritems():
- req.add_header(k, v)
+ # Allow auth to make its changes.
+ r = auth_func(self, *auth_args)
+
+ # Update self to reflect the auth changes.
+ self.__dict__.update(r.__dict__)
+
+ _p = urlparse(url)
+ proxy = self.proxies.get(_p.scheme)
+
+ if proxy:
+ conn = poolmanager.proxy_from_url(proxy)
+ else:
+ # Check to see if keep_alive is allowed.
+ if self.config.get('keep_alive'):
+ conn = self._poolmanager.connection_from_url(url)
+ else:
+ conn = connectionpool.connection_from_url(url)
if not self.sent or anyway:
- try:
- opener = self._get_opener()
- try:
+ if self.cookies:
- resp = opener(req, timeout=self.timeout)
+ # Skip if 'cookie' header is explicitly set.
+ if 'cookie' not in self.headers:
- except TypeError, err:
- # timeout argument is new since Python v2.6
- if not 'timeout' in str(err):
- raise
+ # Simple cookie with our dict.
+ c = oreos.monkeys.SimpleCookie()
+ for (k, v) in self.cookies.items():
+ c[k] = v
- if settings.timeout_fallback:
- # fall-back and use global socket timeout (This is not thread-safe!)
- old_timeout = socket.getdefaulttimeout()
- socket.setdefaulttimeout(self.timeout)
+ # Turn it into a header.
+ cookie_header = c.output(header='', sep='; ').strip()
- resp = opener(req)
+ # Attach Cookie header to request.
+ self.headers['Cookie'] = cookie_header
- if settings.timeout_fallback:
- # restore gobal timeout
- socket.setdefaulttimeout(old_timeout)
+ try:
+ # Send the request.
+ r = conn.urlopen(
+ method=self.method,
+ url=self.path_url,
+ body=body,
+ headers=self.headers,
+ redirect=False,
+ assert_same_host=False,
+ preload_content=prefetch,
+ decode_content=False,
+ retries=self.config.get('max_retries', 0),
+ timeout=self.timeout,
+ )
- if self.cookiejar is not None:
- self.cookiejar.extract_cookies(resp, req)
- except (urllib2.HTTPError, urllib2.URLError), why:
- if hasattr(why, 'reason'):
- if isinstance(why.reason, socket.timeout):
- why = Timeout(why)
- elif isinstance(why.reason, socket.error):
- why = Timeout(why)
+ except MaxRetryError, e:
+ if not self.config.get('safe_mode', False):
+ raise ConnectionError(e)
+ else:
+ r = None
- self._build_response(why, is_error=True)
+ except (_SSLError, _HTTPError), e:
+ if not self.config.get('safe_mode', False):
+ raise Timeout('Request timed out.')
- else:
- self._build_response(resp)
- self.response.ok = True
+ self._build_response(r)
+
+ # Response manipulation hook.
+ self.response = dispatch_hook('response', self.hooks, self.response)
+ # Post-request hook.
+ r = dispatch_hook('post_request', self.hooks, self)
+ self.__dict__.update(r.__dict__)
- self.sent = self.response.ok
+ # If prefetch is True, mark content as consumed.
+ if prefetch:
+ self.response._content_consumed = True
- return self.sent
+ return self.sent
class Response(object):
@@ -412,12 +500,12 @@ class Response(object):
#: Final URL location of Response.
self.url = None
- #: True if no :attr:`error` occured.
- self.ok = False
-
- #: Resulting :class:`HTTPError` of request, if one occured.
+ #: Resulting :class:`HTTPError` of request, if one occurred.
self.error = None
+ #: Encoding to decode with when accessing r.content.
+ self.encoding = None
+
#: A list of :class:`Response <Response>` objects from
#: the history of the Request. Any redirect responses will end
#: up here.
@@ -427,17 +515,27 @@ class Response(object):
self.request = None
#: A dictionary of Cookies the server sent back.
- self.cookies = None
+ self.cookies = {}
+
+ #: Dictionary of configurations for this request.
+ self.config = {}
def __repr__(self):
return '<Response [%s]>' % (self.status_code)
-
def __nonzero__(self):
"""Returns true if :attr:`status_code` is 'OK'."""
+ return self.ok
+
+ @property
+ def ok(self):
+ try:
+ self.raise_for_status()
+ except HTTPError:
+ return False
+ return True
- return not self.error
def iter_content(self, chunk_size=10 * 1024, decode_unicode=None):
"""Iterates over the response data. This avoids reading the content
@@ -446,8 +544,9 @@ class Response(object):
length of each item returned as decoding can take place.
"""
if self._content_consumed:
- raise RuntimeError('The content for this response was '
- 'already consumed')
+ raise RuntimeError(
+ 'The content for this response was already consumed'
+ )
def generate():
while 1:
@@ -456,224 +555,81 @@ class Response(object):
break
yield chunk
self._content_consumed = True
+
gen = generate()
+
if 'gzip' in self.headers.get('content-encoding', ''):
gen = stream_decode_gzip(gen)
+
if decode_unicode is None:
- decode_unicode = settings.decode_unicode
+ decode_unicode = self.config.get('decode_unicode')
+
if decode_unicode:
gen = stream_decode_response_unicode(gen, self)
+
return gen
+
@property
def content(self):
"""Content of the response, in bytes or unicode
(if available).
"""
- if self._content is not None:
- return self._content
+ if self._content is None:
+ # Read the contents.
+ try:
+ if self._content_consumed:
+ raise RuntimeError(
+ 'The content for this response was already consumed')
- if self._content_consumed:
- raise RuntimeError('The content for this response was '
- 'already consumed')
+ self._content = self.raw.read()
+ except AttributeError:
+ self._content = None
- # Read the contents.
- self._content = self.raw.read()
+ content = self._content
# Decode GZip'd content.
if 'gzip' in self.headers.get('content-encoding', ''):
try:
- self._content = decode_gzip(self._content)
+ content = decode_gzip(self._content)
except zlib.error:
pass
# Decode unicode content.
- if settings.decode_unicode:
- self._content = get_unicode_from_response(self)
-
- self._content_consumed = True
- return self._content
-
-
- def raise_for_status(self):
- """Raises stored :class:`HTTPError` or :class:`URLError`, if one occured."""
- if self.error:
- raise self.error
-
-
-
-class AuthManager(object):
- """Requests Authentication Manager."""
-
- def __new__(cls):
- singleton = cls.__dict__.get('__singleton__')
- if singleton is not None:
- return singleton
-
- cls.__singleton__ = singleton = object.__new__(cls)
-
- return singleton
-
-
- def __init__(self):
- self.passwd = {}
- self._auth = {}
-
-
- def __repr__(self):
- return '<AuthManager [%s]>' % (self.method)
-
+ if self.config.get('decode_unicode'):
- def add_auth(self, uri, auth):
- """Registers AuthObject to AuthManager."""
+ # Try charset from content-type
- uri = self.reduce_uri(uri, False)
+ if self.encoding:
+ try:
+ content = unicode(content, self.encoding)
+ except UnicodeError:
+ pass
- # try to make it an AuthObject
- if not isinstance(auth, AuthObject):
+ # Fall back:
try:
- auth = AuthObject(*auth)
+ content = unicode(content, self.encoding, errors='replace')
except TypeError:
pass
- self._auth[uri] = auth
-
-
- def add_password(self, realm, uri, user, passwd):
- """Adds password to AuthManager."""
- # uri could be a single URI or a sequence
- if isinstance(uri, basestring):
- uri = [uri]
-
- reduced_uri = tuple([self.reduce_uri(u, False) for u in uri])
-
- if reduced_uri not in self.passwd:
- self.passwd[reduced_uri] = {}
- self.passwd[reduced_uri] = (user, passwd)
-
-
- def find_user_password(self, realm, authuri):
- for uris, authinfo in self.passwd.iteritems():
- reduced_authuri = self.reduce_uri(authuri, False)
- for uri in uris:
- if self.is_suburi(uri, reduced_authuri):
- return authinfo
-
- return (None, None)
-
-
- def get_auth(self, uri):
- (in_domain, in_path) = self.reduce_uri(uri, False)
-
- for domain, path, authority in (
- (i[0][0], i[0][1], i[1]) for i in self._auth.iteritems()
- ):
- if in_domain == domain:
- if path in in_path:
- return authority
-
-
- def reduce_uri(self, uri, default_port=True):
- """Accept authority or URI and extract only the authority and path."""
-
- # note HTTP URLs do not have a userinfo component
- parts = urllib2.urlparse.urlsplit(uri)
-
- if parts[1]:
- # URI
- scheme = parts[0]
- authority = parts[1]
- path = parts[2] or '/'
- else:
- # host or host:port
- scheme = None
- authority = uri
- path = '/'
-
- host, port = urllib2.splitport(authority)
-
- if default_port and port is None and scheme is not None:
- dport = {"http": 80,
- "https": 443,
- }.get(scheme)
- if dport is not None:
- authority = "%s:%d" % (host, dport)
-
- return authority, path
-
-
- def is_suburi(self, base, test):
- """Check if test is below base in a URI tree
-
- Both args must be URIs in reduced form.
- """
- if base == test:
- return True
- if base[0] != test[0]:
- return False
- common = urllib2.posixpath.commonprefix((base[1], test[1]))
- if len(common) == len(base[1]):
- return True
- return False
-
-
- def empty(self):
- self.passwd = {}
-
-
- def remove(self, uri, realm=None):
- # uri could be a single URI or a sequence
- if isinstance(uri, basestring):
- uri = [uri]
-
- for default_port in True, False:
- reduced_uri = tuple([self.reduce_uri(u, default_port) for u in uri])
- del self.passwd[reduced_uri][realm]
-
-
- def __contains__(self, uri):
- # uri could be a single URI or a sequence
- if isinstance(uri, basestring):
- uri = [uri]
-
- uri = tuple([self.reduce_uri(u, False) for u in uri])
+ self._content_consumed = True
+ return content
- if uri in self.passwd:
- return True
- return False
+ def raise_for_status(self):
+ """Raises stored :class:`HTTPError` or :class:`URLError`, if one occurred."""
-auth_manager = AuthManager()
+ if self.error:
+ raise self.error
+ if (self.status_code >= 300) and (self.status_code < 400):
+ raise HTTPError('%s Redirection' % self.status_code)
+ elif (self.status_code >= 400) and (self.status_code < 500):
+ raise HTTPError('%s Client Error' % self.status_code)
-class AuthObject(object):
- """The :class:`AuthObject` is a simple HTTP Authentication token. When
- given to a Requests function, it enables Basic HTTP Authentication for that
- Request. You can also enable Authorization for domain realms with AutoAuth.
- See AutoAuth for more details.
+ elif (self.status_code >= 500) and (self.status_code < 600):
+ raise HTTPError('%s Server Error' % self.status_code)
- :param username: Username to authenticate with.
- :param password: Password for given username.
- :param realm: (optional) the realm this auth applies to
- :param handler: (optional) basic || digest || proxy_basic || proxy_digest
- """
- _handlers = {
- 'basic': HTTPBasicAuthHandler,
- 'forced_basic': HTTPForcedBasicAuthHandler,
- 'digest': HTTPDigestAuthHandler,
- 'proxy_basic': urllib2.ProxyBasicAuthHandler,
- 'proxy_digest': urllib2.ProxyDigestAuthHandler
- }
-
- def __init__(self, username, password, handler='forced_basic', realm=None):
- self.username = username
- self.password = password
- self.realm = realm
-
- if isinstance(handler, basestring):
- self.handler = self._handlers.get(handler.lower(), HTTPForcedBasicAuthHandler)
- else:
- self.handler = handler
diff --git a/requests/monkeys.py b/requests/monkeys.py
deleted file mode 100644
index c838071..0000000
--- a/requests/monkeys.py
+++ /dev/null
@@ -1,148 +0,0 @@
-#-*- coding: utf-8 -*-
-
-"""
-requests.monkeys
-~~~~~~~~~~~~~~~~
-
-Urllib2 Monkey patches.
-
-"""
-
-import urllib2
-import re
-
-class Request(urllib2.Request):
- """Hidden wrapper around the urllib2.Request object. Allows for manual
- setting of HTTP methods.
- """
-
- def __init__(self, url, data=None, headers={}, origin_req_host=None, unverifiable=False, method=None):
- urllib2.Request.__init__(self, url, data, headers, origin_req_host, unverifiable)
- self.method = method
-
- def get_method(self):
- if self.method:
- return self.method
-
- return urllib2.Request.get_method(self)
-
-
-class HTTPRedirectHandler(urllib2.HTTPRedirectHandler):
- """HTTP Redirect handler."""
- def http_error_301(self, req, fp, code, msg, headers):
- pass
-
- http_error_302 = http_error_303 = http_error_307 = http_error_301
-
-
-
-class HTTPBasicAuthHandler(urllib2.HTTPBasicAuthHandler):
- """HTTP Basic Auth Handler with authentication loop fixes."""
-
- def __init__(self, *args, **kwargs):
- urllib2.HTTPBasicAuthHandler.__init__(self, *args, **kwargs)
- self.retried_req = None
- self.retried = 0
-
-
- def reset_retry_count(self):
- # Python 2.6.5 will call this on 401 or 407 errors and thus loop
- # forever. We disable reset_retry_count completely and reset in
- # http_error_auth_reqed instead.
- pass
-
-
- def http_error_auth_reqed(self, auth_header, host, req, headers):
- # Reset the retry counter once for each request.
- if req is not self.retried_req:
- self.retried_req = req
- self.retried = 0
-
- return urllib2.HTTPBasicAuthHandler.http_error_auth_reqed(
- self, auth_header, host, req, headers
- )
-
-
-
-class HTTPForcedBasicAuthHandler(HTTPBasicAuthHandler):
- """HTTP Basic Auth Handler with forced Authentication."""
-
- auth_header = 'Authorization'
- rx = re.compile('(?:.*,)*[ \t]*([^ \t]+)[ \t]+'
- 'realm=(["\'])(.*?)\\2', re.I)
-
- def __init__(self, *args, **kwargs):
- HTTPBasicAuthHandler.__init__(self, *args, **kwargs)
-
-
- def http_error_401(self, req, fp, code, msg, headers):
- url = req.get_full_url()
- response = self._http_error_auth_reqed('www-authenticate', url, req, headers)
- self.reset_retry_count()
- return response
-
- http_error_404 = http_error_401
-
-
- def _http_error_auth_reqed(self, authreq, host, req, headers):
-
- authreq = headers.get(authreq, None)
-
- if self.retried > 5:
- # retry sending the username:password 5 times before failing.
- raise urllib2.HTTPError(req.get_full_url(), 401, "basic auth failed",
- headers, None)
- else:
- self.retried += 1
-
- if authreq:
-
- mo = self.rx.search(authreq)
-
- if mo:
- scheme, quote, realm = mo.groups()
-
- if scheme.lower() == 'basic':
- response = self.retry_http_basic_auth(host, req, realm)
-
- if response and response.code not in (401, 404):
- self.retried = 0
- return response
- else:
- response = self.retry_http_basic_auth(host, req, 'Realm')
-
- if response and response.code not in (401, 404):
- self.retried = 0
- return response
-
-
-
-class HTTPDigestAuthHandler(urllib2.HTTPDigestAuthHandler):
-
- def __init__(self, *args, **kwargs):
- urllib2.HTTPDigestAuthHandler.__init__(self, *args, **kwargs)
- self.retried_req = None
-
- def reset_retry_count(self):
- # Python 2.6.5 will call this on 401 or 407 errors and thus loop
- # forever. We disable reset_retry_count completely and reset in
- # http_error_auth_reqed instead.
- pass
-
- def http_error_auth_reqed(self, auth_header, host, req, headers):
- # Reset the retry counter once for each request.
- if req is not self.retried_req:
- self.retried_req = req
- self.retried = 0
- # In python < 2.5 AbstractDigestAuthHandler raises a ValueError if
- # it doesn't know about the auth type requested. This can happen if
- # somebody is using BasicAuth and types a bad password.
-
- try:
- return urllib2.HTTPDigestAuthHandler.http_error_auth_reqed(
- self, auth_header, host, req, headers)
- except ValueError, inst:
- arg = inst.args[0]
- if arg.startswith("AbstractDigestAuthHandler doesn't know "):
- return
- raise \ No newline at end of file
diff --git a/requests/packages/__init__.py b/requests/packages/__init__.py
index ab2669e..d62c4b7 100644
--- a/requests/packages/__init__.py
+++ b/requests/packages/__init__.py
@@ -1,3 +1,3 @@
from __future__ import absolute_import
-from . import poster
+from . import urllib3
diff --git a/requests/packages/oreos/__init__.py b/requests/packages/oreos/__init__.py
new file mode 100644
index 0000000..d01340f
--- /dev/null
+++ b/requests/packages/oreos/__init__.py
@@ -0,0 +1,3 @@
+# -*- coding: utf-8 -*-
+
+from .core import dict_from_string \ No newline at end of file
diff --git a/requests/packages/oreos/core.py b/requests/packages/oreos/core.py
new file mode 100644
index 0000000..359d744
--- /dev/null
+++ b/requests/packages/oreos/core.py
@@ -0,0 +1,24 @@
+# -*- coding: utf-8 -*-
+
+"""
+oreos.core
+~~~~~~~~~~
+
+The creamy white center.
+"""
+
+from .monkeys import SimpleCookie
+
+
+def dict_from_string(s):
+ """Returns a MultiDict with Cookies."""
+
+ cookies = dict()
+
+ c = SimpleCookie()
+ c.load(s)
+
+ for k,v in c.items():
+ cookies.update({k: v.value})
+
+ return cookies \ No newline at end of file
diff --git a/requests/packages/oreos/monkeys.py b/requests/packages/oreos/monkeys.py
new file mode 100644
index 0000000..6be3074
--- /dev/null
+++ b/requests/packages/oreos/monkeys.py
@@ -0,0 +1,770 @@
+# -*- coding: utf-8 -*-
+
+"""
+oreos.monkeys
+~~~~~~~~~~~~~
+
+Monkeypatches.
+"""
+#!/usr/bin/env python
+#
+
+####
+# Copyright 2000 by Timothy O'Malley <timo@alum.mit.edu>
+#
+# All Rights Reserved
+#
+# Permission to use, copy, modify, and distribute this software
+# and its documentation for any purpose and without fee is hereby
+# granted, provided that the above copyright notice appear in all
+# copies and that both that copyright notice and this permission
+# notice appear in supporting documentation, and that the name of
+# Timothy O'Malley not be used in advertising or publicity
+# pertaining to distribution of the software without specific, written
+# prior permission.
+#
+# Timothy O'Malley DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS
+# SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY
+# AND FITNESS, IN NO EVENT SHALL Timothy O'Malley BE LIABLE FOR
+# ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
+# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
+# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR
+# PERFORMANCE OF THIS SOFTWARE.
+#
+####
+#
+# Id: Cookie.py,v 2.29 2000/08/23 05:28:49 timo Exp
+# by Timothy O'Malley <timo@alum.mit.edu>
+#
+# Cookie.py is a Python module for the handling of HTTP
+# cookies as a Python dictionary. See RFC 2109 for more
+# information on cookies.
+#
+# The original idea to treat Cookies as a dictionary came from
+# Dave Mitchell (davem@magnet.com) in 1995, when he released the
+# first version of nscookie.py.
+#
+####
+
+r"""
+Here's a sample session to show how to use this module.
+At the moment, this is the only documentation.
+
+The Basics
+----------
+
+Importing is easy..
+
+ >>> import Cookie
+
+Most of the time you start by creating a cookie. Cookies come in
+three flavors, each with slightly different encoding semantics, but
+more on that later.
+
+ >>> C = Cookie.SimpleCookie()
+ >>> C = Cookie.SerialCookie()
+ >>> C = Cookie.SmartCookie()
+
+[Note: Long-time users of Cookie.py will remember using
+Cookie.Cookie() to create an Cookie object. Although deprecated, it
+is still supported by the code. See the Backward Compatibility notes
+for more information.]
+
+Once you've created your Cookie, you can add values just as if it were
+a dictionary.
+
+ >>> C = Cookie.SmartCookie()
+ >>> C["fig"] = "newton"
+ >>> C["sugar"] = "wafer"
+ >>> C.output()
+ 'Set-Cookie: fig=newton\r\nSet-Cookie: sugar=wafer'
+
+Notice that the printable representation of a Cookie is the
+appropriate format for a Set-Cookie: header. This is the
+default behavior. You can change the header and printed
+attributes by using the .output() function
+
+ >>> C = Cookie.SmartCookie()
+ >>> C["rocky"] = "road"
+ >>> C["rocky"]["path"] = "/cookie"
+ >>> print C.output(header="Cookie:")
+ Cookie: rocky=road; Path=/cookie
+ >>> print C.output(attrs=[], header="Cookie:")
+ Cookie: rocky=road
+
+The load() method of a Cookie extracts cookies from a string. In a
+CGI script, you would use this method to extract the cookies from the
+HTTP_COOKIE environment variable.
+
+ >>> C = Cookie.SmartCookie()
+ >>> C.load("chips=ahoy; vienna=finger")
+ >>> C.output()
+ 'Set-Cookie: chips=ahoy\r\nSet-Cookie: vienna=finger'
+
+The load() method is darn-tootin smart about identifying cookies
+within a string. Escaped quotation marks, nested semicolons, and other
+such trickeries do not confuse it.
+
+ >>> C = Cookie.SmartCookie()
+ >>> C.load('keebler="E=everybody; L=\\"Loves\\"; fudge=\\012;";')
+ >>> print C
+ Set-Cookie: keebler="E=everybody; L=\"Loves\"; fudge=\012;"
+
+Each element of the Cookie also supports all of the RFC 2109
+Cookie attributes. Here's an example which sets the Path
+attribute.
+
+ >>> C = Cookie.SmartCookie()
+ >>> C["oreo"] = "doublestuff"
+ >>> C["oreo"]["path"] = "/"
+ >>> print C
+ Set-Cookie: oreo=doublestuff; Path=/
+
+Each dictionary element has a 'value' attribute, which gives you
+back the value associated with the key.
+
+ >>> C = Cookie.SmartCookie()
+ >>> C["twix"] = "none for you"
+ >>> C["twix"].value
+ 'none for you'
+
+
+A Bit More Advanced
+-------------------
+
+As mentioned before, there are three different flavors of Cookie
+objects, each with different encoding/decoding semantics. This
+section briefly discusses the differences.
+
+SimpleCookie
+
+The SimpleCookie expects that all values should be standard strings.
+Just to be sure, SimpleCookie invokes the str() builtin to convert
+the value to a string, when the values are set dictionary-style.
+
+ >>> C = Cookie.SimpleCookie()
+ >>> C["number"] = 7
+ >>> C["string"] = "seven"
+ >>> C["number"].value
+ '7'
+ >>> C["string"].value
+ 'seven'
+ >>> C.output()
+ 'Set-Cookie: number=7\r\nSet-Cookie: string=seven'
+
+
+SerialCookie
+
+The SerialCookie expects that all values should be serialized using
+cPickle (or pickle, if cPickle isn't available). As a result of
+serializing, SerialCookie can save almost any Python object to a
+value, and recover the exact same object when the cookie has been
+returned. (SerialCookie can yield some strange-looking cookie
+values, however.)
+
+ >>> C = Cookie.SerialCookie()
+ >>> C["number"] = 7
+ >>> C["string"] = "seven"
+ >>> C["number"].value
+ 7
+ >>> C["string"].value
+ 'seven'
+ >>> C.output()
+ 'Set-Cookie: number="I7\\012."\r\nSet-Cookie: string="S\'seven\'\\012p1\\012."'
+
+Be warned, however, if SerialCookie cannot de-serialize a value (because
+it isn't a valid pickle'd object), IT WILL RAISE AN EXCEPTION.
+
+
+SmartCookie
+
+The SmartCookie combines aspects of each of the other two flavors.
+When setting a value in a dictionary-fashion, the SmartCookie will
+serialize (ala cPickle) the value *if and only if* it isn't a
+Python string. String objects are *not* serialized. Similarly,
+when the load() method parses out values, it attempts to de-serialize
+the value. If it fails, then it fallsback to treating the value
+as a string.
+
+ >>> C = Cookie.SmartCookie()
+ >>> C["number"] = 7
+ >>> C["string"] = "seven"
+ >>> C["number"].value
+ 7
+ >>> C["string"].value
+ 'seven'
+ >>> C.output()
+ 'Set-Cookie: number="I7\\012."\r\nSet-Cookie: string=seven'
+
+
+Backwards Compatibility
+-----------------------
+
+In order to keep compatibilty with earlier versions of Cookie.py,
+it is still possible to use Cookie.Cookie() to create a Cookie. In
+fact, this simply returns a SmartCookie.
+
+ >>> C = Cookie.Cookie()
+ >>> print C.__class__.__name__
+ SmartCookie
+
+
+Finis.
+""" #"
+# ^
+# |----helps out font-lock
+
+#
+# Import our required modules
+#
+import string
+
+try:
+ from cPickle import dumps, loads
+except ImportError:
+ from pickle import dumps, loads
+
+import re, warnings
+
+__all__ = ["CookieError","BaseCookie","SimpleCookie","SerialCookie",
+ "SmartCookie","Cookie"]
+
+_nulljoin = ''.join
+_semispacejoin = '; '.join
+_spacejoin = ' '.join
+
+#
+# Define an exception visible to External modules
+#
+class CookieError(Exception):
+ pass
+
+
+# These quoting routines conform to the RFC2109 specification, which in
+# turn references the character definitions from RFC2068. They provide
+# a two-way quoting algorithm. Any non-text character is translated
+# into a 4 character sequence: a forward-slash followed by the
+# three-digit octal equivalent of the character. Any '\' or '"' is
+# quoted with a preceeding '\' slash.
+#
+# These are taken from RFC2068 and RFC2109.
+# _LegalChars is the list of chars which don't require "'s
+# _Translator hash-table for fast quoting
+#
+_LegalChars = string.ascii_letters + string.digits + "!#$%&'*+-.^_`|~[]_"
+_Translator = {
+ '\000' : '\\000', '\001' : '\\001', '\002' : '\\002',
+ '\003' : '\\003', '\004' : '\\004', '\005' : '\\005',
+ '\006' : '\\006', '\007' : '\\007', '\010' : '\\010',
+ '\011' : '\\011', '\012' : '\\012', '\013' : '\\013',
+ '\014' : '\\014', '\015' : '\\015', '\016' : '\\016',
+ '\017' : '\\017', '\020' : '\\020', '\021' : '\\021',
+ '\022' : '\\022', '\023' : '\\023', '\024' : '\\024',
+ '\025' : '\\025', '\026' : '\\026', '\027' : '\\027',
+ '\030' : '\\030', '\031' : '\\031', '\032' : '\\032',
+ '\033' : '\\033', '\034' : '\\034', '\035' : '\\035',
+ '\036' : '\\036', '\037' : '\\037',
+
+ # Because of the way browsers really handle cookies (as opposed
+ # to what the RFC says) we also encode , and ;
+
+ ',' : '\\054', ';' : '\\073',
+
+ '"' : '\\"', '\\' : '\\\\',
+
+ '\177' : '\\177', '\200' : '\\200', '\201' : '\\201',
+ '\202' : '\\202', '\203' : '\\203', '\204' : '\\204',
+ '\205' : '\\205', '\206' : '\\206', '\207' : '\\207',
+ '\210' : '\\210', '\211' : '\\211', '\212' : '\\212',
+ '\213' : '\\213', '\214' : '\\214', '\215' : '\\215',
+ '\216' : '\\216', '\217' : '\\217', '\220' : '\\220',
+ '\221' : '\\221', '\222' : '\\222', '\223' : '\\223',
+ '\224' : '\\224', '\225' : '\\225', '\226' : '\\226',
+ '\227' : '\\227', '\230' : '\\230', '\231' : '\\231',
+ '\232' : '\\232', '\233' : '\\233', '\234' : '\\234',
+ '\235' : '\\235', '\236' : '\\236', '\237' : '\\237',
+ '\240' : '\\240', '\241' : '\\241', '\242' : '\\242',
+ '\243' : '\\243', '\244' : '\\244', '\245' : '\\245',
+ '\246' : '\\246', '\247' : '\\247', '\250' : '\\250',
+ '\251' : '\\251', '\252' : '\\252', '\253' : '\\253',
+ '\254' : '\\254', '\255' : '\\255', '\256' : '\\256',
+ '\257' : '\\257', '\260' : '\\260', '\261' : '\\261',
+ '\262' : '\\262', '\263' : '\\263', '\264' : '\\264',
+ '\265' : '\\265', '\266' : '\\266', '\267' : '\\267',
+ '\270' : '\\270', '\271' : '\\271', '\272' : '\\272',
+ '\273' : '\\273', '\274' : '\\274', '\275' : '\\275',
+ '\276' : '\\276', '\277' : '\\277', '\300' : '\\300',
+ '\301' : '\\301', '\302' : '\\302', '\303' : '\\303',
+ '\304' : '\\304', '\305' : '\\305', '\306' : '\\306',
+ '\307' : '\\307', '\310' : '\\310', '\311' : '\\311',
+ '\312' : '\\312', '\313' : '\\313', '\314' : '\\314',
+ '\315' : '\\315', '\316' : '\\316', '\317' : '\\317',
+ '\320' : '\\320', '\321' : '\\321', '\322' : '\\322',
+ '\323' : '\\323', '\324' : '\\324', '\325' : '\\325',
+ '\326' : '\\326', '\327' : '\\327', '\330' : '\\330',
+ '\331' : '\\331', '\332' : '\\332', '\333' : '\\333',
+ '\334' : '\\334', '\335' : '\\335', '\336' : '\\336',
+ '\337' : '\\337', '\340' : '\\340', '\341' : '\\341',
+ '\342' : '\\342', '\343' : '\\343', '\344' : '\\344',
+ '\345' : '\\345', '\346' : '\\346', '\347' : '\\347',
+ '\350' : '\\350', '\351' : '\\351', '\352' : '\\352',
+ '\353' : '\\353', '\354' : '\\354', '\355' : '\\355',
+ '\356' : '\\356', '\357' : '\\357', '\360' : '\\360',
+ '\361' : '\\361', '\362' : '\\362', '\363' : '\\363',
+ '\364' : '\\364', '\365' : '\\365', '\366' : '\\366',
+ '\367' : '\\367', '\370' : '\\370', '\371' : '\\371',
+ '\372' : '\\372', '\373' : '\\373', '\374' : '\\374',
+ '\375' : '\\375', '\376' : '\\376', '\377' : '\\377'
+ }
+
+_idmap = ''.join(chr(x) for x in xrange(256))
+
+def _quote(str, LegalChars=_LegalChars,
+ idmap=_idmap, translate=string.translate):
+ #
+ # If the string does not need to be double-quoted,
+ # then just return the string. Otherwise, surround
+ # the string in doublequotes and precede quote (with a \)
+ # special characters.
+ #
+ if "" == translate(str, idmap, LegalChars):
+ return str
+ else:
+ return '"' + _nulljoin( map(_Translator.get, str, str) ) + '"'
+# end _quote
+
+
+_OctalPatt = re.compile(r"\\[0-3][0-7][0-7]")
+_QuotePatt = re.compile(r"[\\].")
+
+def _unquote(str):
+ # If there aren't any doublequotes,
+ # then there can't be any special characters. See RFC 2109.
+ if len(str) < 2:
+ return str
+ if str[0] != '"' or str[-1] != '"':
+ return str
+
+ # We have to assume that we must decode this string.
+ # Down to work.
+
+ # Remove the "s
+ str = str[1:-1]
+
+ # Check for special sequences. Examples:
+ # \012 --> \n
+ # \" --> "
+ #
+ i = 0
+ n = len(str)
+ res = []
+ while 0 <= i < n:
+ Omatch = _OctalPatt.search(str, i)
+ Qmatch = _QuotePatt.search(str, i)
+ if not Omatch and not Qmatch: # Neither matched
+ res.append(str[i:])
+ break
+ # else:
+ j = k = -1
+ if Omatch: j = Omatch.start(0)
+ if Qmatch: k = Qmatch.start(0)
+ if Qmatch and ( not Omatch or k < j ): # QuotePatt matched
+ res.append(str[i:k])
+ res.append(str[k+1])
+ i = k+2
+ else: # OctalPatt matched
+ res.append(str[i:j])
+ res.append( chr( int(str[j+1:j+4], 8) ) )
+ i = j+4
+ return _nulljoin(res)
+# end _unquote
+
+# The _getdate() routine is used to set the expiration time in
+# the cookie's HTTP header. By default, _getdate() returns the
+# current time in the appropriate "expires" format for a
+# Set-Cookie header. The one optional argument is an offset from
+# now, in seconds. For example, an offset of -3600 means "one hour ago".
+# The offset may be a floating point number.
+#
+
+_weekdayname = ['Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat', 'Sun']
+
+_monthname = [None,
+ 'Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun',
+ 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec']
+
+def _getdate(future=0, weekdayname=_weekdayname, monthname=_monthname):
+ from time import gmtime, time
+ now = time()
+ year, month, day, hh, mm, ss, wd, y, z = gmtime(now + future)
+ return "%s, %02d-%3s-%4d %02d:%02d:%02d GMT" % \
+ (weekdayname[wd], day, monthname[month], year, hh, mm, ss)
+
+
+#
+# A class to hold ONE key,value pair.
+# In a cookie, each such pair may have several attributes.
+# so this class is used to keep the attributes associated
+# with the appropriate key,value pair.
+# This class also includes a coded_value attribute, which
+# is used to hold the network representation of the
+# value. This is most useful when Python objects are
+# pickled for network transit.
+#
+
+class Morsel(dict):
+ # RFC 2109 lists these attributes as reserved:
+ # path comment domain
+ # max-age secure version
+ #
+ # For historical reasons, these attributes are also reserved:
+ # expires
+ #
+ # This is an extension from Microsoft:
+ # httponly
+ #
+ # This dictionary provides a mapping from the lowercase
+ # variant on the left to the appropriate traditional
+ # formatting on the right.
+ _reserved = { "expires" : "expires",
+ "path" : "Path",
+ "comment" : "Comment",
+ "domain" : "Domain",
+ "max-age" : "Max-Age",
+ "secure" : "secure",
+ "httponly" : "httponly",
+ "version" : "Version",
+ }
+
+ def __init__(self):
+ # Set defaults
+ self.key = self.value = self.coded_value = None
+
+ # Set default attributes
+ for K in self._reserved:
+ dict.__setitem__(self, K, "")
+ # end __init__
+
+ def __setitem__(self, K, V):
+ K = K.lower()
+ if not K in self._reserved:
+ raise CookieError("Invalid Attribute %s" % K)
+ dict.__setitem__(self, K, V)
+ # end __setitem__
+
+ def isReservedKey(self, K):
+ return K.lower() in self._reserved
+ # end isReservedKey
+
+ def set(self, key, val, coded_val,
+ LegalChars=_LegalChars,
+ idmap=_idmap, translate=string.translate):
+ # First we verify that the key isn't a reserved word
+ # Second we make sure it only contains legal characters
+ if key.lower() in self._reserved:
+ raise CookieError("Attempt to set a reserved key: %s" % key)
+ if "" != translate(key, idmap, LegalChars):
+ raise CookieError("Illegal key value: %s" % key)
+
+ # It's a good key, so save it.
+ self.key = key
+ self.value = val
+ self.coded_value = coded_val
+ # end set
+
+ def output(self, attrs=None, header = "Set-Cookie:"):
+ return "%s %s" % ( header, self.OutputString(attrs) )
+
+ __str__ = output
+
+ def __repr__(self):
+ return '<%s: %s=%s>' % (self.__class__.__name__,
+ self.key, repr(self.value) )
+
+ def js_output(self, attrs=None):
+ # Print javascript
+ return """
+ <script type="text/javascript">
+ <!-- begin hiding
+ document.cookie = \"%s\";
+ // end hiding -->
+ </script>
+ """ % ( self.OutputString(attrs).replace('"',r'\"'), )
+ # end js_output()
+
+ def OutputString(self, attrs=None):
+ # Build up our result
+ #
+ result = []
+ RA = result.append
+
+ # First, the key=value pair
+ RA("%s=%s" % (self.key, self.coded_value))
+
+ # Now add any defined attributes
+ if attrs is None:
+ attrs = self._reserved
+ items = self.items()
+ items.sort()
+ for K,V in items:
+ if V == "": continue
+ if K not in attrs: continue
+ if K == "expires" and type(V) == type(1):
+ RA("%s=%s" % (self._reserved[K], _getdate(V)))
+ elif K == "max-age" and type(V) == type(1):
+ RA("%s=%d" % (self._reserved[K], V))
+ elif K == "secure":
+ RA(str(self._reserved[K]))
+ elif K == "httponly":
+ RA(str(self._reserved[K]))
+ else:
+ RA("%s=%s" % (self._reserved[K], V))
+
+ # Return the result
+ return _semispacejoin(result)
+ # end OutputString
+# end Morsel class
+
+
+
+#
+# Pattern for finding cookie
+#
+# This used to be strict parsing based on the RFC2109 and RFC2068
+# specifications. I have since discovered that MSIE 3.0x doesn't
+# follow the character rules outlined in those specs. As a
+# result, the parsing rules here are less strict.
+#
+
+_LegalCharsPatt = r"[\w\d!#%&'~_`><@,:/\$\*\+\-\.\^\|\)\(\?\}\{\=\[\]\_]"
+
+_CookiePattern = re.compile(
+ r"(?x)" # This is a Verbose pattern
+ r"(?P<key>" # Start of group 'key'
+ ""+ _LegalCharsPatt +"+?" # Any word of at least one letter, nongreedy
+ r")" # End of group 'key'
+ r"\s*=\s*" # Equal Sign
+ r"(?P<val>" # Start of group 'val'
+ r'"(?:[^\\"]|\\.)*"' # Any doublequoted string
+ r"|" # or
+ r"\w{3},\s[\w\d-]{9,11}\s[\d:]{8}\sGMT" # Special case for "expires" attr
+ r"|" # or
+ ""+ _LegalCharsPatt +"*" # Any word or empty string
+ r")" # End of group 'val'
+ r"\s*;?" # Probably ending in a semi-colon
+ )
+
+
+# At long last, here is the cookie class.
+# Using this class is almost just like using a dictionary.
+# See this module's docstring for example usage.
+#
+class BaseCookie(dict):
+ # A container class for a set of Morsels
+ #
+
+ def value_decode(self, val):
+ """real_value, coded_value = value_decode(STRING)
+ Called prior to setting a cookie's value from the network
+ representation. The VALUE is the value read from HTTP
+ header.
+ Override this function to modify the behavior of cookies.
+ """
+ return val, val
+ # end value_encode
+
+ def value_encode(self, val):
+ """real_value, coded_value = value_encode(VALUE)
+ Called prior to setting a cookie's value from the dictionary
+ representation. The VALUE is the value being assigned.
+ Override this function to modify the behavior of cookies.
+ """
+ strval = str(val)
+ return strval, strval
+ # end value_encode
+
+ def __init__(self, input=None):
+ if input: self.load(input)
+ # end __init__
+
+ def __set(self, key, real_value, coded_value):
+ """Private method for setting a cookie's value"""
+ M = self.get(key, Morsel())
+ M.set(key, real_value, coded_value)
+ dict.__setitem__(self, key, M)
+ # end __set
+
+ def __setitem__(self, key, value):
+ """Dictionary style assignment."""
+ rval, cval = self.value_encode(value)
+ self.__set(key, rval, cval)
+ # end __setitem__
+
+ def output(self, attrs=None, header="Set-Cookie:", sep="\015\012"):
+ """Return a string suitable for HTTP."""
+ result = []
+ items = self.items()
+ items.sort()
+ for K,V in items:
+ result.append( V.output(attrs, header) )
+ return sep.join(result)
+ # end output
+
+ __str__ = output
+
+ def __repr__(self):
+ L = []
+ items = self.items()
+ items.sort()
+ for K,V in items:
+ L.append( '%s=%s' % (K,repr(V.value) ) )
+ return '<%s: %s>' % (self.__class__.__name__, _spacejoin(L))
+
+ def js_output(self, attrs=None):
+ """Return a string suitable for JavaScript."""
+ result = []
+ items = self.items()
+ items.sort()
+ for K,V in items:
+ result.append( V.js_output(attrs) )
+ return _nulljoin(result)
+ # end js_output
+
+ def load(self, rawdata):
+ """Load cookies from a string (presumably HTTP_COOKIE) or
+ from a dictionary. Loading cookies from a dictionary 'd'
+ is equivalent to calling:
+ map(Cookie.__setitem__, d.keys(), d.values())
+ """
+ if type(rawdata) == type(""):
+ self.__ParseString(rawdata)
+ else:
+ # self.update() wouldn't call our custom __setitem__
+ for k, v in rawdata.items():
+ self[k] = v
+ return
+ # end load()
+
+ def __ParseString(self, str, patt=_CookiePattern):
+ i = 0 # Our starting point
+ n = len(str) # Length of string
+ M = None # current morsel
+
+ while 0 <= i < n:
+ # Start looking for a cookie
+ match = patt.search(str, i)
+ if not match: break # No more cookies
+
+ K,V = match.group("key"), match.group("val")
+ i = match.end(0)
+
+ # Parse the key, value in case it's metainfo
+ if K[0] == "$":
+ # We ignore attributes which pertain to the cookie
+ # mechanism as a whole. See RFC 2109.
+ # (Does anyone care?)
+ if M:
+ M[ K[1:] ] = V
+ elif K.lower() in Morsel._reserved:
+ if M:
+ M[ K ] = _unquote(V)
+ else:
+ rval, cval = self.value_decode(V)
+ self.__set(K, rval, cval)
+ M = self[K]
+ # end __ParseString
+# end BaseCookie class
+
+class SimpleCookie(BaseCookie):
+ """SimpleCookie
+ SimpleCookie supports strings as cookie values. When setting
+ the value using the dictionary assignment notation, SimpleCookie
+ calls the builtin str() to convert the value to a string. Values
+ received from HTTP are kept as strings.
+ """
+ def value_decode(self, val):
+ return _unquote( val ), val
+ def value_encode(self, val):
+ strval = str(val)
+ return strval, _quote( strval )
+# end SimpleCookie
+
+class SerialCookie(BaseCookie):
+ """SerialCookie
+ SerialCookie supports arbitrary objects as cookie values. All
+ values are serialized (using cPickle) before being sent to the
+ client. All incoming values are assumed to be valid Pickle
+ representations. IF AN INCOMING VALUE IS NOT IN A VALID PICKLE
+ FORMAT, THEN AN EXCEPTION WILL BE RAISED.
+
+ Note: Large cookie values add overhead because they must be
+ retransmitted on every HTTP transaction.
+
+ Note: HTTP has a 2k limit on the size of a cookie. This class
+ does not check for this limit, so be careful!!!
+ """
+ def __init__(self, input=None):
+ warnings.warn("SerialCookie class is insecure; do not use it",
+ DeprecationWarning)
+ BaseCookie.__init__(self, input)
+ # end __init__
+ def value_decode(self, val):
+ # This could raise an exception!
+ return loads( _unquote(val) ), val
+ def value_encode(self, val):
+ return val, _quote( dumps(val) )
+# end SerialCookie
+
+class SmartCookie(BaseCookie):
+ """SmartCookie
+ SmartCookie supports arbitrary objects as cookie values. If the
+ object is a string, then it is quoted. If the object is not a
+ string, however, then SmartCookie will use cPickle to serialize
+ the object into a string representation.
+
+ Note: Large cookie values add overhead because they must be
+ retransmitted on every HTTP transaction.
+
+ Note: HTTP has a 2k limit on the size of a cookie. This class
+ does not check for this limit, so be careful!!!
+ """
+ def __init__(self, input=None):
+ warnings.warn("Cookie/SmartCookie class is insecure; do not use it",
+ DeprecationWarning)
+ BaseCookie.__init__(self, input)
+ # end __init__
+ def value_decode(self, val):
+ strval = _unquote(val)
+ try:
+ return loads(strval), val
+ except:
+ return strval, val
+ def value_encode(self, val):
+ if type(val) == type(""):
+ return val, _quote(val)
+ else:
+ return val, _quote( dumps(val) )
+# end SmartCookie
+
+
+###########################################################
+# Backwards Compatibility: Don't break any existing code!
+
+# We provide Cookie() as an alias for SmartCookie()
+Cookie = SmartCookie
+
+#
+###########################################################
+
+def _test():
+ import doctest, Cookie
+ return doctest.testmod(Cookie)
+
+if __name__ == "__main__":
+ _test()
+
+
+#Local Variables:
+#tab-width: 4
+#end:
diff --git a/requests/packages/oreos/structures.py b/requests/packages/oreos/structures.py
new file mode 100644
index 0000000..063d5f9
--- /dev/null
+++ b/requests/packages/oreos/structures.py
@@ -0,0 +1,399 @@
+# -*- coding: utf-8 -*-
+
+"""
+oreos.sructures
+~~~~~~~~~~~~~~~
+
+The plastic blue packaging.
+
+This is mostly directly stolen from mitsuhiko/werkzeug.
+"""
+
+__all__ = ('MultiDict',)
+
+class _Missing(object):
+
+ def __repr__(self):
+ return 'no value'
+
+ def __reduce__(self):
+ return '_missing'
+
+_missing = _Missing()
+
+
+
+def iter_multi_items(mapping):
+ """Iterates over the items of a mapping yielding keys and values
+ without dropping any from more complex structures.
+ """
+ if isinstance(mapping, MultiDict):
+ for item in mapping.iteritems(multi=True):
+ yield item
+ elif isinstance(mapping, dict):
+ for key, value in mapping.iteritems():
+ if isinstance(value, (tuple, list)):
+ for value in value:
+ yield key, value
+ else:
+ yield key, value
+ else:
+ for item in mapping:
+ yield item
+
+
+
+class TypeConversionDict(dict):
+ """Works like a regular dict but the :meth:`get` method can perform
+ type conversions. :class:`MultiDict` and :class:`CombinedMultiDict`
+ are subclasses of this class and provide the same feature.
+
+ .. versionadded:: 0.5
+ """
+
+ def get(self, key, default=None, type=None):
+ """Return the default value if the requested data doesn't exist.
+ If `type` is provided and is a callable it should convert the value,
+ return it or raise a :exc:`ValueError` if that is not possible. In
+ this case the function will return the default as if the value was not
+ found:
+
+ >>> d = TypeConversionDict(foo='42', bar='blub')
+ >>> d.get('foo', type=int)
+ 42
+ >>> d.get('bar', -1, type=int)
+ -1
+
+ :param key: The key to be looked up.
+ :param default: The default value to be returned if the key can't
+ be looked up. If not further specified `None` is
+ returned.
+ :param type: A callable that is used to cast the value in the
+ :class:`MultiDict`. If a :exc:`ValueError` is raised
+ by this callable the default value is returned.
+ """
+ try:
+ rv = self[key]
+ if type is not None:
+ rv = type(rv)
+ except (KeyError, ValueError):
+ rv = default
+ return rv
+
+
+class MultiDict(TypeConversionDict):
+ """A :class:`MultiDict` is a dictionary subclass customized to deal with
+ multiple values for the same key which is for example used by the parsing
+ functions in the wrappers. This is necessary because some HTML form
+ elements pass multiple values for the same key.
+
+ :class:`MultiDict` implements all standard dictionary methods.
+ Internally, it saves all values for a key as a list, but the standard dict
+ access methods will only return the first value for a key. If you want to
+ gain access to the other values, too, you have to use the `list` methods as
+ explained below.
+
+ Basic Usage:
+
+ >>> d = MultiDict([('a', 'b'), ('a', 'c')])
+ >>> d
+ MultiDict([('a', 'b'), ('a', 'c')])
+ >>> d['a']
+ 'b'
+ >>> d.getlist('a')
+ ['b', 'c']
+ >>> 'a' in d
+ True
+
+ It behaves like a normal dict thus all dict functions will only return the
+ first value when multiple values for one key are found.
+
+ From Werkzeug 0.3 onwards, the `KeyError` raised by this class is also a
+ subclass of the :exc:`~exceptions.BadRequest` HTTP exception and will
+ render a page for a ``400 BAD REQUEST`` if caught in a catch-all for HTTP
+ exceptions.
+
+ A :class:`MultiDict` can be constructed from an iterable of
+ ``(key, value)`` tuples, a dict, a :class:`MultiDict` or from Werkzeug 0.2
+ onwards some keyword parameters.
+
+ :param mapping: the initial value for the :class:`MultiDict`. Either a
+ regular dict, an iterable of ``(key, value)`` tuples
+ or `None`.
+ """
+
+ def __init__(self, mapping=None):
+ if isinstance(mapping, MultiDict):
+ dict.__init__(self, ((k, l[:]) for k, l in mapping.iterlists()))
+ elif isinstance(mapping, dict):
+ tmp = {}
+ for key, value in mapping.iteritems():
+ if isinstance(value, (tuple, list)):
+ value = list(value)
+ else:
+ value = [value]
+ tmp[key] = value
+ dict.__init__(self, tmp)
+ else:
+ tmp = {}
+ for key, value in mapping or ():
+ tmp.setdefault(key, []).append(value)
+ dict.__init__(self, tmp)
+
+ def __getstate__(self):
+ return dict(self.lists())
+
+ def __setstate__(self, value):
+ dict.clear(self)
+ dict.update(self, value)
+
+ def __iter__(self):
+ return self.iterkeys()
+
+ def __getitem__(self, key):
+ """Return the first data value for this key;
+ raises KeyError if not found.
+
+ :param key: The key to be looked up.
+ :raise KeyError: if the key does not exist.
+ """
+ if key in self:
+ return dict.__getitem__(self, key)[0]
+ raise KeyError(key)
+
+ def __setitem__(self, key, value):
+ """Like :meth:`add` but removes an existing key first.
+
+ :param key: the key for the value.
+ :param value: the value to set.
+ """
+ dict.__setitem__(self, key, [value])
+
+ def add(self, key, value):
+ """Adds a new value for the key.
+
+ .. versionadded:: 0.6
+
+ :param key: the key for the value.
+ :param value: the value to add.
+ """
+ dict.setdefault(self, key, []).append(value)
+
+ def getlist(self, key, type=None):
+ """Return the list of items for a given key. If that key is not in the
+ `MultiDict`, the return value will be an empty list. Just as `get`
+ `getlist` accepts a `type` parameter. All items will be converted
+ with the callable defined there.
+
+ :param key: The key to be looked up.
+ :param type: A callable that is used to cast the value in the
+ :class:`MultiDict`. If a :exc:`ValueError` is raised
+ by this callable the value will be removed from the list.
+ :return: a :class:`list` of all the values for the key.
+ """
+ try:
+ rv = dict.__getitem__(self, key)
+ except KeyError:
+ return []
+ if type is None:
+ return list(rv)
+ result = []
+ for item in rv:
+ try:
+ result.append(type(item))
+ except ValueError:
+ pass
+ return result
+
+ def setlist(self, key, new_list):
+ """Remove the old values for a key and add new ones. Note that the list
+ you pass the values in will be shallow-copied before it is inserted in
+ the dictionary.
+
+ >>> d = MultiDict()
+ >>> d.setlist('foo', ['1', '2'])
+ >>> d['foo']
+ '1'
+ >>> d.getlist('foo')
+ ['1', '2']
+
+ :param key: The key for which the values are set.
+ :param new_list: An iterable with the new values for the key. Old values
+ are removed first.
+ """
+ dict.__setitem__(self, key, list(new_list))
+
+ def setdefault(self, key, default=None):
+ """Returns the value for the key if it is in the dict, otherwise it
+ returns `default` and sets that value for `key`.
+
+ :param key: The key to be looked up.
+ :param default: The default value to be returned if the key is not
+ in the dict. If not further specified it's `None`.
+ """
+ if key not in self:
+ self[key] = default
+ else:
+ default = self[key]
+ return default
+
+ def setlistdefault(self, key, default_list=None):
+ """Like `setdefault` but sets multiple values. The list returned
+ is not a copy, but the list that is actually used internally. This
+ means that you can put new values into the dict by appending items
+ to the list:
+
+ >>> d = MultiDict({"foo": 1})
+ >>> d.setlistdefault("foo").extend([2, 3])
+ >>> d.getlist("foo")
+ [1, 2, 3]
+
+ :param key: The key to be looked up.
+ :param default: An iterable of default values. It is either copied
+ (in case it was a list) or converted into a list
+ before returned.
+ :return: a :class:`list`
+ """
+ if key not in self:
+ default_list = list(default_list or ())
+ dict.__setitem__(self, key, default_list)
+ else:
+ default_list = dict.__getitem__(self, key)
+ return default_list
+
+ def items(self, multi=False):
+ """Return a list of ``(key, value)`` pairs.
+
+ :param multi: If set to `True` the list returned will have a
+ pair for each value of each key. Otherwise it
+ will only contain pairs for the first value of
+ each key.
+
+ :return: a :class:`list`
+ """
+ return list(self.iteritems(multi))
+
+ def lists(self):
+ """Return a list of ``(key, values)`` pairs, where values is the list of
+ all values associated with the key.
+
+ :return: a :class:`list`
+ """
+ return list(self.iterlists())
+
+ def values(self):
+ """Returns a list of the first value on every key's value list.
+
+ :return: a :class:`list`.
+ """
+ return [self[key] for key in self.iterkeys()]
+
+ def listvalues(self):
+ """Return a list of all values associated with a key. Zipping
+ :meth:`keys` and this is the same as calling :meth:`lists`:
+
+ >>> d = MultiDict({"foo": [1, 2, 3]})
+ >>> zip(d.keys(), d.listvalues()) == d.lists()
+ True
+
+ :return: a :class:`list`
+ """
+ return list(self.iterlistvalues())
+
+ def iteritems(self, multi=False):
+ """Like :meth:`items` but returns an iterator."""
+ for key, values in dict.iteritems(self):
+ if multi:
+ for value in values:
+ yield key, value
+ else:
+ yield key, values[0]
+
+ def iterlists(self):
+ """Like :meth:`items` but returns an iterator."""
+ for key, values in dict.iteritems(self):
+ yield key, list(values)
+
+ def itervalues(self):
+ """Like :meth:`values` but returns an iterator."""
+ for values in dict.itervalues(self):
+ yield values[0]
+
+ def iterlistvalues(self):
+ """Like :meth:`listvalues` but returns an iterator."""
+ return dict.itervalues(self)
+
+ def copy(self):
+ """Return a shallow copy of this object."""
+ return self.__class__(self)
+
+ def to_dict(self, flat=True):
+ """Return the contents as regular dict. If `flat` is `True` the
+ returned dict will only have the first item present, if `flat` is
+ `False` all values will be returned as lists.
+
+ :param flat: If set to `False` the dict returned will have lists
+ with all the values in it. Otherwise it will only
+ contain the first value for each key.
+ :return: a :class:`dict`
+ """
+ if flat:
+ return dict(self.iteritems())
+ return dict(self.lists())
+
+ def update(self, other_dict):
+ """update() extends rather than replaces existing key lists."""
+ for key, value in iter_multi_items(other_dict):
+ MultiDict.add(self, key, value)
+
+ def pop(self, key, default=_missing):
+ """Pop the first item for a list on the dict. Afterwards the
+ key is removed from the dict, so additional values are discarded:
+
+ >>> d = MultiDict({"foo": [1, 2, 3]})
+ >>> d.pop("foo")
+ 1
+ >>> "foo" in d
+ False
+
+ :param key: the key to pop.
+ :param default: if provided the value to return if the key was
+ not in the dictionary.
+ """
+ try:
+ return dict.pop(self, key)[0]
+ except KeyError, e:
+ if default is not _missing:
+ return default
+ raise KeyError(str(e))
+
+ def popitem(self):
+ """Pop an item from the dict."""
+ try:
+ item = dict.popitem(self)
+ return (item[0], item[1][0])
+ except KeyError, e:
+ raise KeyError(str(e))
+
+ def poplist(self, key):
+ """Pop the list for a key from the dict. If the key is not in the dict
+ an empty list is returned.
+
+ .. versionchanged:: 0.5
+ If the key does no longer exist a list is returned instead of
+ raising an error.
+ """
+ return dict.pop(self, key, [])
+
+ def popitemlist(self):
+ """Pop a ``(key, list)`` tuple from the dict."""
+ try:
+ return dict.popitem(self)
+ except KeyError, e:
+ raise KeyError(str(e))
+
+ def __copy__(self):
+ return self.copy()
+
+ def __repr__(self):
+ return '%s(%r)' % (self.__class__.__name__, self.items(multi=True))
diff --git a/requests/packages/poster/__init__.py b/requests/packages/poster/__init__.py
deleted file mode 100644
index 6e216fc..0000000
--- a/requests/packages/poster/__init__.py
+++ /dev/null
@@ -1,34 +0,0 @@
-# Copyright (c) 2010 Chris AtLee
-#
-# Permission is hereby granted, free of charge, to any person obtaining a copy
-# of this software and associated documentation files (the "Software"), to deal
-# in the Software without restriction, including without limitation the rights
-# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
-# copies of the Software, and to permit persons to whom the Software is
-# furnished to do so, subject to the following conditions:
-#
-# The above copyright notice and this permission notice shall be included in
-# all copies or substantial portions of the Software.
-#
-# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
-# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
-# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
-# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
-# THE SOFTWARE.
-"""poster module
-
-Support for streaming HTTP uploads, and multipart/form-data encoding
-
-```poster.version``` is a 3-tuple of integers representing the version number.
-New releases of poster will always have a version number that compares greater
-than an older version of poster.
-New in version 0.6."""
-
-from __future__ import absolute_import
-
-from . import streaminghttp
-from . import encode
-
-version = (0, 8, 0) # Thanks JP!
diff --git a/requests/packages/poster/encode.py b/requests/packages/poster/encode.py
deleted file mode 100644
index cf2298d..0000000
--- a/requests/packages/poster/encode.py
+++ /dev/null
@@ -1,414 +0,0 @@
-"""multipart/form-data encoding module
-
-This module provides functions that faciliate encoding name/value pairs
-as multipart/form-data suitable for a HTTP POST or PUT request.
-
-multipart/form-data is the standard way to upload files over HTTP"""
-
-__all__ = ['gen_boundary', 'encode_and_quote', 'MultipartParam',
- 'encode_string', 'encode_file_header', 'get_body_size', 'get_headers',
- 'multipart_encode']
-
-try:
- import uuid
- def gen_boundary():
- """Returns a random string to use as the boundary for a message"""
- return uuid.uuid4().hex
-except ImportError:
- import random, sha
- def gen_boundary():
- """Returns a random string to use as the boundary for a message"""
- bits = random.getrandbits(160)
- return sha.new(str(bits)).hexdigest()
-
-import urllib, re, os, mimetypes
-try:
- from email.header import Header
-except ImportError:
- # Python 2.4
- from email.Header import Header
-
-def encode_and_quote(data):
- """If ``data`` is unicode, return urllib.quote_plus(data.encode("utf-8"))
- otherwise return urllib.quote_plus(data)"""
- if data is None:
- return None
-
- if isinstance(data, unicode):
- data = data.encode("utf-8")
- return urllib.quote_plus(data)
-
-def _strify(s):
- """If s is a unicode string, encode it to UTF-8 and return the results,
- otherwise return str(s), or None if s is None"""
- if s is None:
- return None
- if isinstance(s, unicode):
- return s.encode("utf-8")
- return str(s)
-
-class MultipartParam(object):
- """Represents a single parameter in a multipart/form-data request
-
- ``name`` is the name of this parameter.
-
- If ``value`` is set, it must be a string or unicode object to use as the
- data for this parameter.
-
- If ``filename`` is set, it is what to say that this parameter's filename
- is. Note that this does not have to be the actual filename any local file.
-
- If ``filetype`` is set, it is used as the Content-Type for this parameter.
- If unset it defaults to "text/plain; charset=utf8"
-
- If ``filesize`` is set, it specifies the length of the file ``fileobj``
-
- If ``fileobj`` is set, it must be a file-like object that supports
- .read().
-
- Both ``value`` and ``fileobj`` must not be set, doing so will
- raise a ValueError assertion.
-
- If ``fileobj`` is set, and ``filesize`` is not specified, then
- the file's size will be determined first by stat'ing ``fileobj``'s
- file descriptor, and if that fails, by seeking to the end of the file,
- recording the current position as the size, and then by seeking back to the
- beginning of the file.
-
- ``cb`` is a callable which will be called from iter_encode with (self,
- current, total), representing the current parameter, current amount
- transferred, and the total size.
- """
- def __init__(self, name, value=None, filename=None, filetype=None,
- filesize=None, fileobj=None, cb=None):
- self.name = Header(name).encode()
- self.value = _strify(value)
- if filename is None:
- self.filename = None
- else:
- if isinstance(filename, unicode):
- # Encode with XML entities
- self.filename = filename.encode("ascii", "xmlcharrefreplace")
- else:
- self.filename = str(filename)
- self.filename = self.filename.encode("string_escape").\
- replace('"', '\\"')
- self.filetype = _strify(filetype)
-
- self.filesize = filesize
- self.fileobj = fileobj
- self.cb = cb
-
- if self.value is not None and self.fileobj is not None:
- raise ValueError("Only one of value or fileobj may be specified")
-
- if fileobj is not None and filesize is None:
- # Try and determine the file size
- try:
- self.filesize = os.fstat(fileobj.fileno()).st_size
- except (OSError, AttributeError):
- try:
- fileobj.seek(0, 2)
- self.filesize = fileobj.tell()
- fileobj.seek(0)
- except:
- raise ValueError("Could not determine filesize")
-
- def __cmp__(self, other):
- attrs = ['name', 'value', 'filename', 'filetype', 'filesize', 'fileobj']
- myattrs = [getattr(self, a) for a in attrs]
- oattrs = [getattr(other, a) for a in attrs]
- return cmp(myattrs, oattrs)
-
- def reset(self):
- if self.fileobj is not None:
- self.fileobj.seek(0)
- elif self.value is None:
- raise ValueError("Don't know how to reset this parameter")
-
- @classmethod
- def from_file(cls, paramname, filename):
- """Returns a new MultipartParam object constructed from the local
- file at ``filename``.
-
- ``filesize`` is determined by os.path.getsize(``filename``)
-
- ``filetype`` is determined by mimetypes.guess_type(``filename``)[0]
-
- ``filename`` is set to os.path.basename(``filename``)
- """
-
- return cls(paramname, filename=os.path.basename(filename),
- filetype=mimetypes.guess_type(filename)[0],
- filesize=os.path.getsize(filename),
- fileobj=open(filename, "rb"))
-
- @classmethod
- def from_params(cls, params):
- """Returns a list of MultipartParam objects from a sequence of
- name, value pairs, MultipartParam instances,
- or from a mapping of names to values
-
- The values may be strings or file objects, or MultipartParam objects.
- MultipartParam object names must match the given names in the
- name,value pairs or mapping, if applicable."""
- if hasattr(params, 'items'):
- params = params.items()
-
- retval = []
- for item in params:
- if isinstance(item, cls):
- retval.append(item)
- continue
- name, value = item
- if isinstance(value, cls):
- assert value.name == name
- retval.append(value)
- continue
- if hasattr(value, 'read'):
- # Looks like a file object
- filename = getattr(value, 'name', None)
- if filename is not None:
- filetype = mimetypes.guess_type(filename)[0]
- else:
- filetype = None
-
- retval.append(cls(name=name, filename=filename,
- filetype=filetype, fileobj=value))
- else:
- retval.append(cls(name, value))
- return retval
-
- def encode_hdr(self, boundary):
- """Returns the header of the encoding of this parameter"""
- boundary = encode_and_quote(boundary)
-
- headers = ["--%s" % boundary]
-
- if self.filename:
- disposition = 'form-data; name="%s"; filename="%s"' % (self.name,
- self.filename)
- else:
- disposition = 'form-data; name="%s"' % self.name
-
- headers.append("Content-Disposition: %s" % disposition)
-
- if self.filetype:
- filetype = self.filetype
- else:
- filetype = "text/plain; charset=utf-8"
-
- headers.append("Content-Type: %s" % filetype)
-
- headers.append("")
- headers.append("")
-
- return "\r\n".join(headers)
-
- def encode(self, boundary):
- """Returns the string encoding of this parameter"""
- if self.value is None:
- value = self.fileobj.read()
- else:
- value = self.value
-
- if re.search("^--%s$" % re.escape(boundary), value, re.M):
- raise ValueError("boundary found in encoded string")
-
- return "%s%s\r\n" % (self.encode_hdr(boundary), value)
-
- def iter_encode(self, boundary, blocksize=4096):
- """Yields the encoding of this parameter
- If self.fileobj is set, then blocks of ``blocksize`` bytes are read and
- yielded."""
- total = self.get_size(boundary)
- current = 0
- if self.value is not None:
- block = self.encode(boundary)
- current += len(block)
- yield block
- if self.cb:
- self.cb(self, current, total)
- else:
- block = self.encode_hdr(boundary)
- current += len(block)
- yield block
- if self.cb:
- self.cb(self, current, total)
- last_block = ""
- encoded_boundary = "--%s" % encode_and_quote(boundary)
- boundary_exp = re.compile("^%s$" % re.escape(encoded_boundary),
- re.M)
- while True:
- block = self.fileobj.read(blocksize)
- if not block:
- current += 2
- yield "\r\n"
- if self.cb:
- self.cb(self, current, total)
- break
- last_block += block
- if boundary_exp.search(last_block):
- raise ValueError("boundary found in file data")
- last_block = last_block[-len(encoded_boundary)-2:]
- current += len(block)
- yield block
- if self.cb:
- self.cb(self, current, total)
-
- def get_size(self, boundary):
- """Returns the size in bytes that this param will be when encoded
- with the given boundary."""
- if self.filesize is not None:
- valuesize = self.filesize
- else:
- valuesize = len(self.value)
-
- return len(self.encode_hdr(boundary)) + 2 + valuesize
-
-def encode_string(boundary, name, value):
- """Returns ``name`` and ``value`` encoded as a multipart/form-data
- variable. ``boundary`` is the boundary string used throughout
- a single request to separate variables."""
-
- return MultipartParam(name, value).encode(boundary)
-
-def encode_file_header(boundary, paramname, filesize, filename=None,
- filetype=None):
- """Returns the leading data for a multipart/form-data field that contains
- file data.
-
- ``boundary`` is the boundary string used throughout a single request to
- separate variables.
-
- ``paramname`` is the name of the variable in this request.
-
- ``filesize`` is the size of the file data.
-
- ``filename`` if specified is the filename to give to this field. This
- field is only useful to the server for determining the original filename.
-
- ``filetype`` if specified is the MIME type of this file.
-
- The actual file data should be sent after this header has been sent.
- """
-
- return MultipartParam(paramname, filesize=filesize, filename=filename,
- filetype=filetype).encode_hdr(boundary)
-
-def get_body_size(params, boundary):
- """Returns the number of bytes that the multipart/form-data encoding
- of ``params`` will be."""
- size = sum(p.get_size(boundary) for p in MultipartParam.from_params(params))
- return size + len(boundary) + 6
-
-def get_headers(params, boundary):
- """Returns a dictionary with Content-Type and Content-Length headers
- for the multipart/form-data encoding of ``params``."""
- headers = {}
- boundary = urllib.quote_plus(boundary)
- headers['Content-Type'] = "multipart/form-data; boundary=%s" % boundary
- headers['Content-Length'] = str(get_body_size(params, boundary))
- return headers
-
-class multipart_yielder:
- def __init__(self, params, boundary, cb):
- self.params = params
- self.boundary = boundary
- self.cb = cb
-
- self.i = 0
- self.p = None
- self.param_iter = None
- self.current = 0
- self.total = get_body_size(params, boundary)
-
- def __iter__(self):
- return self
-
- def next(self):
- """generator function to yield multipart/form-data representation
- of parameters"""
- if self.param_iter is not None:
- try:
- block = self.param_iter.next()
- self.current += len(block)
- if self.cb:
- self.cb(self.p, self.current, self.total)
- return block
- except StopIteration:
- self.p = None
- self.param_iter = None
-
- if self.i is None:
- raise StopIteration
- elif self.i >= len(self.params):
- self.param_iter = None
- self.p = None
- self.i = None
- block = "--%s--\r\n" % self.boundary
- self.current += len(block)
- if self.cb:
- self.cb(self.p, self.current, self.total)
- return block
-
- self.p = self.params[self.i]
- self.param_iter = self.p.iter_encode(self.boundary)
- self.i += 1
- return self.next()
-
- def reset(self):
- self.i = 0
- self.current = 0
- for param in self.params:
- param.reset()
-
-def multipart_encode(params, boundary=None, cb=None):
- """Encode ``params`` as multipart/form-data.
-
- ``params`` should be a sequence of (name, value) pairs or MultipartParam
- objects, or a mapping of names to values.
- Values are either strings parameter values, or file-like objects to use as
- the parameter value. The file-like objects must support .read() and either
- .fileno() or both .seek() and .tell().
-
- If ``boundary`` is set, then it as used as the MIME boundary. Otherwise
- a randomly generated boundary will be used. In either case, if the
- boundary string appears in the parameter values a ValueError will be
- raised.
-
- If ``cb`` is set, it should be a callback which will get called as blocks
- of data are encoded. It will be called with (param, current, total),
- indicating the current parameter being encoded, the current amount encoded,
- and the total amount to encode.
-
- Returns a tuple of `datagen`, `headers`, where `datagen` is a
- generator that will yield blocks of data that make up the encoded
- parameters, and `headers` is a dictionary with the assoicated
- Content-Type and Content-Length headers.
-
- Examples:
-
- >>> datagen, headers = multipart_encode( [("key", "value1"), ("key", "value2")] )
- >>> s = "".join(datagen)
- >>> assert "value2" in s and "value1" in s
-
- >>> p = MultipartParam("key", "value2")
- >>> datagen, headers = multipart_encode( [("key", "value1"), p] )
- >>> s = "".join(datagen)
- >>> assert "value2" in s and "value1" in s
-
- >>> datagen, headers = multipart_encode( {"key": "value1"} )
- >>> s = "".join(datagen)
- >>> assert "value2" not in s and "value1" in s
-
- """
- if boundary is None:
- boundary = gen_boundary()
- else:
- boundary = urllib.quote_plus(boundary)
-
- headers = get_headers(params, boundary)
- params = MultipartParam.from_params(params)
-
- return multipart_yielder(params, boundary, cb), headers
diff --git a/requests/packages/poster/streaminghttp.py b/requests/packages/poster/streaminghttp.py
deleted file mode 100644
index 1b591d4..0000000
--- a/requests/packages/poster/streaminghttp.py
+++ /dev/null
@@ -1,199 +0,0 @@
-"""Streaming HTTP uploads module.
-
-This module extends the standard httplib and urllib2 objects so that
-iterable objects can be used in the body of HTTP requests.
-
-In most cases all one should have to do is call :func:`register_openers()`
-to register the new streaming http handlers which will take priority over
-the default handlers, and then you can use iterable objects in the body
-of HTTP requests.
-
-**N.B.** You must specify a Content-Length header if using an iterable object
-since there is no way to determine in advance the total size that will be
-yielded, and there is no way to reset an interator.
-
-Example usage:
-
->>> from StringIO import StringIO
->>> import urllib2, poster.streaminghttp
-
->>> opener = poster.streaminghttp.register_openers()
-
->>> s = "Test file data"
->>> f = StringIO(s)
-
->>> req = urllib2.Request("http://localhost:5000", f,
-... {'Content-Length': str(len(s))})
-"""
-
-import httplib, urllib2, socket
-from httplib import NotConnected
-
-__all__ = ['StreamingHTTPConnection', 'StreamingHTTPRedirectHandler',
- 'StreamingHTTPHandler', 'register_openers']
-
-if hasattr(httplib, 'HTTPS'):
- __all__.extend(['StreamingHTTPSHandler', 'StreamingHTTPSConnection'])
-
-class _StreamingHTTPMixin:
- """Mixin class for HTTP and HTTPS connections that implements a streaming
- send method."""
- def send(self, value):
- """Send ``value`` to the server.
-
- ``value`` can be a string object, a file-like object that supports
- a .read() method, or an iterable object that supports a .next()
- method.
- """
- # Based on python 2.6's httplib.HTTPConnection.send()
- if self.sock is None:
- if self.auto_open:
- self.connect()
- else:
- raise NotConnected()
-
- # send the data to the server. if we get a broken pipe, then close
- # the socket. we want to reconnect when somebody tries to send again.
- #
- # NOTE: we DO propagate the error, though, because we cannot simply
- # ignore the error... the caller will know if they can retry.
- if self.debuglevel > 0:
- print "send:", repr(value)
- try:
- blocksize = 8192
- if hasattr(value, 'read') :
- if hasattr(value, 'seek'):
- value.seek(0)
- if self.debuglevel > 0:
- print "sendIng a read()able"
- data = value.read(blocksize)
- while data:
- self.sock.sendall(data)
- data = value.read(blocksize)
- elif hasattr(value, 'next'):
- if hasattr(value, 'reset'):
- value.reset()
- if self.debuglevel > 0:
- print "sendIng an iterable"
- for data in value:
- self.sock.sendall(data)
- else:
- self.sock.sendall(value)
- except socket.error, v:
- if v[0] == 32: # Broken pipe
- self.close()
- raise
-
-class StreamingHTTPConnection(_StreamingHTTPMixin, httplib.HTTPConnection):
- """Subclass of `httplib.HTTPConnection` that overrides the `send()` method
- to support iterable body objects"""
-
-class StreamingHTTPRedirectHandler(urllib2.HTTPRedirectHandler):
- """Subclass of `urllib2.HTTPRedirectHandler` that overrides the
- `redirect_request` method to properly handle redirected POST requests
-
- This class is required because python 2.5's HTTPRedirectHandler does
- not remove the Content-Type or Content-Length headers when requesting
- the new resource, but the body of the original request is not preserved.
- """
-
- handler_order = urllib2.HTTPRedirectHandler.handler_order - 1
-
- # From python2.6 urllib2's HTTPRedirectHandler
- def redirect_request(self, req, fp, code, msg, headers, newurl):
- """Return a Request or None in response to a redirect.
-
- This is called by the http_error_30x methods when a
- redirection response is received. If a redirection should
- take place, return a new Request to allow http_error_30x to
- perform the redirect. Otherwise, raise HTTPError if no-one
- else should try to handle this url. Return None if you can't
- but another Handler might.
- """
- m = req.get_method()
- if (code in (301, 302, 303, 307) and m in ("GET", "HEAD")
- or code in (301, 302, 303) and m == "POST"):
- # Strictly (according to RFC 2616), 301 or 302 in response
- # to a POST MUST NOT cause a redirection without confirmation
- # from the user (of urllib2, in this case). In practice,
- # essentially all clients do redirect in this case, so we
- # do the same.
- # be conciliant with URIs containing a space
- newurl = newurl.replace(' ', '%20')
- newheaders = dict((k, v) for k, v in req.headers.items()
- if k.lower() not in (
- "content-length", "content-type")
- )
- return urllib2.Request(newurl,
- headers=newheaders,
- origin_req_host=req.get_origin_req_host(),
- unverifiable=True)
- else:
- raise urllib2.HTTPError(req.get_full_url(), code, msg, headers, fp)
-
-class StreamingHTTPHandler(urllib2.HTTPHandler):
- """Subclass of `urllib2.HTTPHandler` that uses
- StreamingHTTPConnection as its http connection class."""
-
- handler_order = urllib2.HTTPHandler.handler_order - 1
-
- def http_open(self, req):
- """Open a StreamingHTTPConnection for the given request"""
- return self.do_open(StreamingHTTPConnection, req)
-
- def http_request(self, req):
- """Handle a HTTP request. Make sure that Content-Length is specified
- if we're using an interable value"""
- # Make sure that if we're using an iterable object as the request
- # body, that we've also specified Content-Length
- if req.has_data():
- data = req.get_data()
- if hasattr(data, 'read') or hasattr(data, 'next'):
- if not req.has_header('Content-length'):
- raise ValueError(
- "No Content-Length specified for iterable body")
- return urllib2.HTTPHandler.do_request_(self, req)
-
-if hasattr(httplib, 'HTTPS'):
- class StreamingHTTPSConnection(_StreamingHTTPMixin,
- httplib.HTTPSConnection):
- """Subclass of `httplib.HTTSConnection` that overrides the `send()`
- method to support iterable body objects"""
-
- class StreamingHTTPSHandler(urllib2.HTTPSHandler):
- """Subclass of `urllib2.HTTPSHandler` that uses
- StreamingHTTPSConnection as its http connection class."""
-
- handler_order = urllib2.HTTPSHandler.handler_order - 1
-
- def https_open(self, req):
- return self.do_open(StreamingHTTPSConnection, req)
-
- def https_request(self, req):
- # Make sure that if we're using an iterable object as the request
- # body, that we've also specified Content-Length
- if req.has_data():
- data = req.get_data()
- if hasattr(data, 'read') or hasattr(data, 'next'):
- if not req.has_header('Content-length'):
- raise ValueError(
- "No Content-Length specified for iterable body")
- return urllib2.HTTPSHandler.do_request_(self, req)
-
-
-def get_handlers():
- handlers = [StreamingHTTPHandler, StreamingHTTPRedirectHandler]
- if hasattr(httplib, "HTTPS"):
- handlers.append(StreamingHTTPSHandler)
- return handlers
-
-def register_openers():
- """Register the streaming http handlers in the global urllib2 default
- opener object.
-
- Returns the created OpenerDirector object."""
- opener = urllib2.build_opener(*get_handlers())
-
- urllib2.install_opener(opener)
-
- return opener
diff --git a/requests/packages/urllib3/__init__.py b/requests/packages/urllib3/__init__.py
new file mode 100644
index 0000000..20b1fb4
--- /dev/null
+++ b/requests/packages/urllib3/__init__.py
@@ -0,0 +1,48 @@
+# urllib3/__init__.py
+# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+"""
+urllib3 - Thread-safe connection pooling and re-using.
+"""
+
+__author__ = 'Andrey Petrov (andrey.petrov@shazow.net)'
+__license__ = 'MIT'
+__version__ = '1.0.2'
+
+
+from .connectionpool import (
+ HTTPConnectionPool,
+ HTTPSConnectionPool,
+ connection_from_url,
+ get_host,
+ make_headers)
+
+
+from .exceptions import (
+ HTTPError,
+ MaxRetryError,
+ SSLError,
+ TimeoutError)
+
+from .poolmanager import PoolManager, ProxyManager, proxy_from_url
+from .response import HTTPResponse
+from .filepost import encode_multipart_formdata
+
+
+# Set default logging handler to avoid "No handler found" warnings.
+import logging
+try:
+ from logging import NullHandler
+except ImportError:
+ class NullHandler(logging.Handler):
+ def emit(self, record):
+ pass
+
+logging.getLogger(__name__).addHandler(NullHandler())
+
+# ... Clean up.
+del logging
+del NullHandler
diff --git a/requests/packages/urllib3/_collections.py b/requests/packages/urllib3/_collections.py
new file mode 100644
index 0000000..00b2cd5
--- /dev/null
+++ b/requests/packages/urllib3/_collections.py
@@ -0,0 +1,131 @@
+# urllib3/_collections.py
+# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from collections import deque
+
+from threading import RLock
+
+__all__ = ['RecentlyUsedContainer']
+
+
+class AccessEntry(object):
+ __slots__ = ('key', 'is_valid')
+
+ def __init__(self, key, is_valid=True):
+ self.key = key
+ self.is_valid = is_valid
+
+
+class RecentlyUsedContainer(dict):
+ """
+ Provides a dict-like that maintains up to ``maxsize`` keys while throwing
+ away the least-recently-used keys beyond ``maxsize``.
+ """
+
+ # If len(self.access_log) exceeds self._maxsize * CLEANUP_FACTOR, then we
+ # will attempt to cleanup the invalidated entries in the access_log
+ # datastructure during the next 'get' operation.
+ CLEANUP_FACTOR = 10
+
+ def __init__(self, maxsize=10):
+ self._maxsize = maxsize
+
+ self._container = {}
+
+        # We use a deque to store our keys ordered by the last access.
+ self.access_log = deque()
+ self.access_log_lock = RLock()
+
+ # We look up the access log entry by the key to invalidate it so we can
+        # insert a new authoritative entry at the head without having to dig and
+ # find the old entry for removal immediately.
+ self.access_lookup = {}
+
+ # Trigger a heap cleanup when we get past this size
+ self.access_log_limit = maxsize * self.CLEANUP_FACTOR
+
+ def _invalidate_entry(self, key):
+ "If exists: Invalidate old entry and return it."
+ old_entry = self.access_lookup.get(key)
+ if old_entry:
+ old_entry.is_valid = False
+
+ return old_entry
+
+ def _push_entry(self, key):
+ "Push entry onto our access log, invalidate the old entry if exists."
+ self._invalidate_entry(key)
+
+ new_entry = AccessEntry(key)
+ self.access_lookup[key] = new_entry
+
+ self.access_log_lock.acquire()
+ self.access_log.appendleft(new_entry)
+ self.access_log_lock.release()
+
+ def _prune_entries(self, num):
+ "Pop entries from our access log until we popped ``num`` valid ones."
+ while num > 0:
+ self.access_log_lock.acquire()
+ p = self.access_log.pop()
+ self.access_log_lock.release()
+
+ if not p.is_valid:
+ continue # Invalidated entry, skip
+
+ dict.pop(self, p.key, None)
+ self.access_lookup.pop(p.key, None)
+ num -= 1
+
+ def _prune_invalidated_entries(self):
+ "Rebuild our access_log without the invalidated entries."
+ self.access_log_lock.acquire()
+ self.access_log = deque(e for e in self.access_log if e.is_valid)
+ self.access_log_lock.release()
+
+ def _get_ordered_access_keys(self):
+ "Return ordered access keys for inspection. Used for testing."
+ self.access_log_lock.acquire()
+ r = [e.key for e in self.access_log if e.is_valid]
+ self.access_log_lock.release()
+
+ return r
+
+ def __getitem__(self, key):
+ item = dict.get(self, key)
+
+ if not item:
+ raise KeyError(key)
+
+ # Insert new entry with new high priority, also implicitly invalidates
+ # the old entry.
+ self._push_entry(key)
+
+ if len(self.access_log) > self.access_log_limit:
+ # Heap is getting too big, try to clean up any tailing invalidated
+ # entries.
+ self._prune_invalidated_entries()
+
+ return item
+
+ def __setitem__(self, key, item):
+ # Add item to our container and access log
+ dict.__setitem__(self, key, item)
+ self._push_entry(key)
+
+ # Discard invalid and excess entries
+ self._prune_entries(len(self) - self._maxsize)
+
+ def __delitem__(self, key):
+ self._invalidate_entry(key)
+ self.access_lookup.pop(key, None)
+ dict.__delitem__(self, key)
+
+ def get(self, key, default=None):
+ try:
+ return self[key]
+ except KeyError:
+ return default
diff --git a/requests/packages/urllib3/connectionpool.py b/requests/packages/urllib3/connectionpool.py
new file mode 100644
index 0000000..8b10dc7
--- /dev/null
+++ b/requests/packages/urllib3/connectionpool.py
@@ -0,0 +1,525 @@
+# urllib3/connectionpool.py
+# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import logging
+import socket
+
+
+from httplib import HTTPConnection, HTTPSConnection, HTTPException
+from Queue import Queue, Empty, Full
+from select import select
+from socket import error as SocketError, timeout as SocketTimeout
+
+
+try:
+ import ssl
+ BaseSSLError = ssl.SSLError
+except ImportError:
+ ssl = None
+ BaseSSLError = None
+
+
+from .request import RequestMethods
+from .response import HTTPResponse
+from .exceptions import (
+ SSLError,
+ MaxRetryError,
+ TimeoutError,
+ HostChangedError,
+ EmptyPoolError,
+)
+
+
+log = logging.getLogger(__name__)
+
+_Default = object()
+
+
+## Connection objects (extension of httplib)
+
+class VerifiedHTTPSConnection(HTTPSConnection):
+ """
+ Based on httplib.HTTPSConnection but wraps the socket with
+ SSL certification.
+ """
+ cert_reqs = None
+ ca_certs = None
+
+ def set_cert(self, key_file=None, cert_file=None,
+ cert_reqs='CERT_NONE', ca_certs=None):
+ ssl_req_scheme = {
+ 'CERT_NONE': ssl.CERT_NONE,
+ 'CERT_OPTIONAL': ssl.CERT_OPTIONAL,
+ 'CERT_REQUIRED': ssl.CERT_REQUIRED
+ }
+
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = ssl_req_scheme.get(cert_reqs) or ssl.CERT_NONE
+ self.ca_certs = ca_certs
+
+ def connect(self):
+ # Add certificate verification
+ sock = socket.create_connection((self.host, self.port), self.timeout)
+
+ # Wrap socket using verification with the root certs in
+ # trusted_root_certs
+ self.sock = ssl.wrap_socket(sock, self.key_file, self.cert_file,
+ cert_reqs=self.cert_reqs,
+ ca_certs=self.ca_certs)
+
+
+## Pool objects
+
+class ConnectionPool(object):
+ """
+ Base class for all connection pools, such as
+ :class:`.HTTPConnectionPool` and :class:`.HTTPSConnectionPool`.
+ """
+ pass
+
+
+class HTTPConnectionPool(ConnectionPool, RequestMethods):
+ """
+ Thread-safe connection pool for one host.
+
+ :param host:
+ Host used for this HTTP Connection (e.g. "localhost"), passed into
+ :class:`httplib.HTTPConnection`.
+
+ :param port:
+ Port used for this HTTP Connection (None is equivalent to 80), passed
+ into :class:`httplib.HTTPConnection`.
+
+ :param strict:
+ Causes BadStatusLine to be raised if the status line can't be parsed
+ as a valid HTTP/1.0 or 1.1 status line, passed into
+ :class:`httplib.HTTPConnection`.
+
+ :param timeout:
+ Socket timeout for each individual connection, can be a float. None
+ disables timeout.
+
+ :param maxsize:
+ Number of connections to save that can be reused. More than 1 is useful
+ in multithreaded situations. If ``block`` is set to false, more
+ connections will be created but they will not be saved once they've
+ been used.
+
+ :param block:
+ If set to True, no more than ``maxsize`` connections will be used at
+ a time. When no free connections are available, the call will block
+ until a connection has been released. This is a useful side effect for
+ particular multithreaded situations where one does not want to use more
+ than maxsize connections per host to prevent flooding.
+
+ :param headers:
+ Headers to include with all requests, unless other headers are given
+ explicitly.
+ """
+
+ scheme = 'http'
+
+ def __init__(self, host, port=None, strict=False, timeout=None, maxsize=1,
+ block=False, headers=None):
+ self.host = host
+ self.port = port
+ self.strict = strict
+ self.timeout = timeout
+ self.pool = Queue(maxsize)
+ self.block = block
+ self.headers = headers or {}
+
+ # Fill the queue up so that doing get() on it will block properly
+ for _ in xrange(maxsize):
+ self.pool.put(None)
+
+ # These are mostly for testing and debugging purposes.
+ self.num_connections = 0
+ self.num_requests = 0
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`httplib.HTTPConnection`.
+ """
+ self.num_connections += 1
+ log.info("Starting new HTTP connection (%d): %s" %
+ (self.num_connections, self.host))
+ return HTTPConnection(host=self.host, port=self.port)
+
+ def _get_conn(self, timeout=None):
+ """
+ Get a connection. Will return a pooled connection if one is available.
+
+ If no connections are available and :prop:`.block` is ``False``, then a
+ fresh connection is returned.
+
+ :param timeout:
+ Seconds to wait before giving up and raising
+ :class:`urllib3.exceptions.EmptyPoolError` if the pool is empty and
+ :prop:`.block` is ``True``.
+ """
+ conn = None
+ try:
+ conn = self.pool.get(block=self.block, timeout=timeout)
+
+ # If this is a persistent connection, check if it got disconnected
+ if conn and conn.sock and select([conn.sock], [], [], 0.0)[0]:
+ # Either data is buffered (bad), or the connection is dropped.
+ log.info("Resetting dropped connection: %s" % self.host)
+ conn.close()
+
+ except Empty:
+ if self.block:
+ raise EmptyPoolError("Pool reached maximum size and no more "
+ "connections are allowed.")
+ pass # Oh well, we'll create a new connection then
+
+ return conn or self._new_conn()
+
+ def _put_conn(self, conn):
+ """
+ Put a connection back into the pool.
+
+ :param conn:
+ Connection object for the current host and port as returned by
+ :meth:`._new_conn` or :meth:`._get_conn`.
+
+ If the pool is already full, the connection is discarded because we
+ exceeded maxsize. If connections are discarded frequently, then maxsize
+ should be increased.
+ """
+ try:
+ self.pool.put(conn, block=False)
+ except Full:
+ # This should never happen if self.block == True
+ log.warning("HttpConnectionPool is full, discarding connection: %s"
+ % self.host)
+
+ def _make_request(self, conn, method, url, timeout=_Default,
+ **httplib_request_kw):
+ """
+ Perform a request on a given httplib connection object taken from our
+ pool.
+ """
+ self.num_requests += 1
+
+ if timeout is _Default:
+ timeout = self.timeout
+
+ conn.request(method, url, **httplib_request_kw)
+ conn.sock.settimeout(timeout)
+ httplib_response = conn.getresponse()
+
+ log.debug("\"%s %s %s\" %s %s" %
+ (method, url,
+ conn._http_vsn_str, # pylint: disable-msg=W0212
+ httplib_response.status, httplib_response.length))
+
+ return httplib_response
+
+
+ def is_same_host(self, url):
+ """
+ Check if the given ``url`` is a member of the same host as this
+        connection pool.
+ """
+ # TODO: Add optional support for socket.gethostbyname checking.
+ return (url.startswith('/') or
+ get_host(url) == (self.scheme, self.host, self.port))
+
+ def urlopen(self, method, url, body=None, headers=None, retries=3,
+ redirect=True, assert_same_host=True, timeout=_Default,
+ pool_timeout=None, release_conn=None, **response_kw):
+ """
+ Get a connection from the pool and perform an HTTP request. This is the
+ lowest level call for making a request, so you'll need to specify all
+ the raw details.
+
+ .. note::
+
+ More commonly, it's appropriate to use a convenience method provided
+ by :class:`.RequestMethods`, such as :meth:`.request`.
+
+ :param method:
+ HTTP request method (such as GET, POST, PUT, etc.)
+
+ :param body:
+ Data to send in the request body (useful for creating
+ POST requests, see HTTPConnectionPool.post_url for
+ more convenience).
+
+ :param headers:
+ Dictionary of custom headers to send, such as User-Agent,
+ If-None-Match, etc. If None, pool headers are used. If provided,
+ these headers completely replace any pool-specific headers.
+
+ :param retries:
+ Number of retries to allow before raising a MaxRetryError exception.
+
+ :param redirect:
+ Automatically handle redirects (status codes 301, 302, 303, 307),
+ each redirect counts as a retry.
+
+ :param assert_same_host:
+ If ``True``, will make sure that the host of the pool requests is
+ consistent else will raise HostChangedError. When False, you can
+ use the pool on an HTTP proxy and request foreign hosts.
+
+ :param timeout:
+ If specified, overrides the default timeout for this one request.
+
+ :param pool_timeout:
+ If set and the pool is set to block=True, then this method will
+ block for ``pool_timeout`` seconds and raise EmptyPoolError if no
+ connection is available within the time period.
+
+ :param release_conn:
+ If False, then the urlopen call will not release the connection
+ back into the pool once a response is received. This is useful if
+ you're not preloading the response's content immediately. You will
+ need to call ``r.release_conn()`` on the response ``r`` to return
+ the connection back into the pool. If None, it takes the value of
+ ``response_kw.get('preload_content', True)``.
+
+ :param \**response_kw:
+ Additional parameters are passed to
+ :meth:`urllib3.response.HTTPResponse.from_httplib`
+ """
+ if headers is None:
+ headers = self.headers
+
+ if retries < 0:
+ raise MaxRetryError("Max retries exceeded for url: %s" % url)
+
+ if release_conn is None:
+ release_conn = response_kw.get('preload_content', True)
+
+ # Check host
+ if assert_same_host and not self.is_same_host(url):
+ host = "%s://%s" % (self.scheme, self.host)
+ if self.port:
+ host = "%s:%d" % (host, self.port)
+
+ raise HostChangedError("Connection pool with host '%s' tried to "
+ "open a foreign host: %s" % (host, url))
+
+ conn = None
+
+ try:
+ # Request a connection from the queue
+ # (Could raise SocketError: Bad file descriptor)
+ conn = self._get_conn(timeout=pool_timeout)
+
+ # Make the request on the httplib connection object
+ httplib_response = self._make_request(conn, method, url,
+ timeout=timeout,
+ body=body, headers=headers)
+
+ # If we're going to release the connection in ``finally:``, then
+ # the request doesn't need to know about the connection. Otherwise
+ # it will also try to release it and we'll have a double-release
+ # mess.
+ response_conn = not release_conn and conn
+
+ # Import httplib's response into our own wrapper object
+ response = HTTPResponse.from_httplib(httplib_response,
+ pool=self,
+ connection=response_conn,
+ **response_kw)
+
+ # else:
+ # The connection will be put back into the pool when
+ # ``response.release_conn()`` is called (implicitly by
+ # ``response.read()``)
+
+ except (SocketTimeout, Empty), e:
+ # Timed out either by socket or queue
+ raise TimeoutError("Request timed out after %s seconds" %
+ self.timeout)
+
+ except (BaseSSLError), e:
+ # SSL certificate error
+ raise SSLError(e)
+
+ except (HTTPException, SocketError), e:
+ # Connection broken, discard. It will be replaced next _get_conn().
+ conn = None
+
+ finally:
+ if conn and release_conn:
+ # Put the connection back to be reused
+ self._put_conn(conn)
+
+ if not conn:
+ log.warn("Retrying (%d attempts remain) after connection "
+ "broken by '%r': %s" % (retries, e, url))
+ return self.urlopen(method, url, body, headers, retries - 1,
+ redirect, assert_same_host) # Try again
+
+ # Handle redirection
+ if (redirect and
+ response.status in [301, 302, 303, 307] and
+ 'location' in response.headers): # Redirect, retry
+ log.info("Redirecting %s -> %s" %
+ (url, response.headers.get('location')))
+ return self.urlopen(method, response.headers.get('location'), body,
+ headers, retries - 1, redirect,
+ assert_same_host)
+
+ return response
+
+
+class HTTPSConnectionPool(HTTPConnectionPool):
+ """
+ Same as :class:`.HTTPConnectionPool`, but HTTPS.
+
+ When Python is compiled with the :mod:`ssl` module, then
+ :class:`.VerifiedHTTPSConnection` is used, which *can* verify certificates,
+    instead of :class:`httplib.HTTPSConnection`.
+
+ The ``key_file``, ``cert_file``, ``cert_reqs``, and ``ca_certs`` parameters
+ are only used if :mod:`ssl` is available and are fed into
+ :meth:`ssl.wrap_socket` to upgrade the connection socket into an SSL socket.
+ """
+
+ scheme = 'https'
+
+ def __init__(self, host, port=None,
+ strict=False, timeout=None, maxsize=1,
+ block=False, headers=None,
+ key_file=None, cert_file=None,
+ cert_reqs='CERT_NONE', ca_certs=None):
+
+ super(HTTPSConnectionPool, self).__init__(host, port,
+ strict, timeout, maxsize,
+ block, headers)
+ self.key_file = key_file
+ self.cert_file = cert_file
+ self.cert_reqs = cert_reqs
+ self.ca_certs = ca_certs
+
+ def _new_conn(self):
+ """
+ Return a fresh :class:`httplib.HTTPSConnection`.
+ """
+ self.num_connections += 1
+ log.info("Starting new HTTPS connection (%d): %s"
+ % (self.num_connections, self.host))
+
+ if not ssl:
+ return HTTPSConnection(host=self.host, port=self.port)
+
+ connection = VerifiedHTTPSConnection(host=self.host, port=self.port)
+ connection.set_cert(key_file=self.key_file, cert_file=self.cert_file,
+ cert_reqs=self.cert_reqs, ca_certs=self.ca_certs)
+ return connection
+
+
+## Helpers
+
+def make_headers(keep_alive=None, accept_encoding=None, user_agent=None,
+ basic_auth=None):
+ """
+ Shortcuts for generating request headers.
+
+ :param keep_alive:
+ If ``True``, adds 'connection: keep-alive' header.
+
+ :param accept_encoding:
+ Can be a boolean, list, or string.
+ ``True`` translates to 'gzip,deflate'.
+ List will get joined by comma.
+ String will be used as provided.
+
+ :param user_agent:
+ String representing the user-agent you want, such as
+ "python-urllib3/0.6"
+
+ :param basic_auth:
+ Colon-separated username:password string for 'authorization: basic ...'
+ auth header.
+
+ Example: ::
+
+ >>> make_headers(keep_alive=True, user_agent="Batman/1.0")
+ {'connection': 'keep-alive', 'user-agent': 'Batman/1.0'}
+ >>> make_headers(accept_encoding=True)
+ {'accept-encoding': 'gzip,deflate'}
+ """
+ headers = {}
+ if accept_encoding:
+ if isinstance(accept_encoding, str):
+ pass
+ elif isinstance(accept_encoding, list):
+ accept_encoding = ','.join(accept_encoding)
+ else:
+ accept_encoding = 'gzip,deflate'
+ headers['accept-encoding'] = accept_encoding
+
+ if user_agent:
+ headers['user-agent'] = user_agent
+
+ if keep_alive:
+ headers['connection'] = 'keep-alive'
+
+ if basic_auth:
+ headers['authorization'] = 'Basic ' + \
+ basic_auth.encode('base64').strip()
+
+ return headers
+
+
+def get_host(url):
+ """
+ Given a url, return its scheme, host and port (None if it's not there).
+
+ For example: ::
+
+ >>> get_host('http://google.com/mail/')
+ ('http', 'google.com', None)
+ >>> get_host('google.com:80')
+ ('http', 'google.com', 80)
+ """
+ # This code is actually similar to urlparse.urlsplit, but much
+ # simplified for our needs.
+ port = None
+ scheme = 'http'
+ if '//' in url:
+ scheme, url = url.split('://', 1)
+ if '/' in url:
+ url, _path = url.split('/', 1)
+ if ':' in url:
+ url, port = url.split(':', 1)
+ port = int(port)
+ return scheme, url, port
+
+
+def connection_from_url(url, **kw):
+ """
+ Given a url, return an :class:`.ConnectionPool` instance of its host.
+
+ This is a shortcut for not having to parse out the scheme, host, and port
+ of the url before creating an :class:`.ConnectionPool` instance.
+
+ :param url:
+ Absolute URL string that must include the scheme. Port is optional.
+
+ :param \**kw:
+ Passes additional parameters to the constructor of the appropriate
+ :class:`.ConnectionPool`. Useful for specifying things like
+ timeout, maxsize, headers, etc.
+
+ Example: ::
+
+ >>> conn = connection_from_url('http://google.com/')
+ >>> r = conn.request('GET', '/')
+ """
+ scheme, host, port = get_host(url)
+ if scheme == 'https':
+ return HTTPSConnectionPool(host, port=port, **kw)
+ else:
+ return HTTPConnectionPool(host, port=port, **kw)
diff --git a/requests/packages/urllib3/exceptions.py b/requests/packages/urllib3/exceptions.py
new file mode 100644
index 0000000..69f459b
--- /dev/null
+++ b/requests/packages/urllib3/exceptions.py
@@ -0,0 +1,35 @@
+# urllib3/exceptions.py
+# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+## Exceptions
+
+class HTTPError(Exception):
+ "Base exception used by this module."
+ pass
+
+
+class SSLError(Exception):
+ "Raised when SSL certificate fails in an HTTPS connection."
+ pass
+
+
+class MaxRetryError(HTTPError):
+ "Raised when the maximum number of retries is exceeded."
+ pass
+
+
+class TimeoutError(HTTPError):
+ "Raised when a socket timeout occurs."
+ pass
+
+
+class HostChangedError(HTTPError):
+ "Raised when an existing pool gets a request for a foreign host."
+ pass
+
+class EmptyPoolError(HTTPError):
+ "Raised when a pool runs out of connections and no more are allowed."
+ pass
diff --git a/requests/packages/urllib3/filepost.py b/requests/packages/urllib3/filepost.py
new file mode 100644
index 0000000..2ffea8b
--- /dev/null
+++ b/requests/packages/urllib3/filepost.py
@@ -0,0 +1,71 @@
+# urllib3/filepost.py
+# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import codecs
+import mimetools
+import mimetypes
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO # pylint: disable-msg=W0404
+
+
+writer = codecs.lookup('utf-8')[3]
+
+
+def get_content_type(filename):
+ return mimetypes.guess_type(filename)[0] or 'application/octet-stream'
+
+
+def encode_multipart_formdata(fields, boundary=None):
+ """
+ Encode a dictionary of ``fields`` using the multipart/form-data mime format.
+
+ :param fields:
+ Dictionary of fields. The key is treated as the field name, and the
+ value as the body of the form-data. If the value is a tuple of two
+ elements, then the first element is treated as the filename of the
+ form-data section.
+
+ :param boundary:
+ If not specified, then a random boundary will be generated using
+ :func:`mimetools.choose_boundary`.
+ """
+ body = StringIO()
+ if boundary is None:
+ boundary = mimetools.choose_boundary()
+
+ for fieldname, value in fields.iteritems():
+ body.write('--%s\r\n' % (boundary))
+
+ if isinstance(value, tuple):
+ filename, data = value
+ writer(body).write('Content-Disposition: form-data; name="%s"; '
+ 'filename="%s"\r\n' % (fieldname, filename))
+ body.write('Content-Type: %s\r\n\r\n' %
+ (get_content_type(filename)))
+ else:
+ data = value
+ writer(body).write('Content-Disposition: form-data; name="%s"\r\n'
+ % (fieldname))
+ body.write('Content-Type: text/plain\r\n\r\n')
+
+ if isinstance(data, int):
+ data = str(data) # Backwards compatibility
+
+ if isinstance(data, unicode):
+ writer(body).write(data)
+ else:
+ body.write(data)
+
+ body.write('\r\n')
+
+ body.write('--%s--\r\n' % (boundary))
+
+ content_type = 'multipart/form-data; boundary=%s' % boundary
+
+ return body.getvalue(), content_type
diff --git a/requests/packages/urllib3/poolmanager.py b/requests/packages/urllib3/poolmanager.py
new file mode 100644
index 0000000..c08e327
--- /dev/null
+++ b/requests/packages/urllib3/poolmanager.py
@@ -0,0 +1,128 @@
+# urllib3/poolmanager.py
+# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+from ._collections import RecentlyUsedContainer
+from .connectionpool import (
+ HTTPConnectionPool, HTTPSConnectionPool,
+ get_host, connection_from_url,
+)
+
+
+__all__ = ['PoolManager', 'ProxyManager', 'proxy_from_url']
+
+
+from .request import RequestMethods
+from .connectionpool import HTTPConnectionPool, HTTPSConnectionPool
+
+
+pool_classes_by_scheme = {
+ 'http': HTTPConnectionPool,
+ 'https': HTTPSConnectionPool,
+}
+
+port_by_scheme = {
+ 'http': 80,
+ 'https': 443,
+}
+
+
+class PoolManager(RequestMethods):
+ """
+ Allows for arbitrary requests while transparently keeping track of
+ necessary connection pools for you.
+
+ :param num_pools:
+ Number of connection pools to cache before discarding the least recently
+ used pool.
+
+ :param \**connection_pool_kw:
+ Additional parameters are used to create fresh
+ :class:`urllib3.connectionpool.ConnectionPool` instances.
+
+ Example: ::
+
+ >>> manager = PoolManager()
+ >>> r = manager.urlopen("http://google.com/")
+ >>> r = manager.urlopen("http://google.com/mail")
+ >>> r = manager.urlopen("http://yahoo.com/")
+        >>> len(manager.pools)
+ 2
+
+ """
+
+ # TODO: Make sure there are no memory leaks here.
+
+ def __init__(self, num_pools=10, **connection_pool_kw):
+ self.connection_pool_kw = connection_pool_kw
+ self.pools = RecentlyUsedContainer(num_pools)
+
+ def connection_from_host(self, host, port=80, scheme='http'):
+ """
+ Get a :class:`ConnectionPool` based on the host, port, and scheme.
+
+ Note that an appropriate ``port`` value is required here to normalize
+ connection pools in our container most effectively.
+ """
+ pool_key = (scheme, host, port)
+
+ # If the scheme, host, or port doesn't match existing open connections,
+ # open a new ConnectionPool.
+ pool = self.pools.get(pool_key)
+ if pool:
+ return pool
+
+ # Make a fresh ConnectionPool of the desired type
+ pool_cls = pool_classes_by_scheme[scheme]
+ pool = pool_cls(host, port, **self.connection_pool_kw)
+
+ self.pools[pool_key] = pool
+
+ return pool
+
+ def connection_from_url(self, url):
+ """
+ Similar to :func:`urllib3.connectionpool.connection_from_url` but
+ doesn't pass any additional parameters to the
+ :class:`urllib3.connectionpool.ConnectionPool` constructor.
+
+ Additional parameters are taken from the :class:`.PoolManager`
+ constructor.
+ """
+ scheme, host, port = get_host(url)
+
+ port = port or port_by_scheme.get(scheme, 80)
+
+ return self.connection_from_host(host, port=port, scheme=scheme)
+
+ def urlopen(self, method, url, **kw):
+ """
+ Same as :meth:`urllib3.connectionpool.HTTPConnectionPool.urlopen`.
+
+ ``url`` must be absolute, such that an appropriate
+ :class:`urllib3.connectionpool.ConnectionPool` can be chosen for it.
+ """
+ conn = self.connection_from_url(url)
+ return conn.urlopen(method, url, assert_same_host=False, **kw)
+
+
+class ProxyManager(RequestMethods):
+ """
+ Given a ConnectionPool to a proxy, the ProxyManager's ``urlopen`` method
+ will make requests to any url through the defined proxy.
+ """
+
+ def __init__(self, proxy_pool):
+ self.proxy_pool = proxy_pool
+
+ def urlopen(self, method, url, **kw):
+ "Same as HTTP(S)ConnectionPool.urlopen, ``url`` must be absolute."
+ kw['assert_same_host'] = False
+ return self.proxy_pool.urlopen(method, url, **kw)
+
+
+def proxy_from_url(url, **pool_kw):
+ proxy_pool = connection_from_url(url, **pool_kw)
+ return ProxyManager(proxy_pool)
diff --git a/requests/packages/urllib3/request.py b/requests/packages/urllib3/request.py
new file mode 100644
index 0000000..a7e0b5d
--- /dev/null
+++ b/requests/packages/urllib3/request.py
@@ -0,0 +1,145 @@
+# urllib3/request.py
+# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+
+from urllib import urlencode
+
+from .filepost import encode_multipart_formdata
+
+
+__all__ = ['RequestMethods']
+
+
+class RequestMethods(object):
+ """
+ Convenience mixin for classes who implement a :meth:`urlopen` method, such
+ as :class:`~urllib3.connectionpool.HTTPConnectionPool` and
+ :class:`~urllib3.poolmanager.PoolManager`.
+
+ Provides behavior for making common types of HTTP request methods and
+ decides which type of request field encoding to use.
+
+ Specifically,
+
+ :meth:`.request_encode_url` is for sending requests whose fields are encoded
+ in the URL (such as GET, HEAD, DELETE).
+
+ :meth:`.request_encode_body` is for sending requests whose fields are
+    encoded in the *body* of the request using multipart or www-form-urlencoded
+ (such as for POST, PUT, PATCH).
+
+ :meth:`.request` is for making any kind of request, it will look up the
+ appropriate encoding format and use one of the above two methods to make
+ the request.
+ """
+
+ _encode_url_methods = set(['DELETE', 'GET', 'HEAD', 'OPTIONS'])
+
+ _encode_body_methods = set(['PATCH', 'POST', 'PUT', 'TRACE'])
+
+ def urlopen(self, method, url, body=None, headers=None,
+ encode_multipart=True, multipart_boundary=None,
+ **kw):
+ raise NotImplemented("Classes extending RequestMethods must implement "
+ "their own ``urlopen`` method.")
+
+ def request(self, method, url, fields=None, headers=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the appropriate encoding of
+ ``fields`` based on the ``method`` used.
+
+ This is a convenience method that requires the least amount of manual
+ effort. It can be used in most situations, while still having the option
+ to drop down to more specific methods when necessary, such as
+ :meth:`request_encode_url`, :meth:`request_encode_body`,
+ or even the lowest level :meth:`urlopen`.
+ """
+ method = method.upper()
+
+ if method in self._encode_url_methods:
+ return self.request_encode_url(method, url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
+ else:
+ return self.request_encode_body(method, url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
+
+ def request_encode_url(self, method, url, fields=None, **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the url. This is useful for request methods like GET, HEAD, DELETE, etc.
+ """
+ if fields:
+ url += '?' + urlencode(fields)
+ return self.urlopen(method, url, **urlopen_kw)
+
+ def request_encode_body(self, method, url, fields=None, headers=None,
+ encode_multipart=True, multipart_boundary=None,
+ **urlopen_kw):
+ """
+ Make a request using :meth:`urlopen` with the ``fields`` encoded in
+ the body. This is useful for request methods like POST, PUT, PATCH, etc.
+
+ When ``encode_multipart=True`` (default), then
+ :meth:`urllib3.filepost.encode_multipart_formdata` is used to encode the
+ payload with the appropriate content type. Otherwise
+ :meth:`urllib.urlencode` is used with the
+ 'application/x-www-form-urlencoded' content type.
+
+ Multipart encoding must be used when posting files, and it's reasonably
+ safe to use it in other times too. However, it may break request signing,
+ such as with OAuth.
+
+ Supports an optional ``fields`` parameter of key/value strings AND
+ key/filetuple. A filetuple is a (filename, data) tuple. For example: ::
+
+ fields = {
+ 'foo': 'bar',
+ 'fakefile': ('foofile.txt', 'contents of foofile'),
+ 'realfile': ('barfile.txt', open('realfile').read()),
+ 'nonamefile': ('contents of nonamefile field'),
+ }
+
+ When uploading a file, providing a filename (the first parameter of the
+    tuple) is optional but recommended to best mimic the behavior of browsers.
+
+ Note that if ``headers`` are supplied, the 'Content-Type' header will be
+ overwritten because it depends on the dynamic random boundary string
+ which is used to compose the body of the request. The random boundary
+ string can be explicitly set with the ``multipart_boundary`` parameter.
+ """
+ if encode_multipart:
+ body, content_type = encode_multipart_formdata(fields or {},
+ boundary=multipart_boundary)
+ else:
+ body, content_type = (urlencode(fields or {}),
+ 'application/x-www-form-urlencoded')
+
+ headers = headers or {}
+ headers.update({'Content-Type': content_type})
+
+ return self.urlopen(method, url, body=body, headers=headers,
+ **urlopen_kw)
+
+ # Deprecated:
+
+ def get_url(self, url, fields=None, **urlopen_kw):
+ """
+ .. deprecated:: 1.0
+ Use :meth:`request` instead.
+ """
+ return self.request_encode_url('GET', url, fields=fields,
+ **urlopen_kw)
+
+ def post_url(self, url, fields=None, headers=None, **urlopen_kw):
+ """
+ .. deprecated:: 1.0
+ Use :meth:`request` instead.
+ """
+ return self.request_encode_body('POST', url, fields=fields,
+ headers=headers,
+ **urlopen_kw)
diff --git a/requests/packages/urllib3/response.py b/requests/packages/urllib3/response.py
new file mode 100644
index 0000000..4cd15c1
--- /dev/null
+++ b/requests/packages/urllib3/response.py
@@ -0,0 +1,181 @@
+# urllib3/response.py
+# Copyright 2008-2011 Andrey Petrov and contributors (see CONTRIBUTORS.txt)
+#
+# This module is part of urllib3 and is released under
+# the MIT License: http://www.opensource.org/licenses/mit-license.php
+
+import gzip
+import logging
+import zlib
+
+
+try:
+ from cStringIO import StringIO
+except ImportError:
+ from StringIO import StringIO # pylint: disable-msg=W0404
+
+
+from .exceptions import HTTPError
+
+
+log = logging.getLogger(__name__)
+
+
+def decode_gzip(data):
+ gzipper = gzip.GzipFile(fileobj=StringIO(data))
+ return gzipper.read()
+
+
+def decode_deflate(data):
+ try:
+ return zlib.decompress(data)
+ except zlib.error:
+ return zlib.decompress(data, -zlib.MAX_WBITS)
+
+
+class HTTPResponse(object):
+ """
+ HTTP Response container.
+
+ Backwards-compatible to httplib's HTTPResponse but the response ``body`` is
+ loaded and decoded on-demand when the ``data`` property is accessed.
+
+ Extra parameters for behaviour not present in httplib.HTTPResponse:
+
+ :param preload_content:
+ If True, the response's body will be preloaded during construction.
+
+ :param decode_content:
+        If True, the body will be decoded based on the response's
+        'content-encoding' header (like 'gzip' and 'deflate'); if False,
+        the raw, undecoded data will be used instead.
+
+ :param original_response:
+ When this HTTPResponse wrapper is generated from an httplib.HTTPResponse
+ object, it's convenient to include the original for debug purposes. It's
+ otherwise unused.
+ """
+
+ CONTENT_DECODERS = {
+ 'gzip': decode_gzip,
+ 'deflate': decode_deflate,
+ }
+
+ def __init__(self, body='', headers=None, status=0, version=0, reason=None,
+ strict=0, preload_content=True, decode_content=True,
+ original_response=None, pool=None, connection=None):
+ self.headers = headers or {}
+ self.status = status
+ self.version = version
+ self.reason = reason
+ self.strict = strict
+
+ self._decode_content = decode_content
+ self._body = None
+ self._fp = None
+ self._original_response = original_response
+
+ self._pool = pool
+ self._connection = connection
+
+ if hasattr(body, 'read'):
+ self._fp = body
+
+ if preload_content:
+ self._body = self.read(decode_content=decode_content)
+
+ def release_conn(self):
+ if not self._pool or not self._connection:
+ return
+
+ self._pool._put_conn(self._connection)
+ self._connection = None
+
+ @property
+ def data(self):
+        # For backwards-compat with urllib3 0.4 and earlier.
+ if self._body:
+ return self._body
+
+ if self._fp:
+ return self.read(decode_content=self._decode_content,
+ cache_content=True)
+
+ def read(self, amt=None, decode_content=True, cache_content=False):
+ """
+ Similar to :meth:`httplib.HTTPResponse.read`, but with two additional
+ parameters: ``decode_content`` and ``cache_content``.
+
+ :param amt:
+ How much of the content to read. If specified, decoding and caching
+ is skipped because we can't decode partial content nor does it make
+ sense to cache partial content as the full response.
+
+ :param decode_content:
+ If True, will attempt to decode the body based on the
+ 'content-encoding' header. (Overridden if ``amt`` is set.)
+
+ :param cache_content:
+ If True, will save the returned data such that the same result is
+            returned regardless of the state of the underlying file object. This
+ is useful if you want the ``.data`` property to continue working
+ after having ``.read()`` the file object. (Overridden if ``amt`` is
+ set.)
+ """
+ content_encoding = self.headers.get('content-encoding')
+ decoder = self.CONTENT_DECODERS.get(content_encoding)
+
+ data = self._fp and self._fp.read(amt)
+
+ try:
+
+ if amt:
+ return data
+
+ if not decode_content or not decoder:
+ if cache_content:
+ self._body = data
+
+ return data
+
+ try:
+ data = decoder(data)
+ except IOError:
+ raise HTTPError("Received response with content-encoding: %s, but "
+ "failed to decode it." % content_encoding)
+
+ if cache_content:
+ self._body = data
+
+ return data
+
+ finally:
+
+ if self._original_response and self._original_response.isclosed():
+ self.release_conn()
+
+ @staticmethod
+ def from_httplib(r, **response_kw):
+ """
+ Given an :class:`httplib.HTTPResponse` instance ``r``, return a
+ corresponding :class:`urllib3.response.HTTPResponse` object.
+
+ Remaining parameters are passed to the HTTPResponse constructor, along
+ with ``original_response=r``.
+ """
+
+ return HTTPResponse(body=r,
+ headers=dict(r.getheaders()),
+ status=r.status,
+ version=r.version,
+ reason=r.reason,
+ strict=r.strict,
+ original_response=r,
+ **response_kw)
+
+ # Backwards-compatibility methods for httplib.HTTPResponse
+ def getheaders(self):
+ return self.headers
+
+ def getheader(self, name, default=None):
+ return self.headers.get(name, default)
diff --git a/requests/sessions.py b/requests/sessions.py
index 50b09f6..247aa18 100644
--- a/requests/sessions.py
+++ b/requests/sessions.py
@@ -2,38 +2,92 @@
"""
requests.session
-~~~~~~~~~~~~~~~
+~~~~~~~~~~~~~~~~
This module provides a Session object to manage and persist settings across
requests (cookies, auth, proxies).
"""
-import cookielib
+from .defaults import defaults
+from .models import Request
+from .hooks import dispatch_hook
+from .utils import header_expand
+from .packages.urllib3.poolmanager import PoolManager
-from . import api
-from .utils import add_dict_to_cookiejar
+def merge_kwargs(local_kwarg, default_kwarg):
+ """Merges kwarg dictionaries.
+
+ If a local key in the dictionary is set to None, it will be removed.
+ """
+
+ if default_kwarg is None:
+ return local_kwarg
+
+ if isinstance(local_kwarg, basestring):
+ return local_kwarg
+
+ if local_kwarg is None:
+ return default_kwarg
+
+ # Bypass if not a dictionary (e.g. timeout)
+ if not hasattr(default_kwarg, 'items'):
+ return local_kwarg
+
+ # Update new values.
+ kwargs = default_kwarg.copy()
+ kwargs.update(local_kwarg)
+
+ # Remove keys that are set to None.
+ for (k,v) in local_kwarg.items():
+ if v is None:
+ del kwargs[k]
+
+ return kwargs
class Session(object):
"""A Requests session."""
- __attrs__ = ['headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks']
+ __attrs__ = [
+ 'headers', 'cookies', 'auth', 'timeout', 'proxies', 'hooks',
+ 'params', 'config']
- def __init__(self, **kwargs):
+ def __init__(self,
+ headers=None,
+ cookies=None,
+ auth=None,
+ timeout=None,
+ proxies=None,
+ hooks=None,
+ params=None,
+ config=None):
- # Set up a CookieJar to be used by default
- self.cookies = cookielib.FileCookieJar()
+ self.headers = headers or {}
+ self.cookies = cookies or {}
+ self.auth = auth
+ self.timeout = timeout
+ self.proxies = proxies or {}
+ self.hooks = hooks or {}
+ self.params = params or {}
+ self.config = config or {}
- # Map args from kwargs to instance-local variables
- map(lambda k, v: (k in self.__attrs__) and setattr(self, k, v),
- kwargs.iterkeys(), kwargs.itervalues())
+ for (k, v) in defaults.items():
+ self.config.setdefault(k, v)
- # Map and wrap requests.api methods
- self._map_api_methods()
+ self.poolmanager = PoolManager(
+ num_pools=self.config.get('pool_connections'),
+ maxsize=self.config.get('pool_maxsize')
+ )
+
+ # Set up a CookieJar to be used by default
+ self.cookies = {}
+ # Add passed cookies in.
+ if cookies is not None:
+ self.cookies.update(cookies)
def __repr__(self):
return '<requests-client at 0x%x>' % (id(self))
@@ -42,43 +96,184 @@ class Session(object):
return self
def __exit__(self, *args):
- # print args
pass
+ def request(self, method, url,
+ params=None,
+ data=None,
+ headers=None,
+ cookies=None,
+ files=None,
+ auth=None,
+ timeout=None,
+ allow_redirects=False,
+ proxies=None,
+ hooks=None,
+ return_response=True,
+ config=None,
+ prefetch=False):
- def _map_api_methods(self):
- """Reads each available method from requests.api and decorates
- them with a wrapper, which inserts any instance-local attributes
- (from __attrs__) that have been set, combining them with **kwargs.
+ """Constructs and sends a :class:`Request <Request>`.
+ Returns :class:`Response <Response>` object.
+
+ :param method: method for the new :class:`Request` object.
+ :param url: URL for the new :class:`Request` object.
+ :param params: (optional) Dictionary or bytes to be sent in the query string for the :class:`Request`.
+ :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
+ :param headers: (optional) Dictionary of HTTP Headers to send with the :class:`Request`.
+ :param cookies: (optional) Dict or CookieJar object to send with the :class:`Request`.
+ :param files: (optional) Dictionary of 'filename': file-like-objects for multipart encoding upload.
+        :param auth: (optional) Auth tuple to enable Basic/Digest/Custom HTTP Auth.
+ :param timeout: (optional) Float describing the timeout of the request.
+ :param allow_redirects: (optional) Boolean. Set to True if POST/PUT/DELETE redirect following is allowed.
+ :param proxies: (optional) Dictionary mapping protocol to the URL of the proxy.
+        :param return_response: (optional) If False, an un-sent Request object will be returned.
+ :param config: (optional) A configuration dictionary.
+ :param prefetch: (optional) if ``True``, the response content will be immediately downloaded.
"""
- def pass_args(func):
- def wrapper_func(*args, **kwargs):
- inst_attrs = dict((k, v) for k, v in self.__dict__.iteritems()
- if k in self.__attrs__)
- # Combine instance-local values with kwargs values, with
- # priority to values in kwargs
- kwargs = dict(inst_attrs.items() + kwargs.items())
+ method = str(method).upper()
+
+ # Default empty dicts for dict params.
+ cookies = {} if cookies is None else cookies
+ data = {} if data is None else data
+ files = {} if files is None else files
+ headers = {} if headers is None else headers
+ params = {} if params is None else params
+ hooks = {} if hooks is None else hooks
+ # use session's hooks as defaults
+ for key, cb in self.hooks.iteritems():
+ hooks.setdefault(key, cb)
+
+ # Expand header values.
+ if headers:
+ for k, v in headers.items() or {}:
+ headers[k] = header_expand(v)
+
+ args = dict(
+ method=method,
+ url=url,
+ data=data,
+ params=params,
+ headers=headers,
+ cookies=cookies,
+ files=files,
+ auth=auth,
+ hooks=hooks,
+ timeout=timeout,
+ allow_redirects=allow_redirects,
+ proxies=proxies,
+ config=config,
+ _poolmanager=self.poolmanager
+ )
+
+ # Merge local kwargs with session kwargs.
+ for attr in self.__attrs__:
+ session_val = getattr(self, attr, None)
+ local_val = args.get(attr)
+
+ args[attr] = merge_kwargs(local_val, session_val)
+
+ # Arguments manipulation hook.
+ args = dispatch_hook('args', args['hooks'], args)
+
+ # Create the (empty) response.
+ r = Request(**args)
+
+ # Give the response some context.
+ r.session = self
+
+ # Don't send if asked nicely.
+ if not return_response:
+ return r
+
+ # Send the HTTP Request.
+ r.send(prefetch=prefetch)
+
+        # Send any cookies back up to the session.
+ self.cookies.update(r.response.cookies)
+
+ # Return the response.
+ return r.response
+
+
+ def get(self, url, **kwargs):
+ """Sends a GET request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param **kwargs: Optional arguments that ``request`` takes.
+ """
- # If a session request has a cookie_dict, inject the
- # values into the existing CookieJar instead.
- if isinstance(kwargs.get('cookies', None), dict):
- kwargs['cookies'] = add_dict_to_cookiejar(
- inst_attrs['cookies'], kwargs['cookies']
- )
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('get', url, **kwargs)
- if kwargs.get('headers', None) and inst_attrs.get('headers', None):
- kwargs['headers'].update(inst_attrs['headers'])
- return func(*args, **kwargs)
- return wrapper_func
+ def options(self, url, **kwargs):
+ """Sends a OPTIONS request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param **kwargs: Optional arguments that ``request`` takes.
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('options', url, **kwargs)
+
+
+ def head(self, url, **kwargs):
+ """Sends a HEAD request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param **kwargs: Optional arguments that ``request`` takes.
+ """
+
+ kwargs.setdefault('allow_redirects', True)
+ return self.request('head', url, **kwargs)
+
+
+ def post(self, url, data=None, **kwargs):
+ """Sends a POST request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
+ :param **kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('post', url, data=data, **kwargs)
+
+
+ def put(self, url, data=None, **kwargs):
+ """Sends a PUT request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
+ :param **kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('put', url, data=data, **kwargs)
+
+
+ def patch(self, url, data=None, **kwargs):
+ """Sends a PATCH request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param data: (optional) Dictionary or bytes to send in the body of the :class:`Request`.
+ :param **kwargs: Optional arguments that ``request`` takes.
+ """
+
+ return self.request('patch', url, data=data, **kwargs)
+
+
+ def delete(self, url, **kwargs):
+ """Sends a DELETE request. Returns :class:`Response` object.
+
+ :param url: URL for the new :class:`Request` object.
+ :param **kwargs: Optional arguments that ``request`` takes.
+ """
- # Map and decorate each function available in requests.api
- map(lambda fn: setattr(self, fn, pass_args(getattr(api, fn))),
- api.__all__)
+ return self.request('delete', url, **kwargs)
def session(**kwargs):
- """Returns a :class:`Session` for context-managment."""
+ """Returns a :class:`Session` for context-management."""
- return Session(**kwargs) \ No newline at end of file
+ return Session(**kwargs)
diff --git a/requests/status_codes.py b/requests/status_codes.py
index a809de6..fab8e95 100644
--- a/requests/status_codes.py
+++ b/requests/status_codes.py
@@ -47,15 +47,18 @@ _codes = {
412: ('precondition_failed', 'precondition'),
413: ('request_entity_too_large',),
414: ('request_uri_too_large',),
- 415: ('unspported_media_type', 'unspported_media', 'media_type'),
+ 415: ('unsupported_media_type', 'unsupported_media', 'media_type'),
416: ('requested_range_not_satisfiable', 'requested_range', 'range_not_satisfiable'),
417: ('expectation_failed',),
418: ('im_a_teapot', 'teapot', 'i_am_a_teapot'),
422: ('unprocessable_entity', 'unprocessable'),
423: ('locked',),
- 424: ('failed_depdendency', 'depdendency'),
+ 424: ('failed_dependency', 'dependency'),
425: ('unordered_collection', 'unordered'),
426: ('upgrade_required', 'upgrade'),
+ 428: ('precondition_required', 'precondition'),
+ 429: ('too_many_requests', 'too_many'),
+ 431: ('header_fields_too_large', 'fields_too_large'),
444: ('no_response', 'none'),
449: ('retry_with', 'retry'),
450: ('blocked_by_windows_parental_controls', 'parental_controls'),
diff --git a/requests/structures.py b/requests/structures.py
index d068bf9..35a903f 100644
--- a/requests/structures.py
+++ b/requests/structures.py
@@ -4,10 +4,11 @@
requests.structures
~~~~~~~~~~~~~~~~~~~
-Datastructures that power Requests.
+Data structures that power Requests.
"""
+
class CaseInsensitiveDict(dict):
"""Case-insensitive Dictionary
diff --git a/requests/utils.py b/requests/utils.py
index 2e16163..f31cad8 100644
--- a/requests/utils.py
+++ b/requests/utils.py
@@ -12,8 +12,109 @@ that are also useful for external consumption.
import cgi
import codecs
import cookielib
+import os
+import random
import re
import zlib
+import urllib
+
+from urllib2 import parse_http_list as _parse_list_header
+
+
+def guess_filename(obj):
+ """Tries to guess the filename of the given object."""
+ name = getattr(obj, 'name', None)
+ if name and name[0] != '<' and name[-1] != '>':
+ return name
+
+# From mitsuhiko/werkzeug (used with permission).
+def parse_list_header(value):
+ """Parse lists as described by RFC 2068 Section 2.
+
+ In particular, parse comma-separated lists where the elements of
+ the list may include quoted-strings. A quoted-string could
+ contain a comma. A non-quoted string could have quotes in the
+ middle. Quotes are removed automatically after parsing.
+
+ It basically works like :func:`parse_set_header` just that items
+ may appear multiple times and case sensitivity is preserved.
+
+ The return value is a standard :class:`list`:
+
+ >>> parse_list_header('token, "quoted value"')
+ ['token', 'quoted value']
+
+ To create a header from the :class:`list` again, use the
+ :func:`dump_header` function.
+
+ :param value: a string with a list header.
+ :return: :class:`list`
+ """
+ result = []
+ for item in _parse_list_header(value):
+ if item[:1] == item[-1:] == '"':
+ item = unquote_header_value(item[1:-1])
+ result.append(item)
+ return result
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def parse_dict_header(value):
+ """Parse lists of key, value pairs as described by RFC 2068 Section 2 and
+ convert them into a python dict:
+
+ >>> d = parse_dict_header('foo="is a fish", bar="as well"')
+ >>> type(d) is dict
+ True
+ >>> sorted(d.items())
+ [('bar', 'as well'), ('foo', 'is a fish')]
+
+ If there is no value for a key it will be `None`:
+
+ >>> parse_dict_header('key_without_value')
+ {'key_without_value': None}
+
+ To create a header from the :class:`dict` again, use the
+ :func:`dump_header` function.
+
+ :param value: a string with a dict header.
+ :return: :class:`dict`
+ """
+ result = {}
+ for item in _parse_list_header(value):
+ if '=' not in item:
+ result[item] = None
+ continue
+ name, value = item.split('=', 1)
+ if value[:1] == value[-1:] == '"':
+ value = unquote_header_value(value[1:-1])
+ result[name] = value
+ return result
+
+
+# From mitsuhiko/werkzeug (used with permission).
+def unquote_header_value(value, is_filename=False):
+ r"""Unquotes a header value. (Reversal of :func:`quote_header_value`).
+ This does not use the real unquoting but what browsers are actually
+ using for quoting.
+
+ :param value: the header value to unquote.
+ """
+ if value and value[0] == value[-1] == '"':
+ # this is not the real unquoting, but fixing this so that the
+ # RFC is met will result in bugs with internet explorer and
+ # probably some other browsers as well. IE for example is
+ # uploading files with "C:\foo\bar.txt" as filename
+ value = value[1:-1]
+
+ # if this is a filename and the starting characters look like
+ # a UNC path, then just return the value without quotes. Using the
+ # replace sequence below on a UNC path has the effect of turning
+ # the leading double slash into a single slash and then
+ # _fix_ie_filename() doesn't work correctly. See #458.
+ if not is_filename or value[:2] != '\\\\':
+ return value.replace('\\\\', '\\').replace('\\"', '"')
+ return value
def header_expand(headers):
@@ -55,7 +156,7 @@ def header_expand(headers):
collector.append(', ')
- # Remove trailing seperators.
+ # Remove trailing separators.
if collector[-1] in (', ', '; '):
del collector[-1]
@@ -63,6 +164,21 @@ def header_expand(headers):
+def randombytes(n):
+ """Return n random bytes."""
+ # Use /dev/urandom if it is available. Fall back to random module
+ # if not. It might be worthwhile to extend this function to use
+ # other platform-specific mechanisms for getting random bytes.
+ if os.path.exists("/dev/urandom"):
+ f = open("/dev/urandom")
+ s = f.read(n)
+ f.close()
+ return s
+ else:
+ L = [chr(random.randrange(0, 256)) for i in range(n)]
+ return "".join(L)
+
+
def dict_from_cookiejar(cj):
"""Returns a key/value dictionary from a CookieJar.
@@ -180,13 +296,13 @@ def unicode_from_html(content):
def stream_decode_response_unicode(iterator, r):
"""Stream decodes a iterator."""
- encoding = get_encoding_from_headers(r.headers)
- if encoding is None:
+
+ if r.encoding is None:
for item in iterator:
yield item
return
- decoder = codecs.getincrementaldecoder(encoding)(errors='replace')
+ decoder = codecs.getincrementaldecoder(r.encoding)(errors='replace')
for chunk in iterator:
rv = decoder.decode(chunk)
if rv:
@@ -199,7 +315,7 @@ def stream_decode_response_unicode(iterator, r):
def get_unicode_from_response(r):
"""Returns the requested content back in unicode.
- :param r: Reponse object to get unicode content from.
+ :param r: Response object to get unicode content from.
Tried:
@@ -252,3 +368,14 @@ def stream_decode_gzip(iterator):
yield rv
except zlib.error:
pass
+
+
+def requote_path(path):
+ """Re-quote the given URL path component.
+
+ This function passes the given path through an unquote/quote cycle to
+    ensure that it is fully and consistently quoted.
+ """
+ parts = path.split("/")
+ parts = (urllib.quote(urllib.unquote(part), safe="") for part in parts)
+ return "/".join(parts)