Diffstat (limited to 'requests/adapters.py')
 requests/adapters.py | 95
 1 file changed, 61 insertions(+), 34 deletions(-)
diff --git a/requests/adapters.py b/requests/adapters.py
index d557b74..dd10e95 100644
--- a/requests/adapters.py
+++ b/requests/adapters.py
@@ -22,8 +22,9 @@ from .packages.urllib3.exceptions import MaxRetryError
from .packages.urllib3.exceptions import TimeoutError
from .packages.urllib3.exceptions import SSLError as _SSLError
from .packages.urllib3.exceptions import HTTPError as _HTTPError
+from .packages.urllib3.exceptions import ProxyError as _ProxyError
from .cookies import extract_cookies_to_jar
-from .exceptions import ConnectionError, Timeout, SSLError
+from .exceptions import ConnectionError, Timeout, SSLError, ProxyError
from .auth import _basic_auth_str
DEFAULT_POOLBLOCK = False
@@ -54,14 +55,16 @@ class HTTPAdapter(BaseAdapter):
:param pool_connections: The number of urllib3 connection pools to cache.
:param pool_maxsize: The maximum number of connections to save in the pool.
- :param max_retries: The maximum number of retries each connection should attempt.
+ :param int max_retries: The maximum number of retries each connection
+ should attempt. Note, this applies only to failed connections and
+ timeouts, never to requests where the server returns a response.
:param pool_block: Whether the connection pool should block for connections.
Usage::
>>> import requests
>>> s = requests.Session()
- >>> a = requests.adapters.HTTPAdapter()
+ >>> a = requests.adapters.HTTPAdapter(max_retries=3)
>>> s.mount('http://', a)
"""
__attrs__ = ['max_retries', 'config', '_pool_connections', '_pool_maxsize',
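
The clarified max_retries semantics in the docstring are worth pinning down with a quick sketch; the URL and the retry count below are illustrative, not part of the patch:

import requests
from requests.adapters import HTTPAdapter

s = requests.Session()
# Retries apply only to failed connections and timeouts; a response
# from the server, even a 5xx, is never retried by the adapter.
s.mount('http://', HTTPAdapter(max_retries=3))
s.mount('https://', HTTPAdapter(max_retries=3))
resp = s.get('http://example.com/')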
@@ -87,6 +90,11 @@ class HTTPAdapter(BaseAdapter):
self.__attrs__)
def __setstate__(self, state):
+ # Can't handle by adding 'proxy_manager' to self.__attrs__ because
+ # self.poolmanager uses a lambda function, which isn't pickleable.
+ self.proxy_manager = {}
+ self.config = {}
+
for attr, value in state.items():
setattr(self, attr, value)
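
Resetting proxy_manager and config in __setstate__ is what lets an adapter survive a pickle round trip despite the unpicklable lambda on its poolmanager. A minimal illustration, assuming nothing beyond the public constructor:

import pickle
from requests.adapters import HTTPAdapter

a = HTTPAdapter(max_retries=2)
# Only the attributes listed in __attrs__ travel through pickle;
# proxy_manager and config come back as fresh empty dicts.
b = pickle.loads(pickle.dumps(a))
assert b.max_retries == 2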
@@ -201,11 +209,17 @@ class HTTPAdapter(BaseAdapter):
if not proxy in self.proxy_manager:
self.proxy_manager[proxy] = proxy_from_url(
proxy,
- proxy_headers=proxy_headers)
+ proxy_headers=proxy_headers,
+ num_pools=self._pool_connections,
+ maxsize=self._pool_maxsize,
+ block=self._pool_block)
conn = self.proxy_manager[proxy].connection_from_url(url)
else:
- conn = self.poolmanager.connection_from_url(url.lower())
+ # Only scheme should be lower case
+ parsed = urlparse(url)
+ url = parsed.geturl()
+ conn = self.poolmanager.connection_from_url(url)
return conn
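
Two changes here: proxied connections now inherit the adapter's pool sizing (num_pools, maxsize, block), and for direct connections only the scheme is normalised to lower case instead of the whole URL, so case-sensitive paths are left intact. A small sketch of the scheme handling, using the standard-library urlparse (the adapter reaches it through requests.compat):

from urllib.parse import urlparse

url = 'HTTP://Example.com/Case/Sensitive/Path'
# urlparse lower-cases the scheme but preserves the rest of the URL,
# unlike the previous blanket url.lower().
print(urlparse(url).geturl())   # http://Example.com/Case/Sensitive/Path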
@@ -220,8 +234,8 @@ class HTTPAdapter(BaseAdapter):
def request_url(self, request, proxies):
"""Obtain the url to use when making the final request.
- If the message is being sent through a proxy, the full URL has to be
- used. Otherwise, we should only use the path portion of the URL.
+ If the message is being sent through an HTTP proxy, the full URL has to
+ be used. Otherwise, we should only use the path portion of the URL.
This should not be called from user code, and is only exposed for use
when subclassing the
@@ -231,9 +245,10 @@ class HTTPAdapter(BaseAdapter):
:param proxies: A dictionary of schemes to proxy URLs.
"""
proxies = proxies or {}
- proxy = proxies.get(urlparse(request.url).scheme)
+ scheme = urlparse(request.url).scheme
+ proxy = proxies.get(scheme)
- if proxy:
+ if proxy and scheme != 'https':
url, _ = urldefrag(request.url)
else:
url = request.path_url
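
The added scheme check means a plain-HTTP request through a proxy still puts the absolute URL on the request line, while an HTTPS request is tunnelled via CONNECT and only needs the path. A rough, simplified sketch of the decision (request.path_url would also carry the query string, which is omitted here):

from urllib.parse import urlparse, urldefrag

def request_url_sketch(url, proxies):
    scheme = urlparse(url).scheme
    proxy = proxies.get(scheme)
    if proxy and scheme != 'https':
        return urldefrag(url)[0]            # GET http://host/path HTTP/1.1
    return urlparse(url).path or '/'        # GET /path HTTP/1.1

print(request_url_sketch('http://example.com/a#frag', {'http': 'http://proxy:3128'}))
print(request_url_sketch('https://example.com/a', {'https': 'http://proxy:3128'}))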
@@ -271,10 +286,6 @@ class HTTPAdapter(BaseAdapter):
username, password = get_auth_from_url(proxy)
if username and password:
- # Proxy auth usernames and passwords will be urlencoded, we need
- # to decode them.
- username = unquote(username)
- password = unquote(password)
headers['Proxy-Authorization'] = _basic_auth_str(username,
password)
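
With the unquote() calls gone, proxy_headers() builds Proxy-Authorization from the credentials exactly as get_auth_from_url hands them over; any percent-decoding now has to happen before this point. A hedged sketch with a made-up proxy URL:

from requests.auth import _basic_auth_str
from requests.utils import get_auth_from_url

username, password = get_auth_from_url('http://user:secret@proxy.local:3128/')
if username and password:
    headers = {'Proxy-Authorization': _basic_auth_str(username, password)}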
@@ -287,7 +298,7 @@ class HTTPAdapter(BaseAdapter):
:param stream: (optional) Whether to stream the request content.
:param timeout: (optional) The timeout on the request.
:param verify: (optional) Whether to verify SSL certificates.
- :param vert: (optional) Any user-provided SSL certificate to be trusted.
+ :param cert: (optional) Any user-provided SSL certificate to be trusted.
:param proxies: (optional) The proxies dictionary to apply to the request.
"""
@@ -325,27 +336,40 @@ class HTTPAdapter(BaseAdapter):
conn = conn.proxy_pool
low_conn = conn._get_conn(timeout=timeout)
- low_conn.putrequest(request.method, url, skip_accept_encoding=True)
-
- for header, value in request.headers.items():
- low_conn.putheader(header, value)
- low_conn.endheaders()
-
- for i in request.body:
- low_conn.send(hex(len(i))[2:].encode('utf-8'))
- low_conn.send(b'\r\n')
- low_conn.send(i)
- low_conn.send(b'\r\n')
- low_conn.send(b'0\r\n\r\n')
-
- r = low_conn.getresponse()
- resp = HTTPResponse.from_httplib(r,
- pool=conn,
- connection=low_conn,
- preload_content=False,
- decode_content=False
- )
+ try:
+ low_conn.putrequest(request.method,
+ url,
+ skip_accept_encoding=True)
+
+ for header, value in request.headers.items():
+ low_conn.putheader(header, value)
+
+ low_conn.endheaders()
+
+ for i in request.body:
+ low_conn.send(hex(len(i))[2:].encode('utf-8'))
+ low_conn.send(b'\r\n')
+ low_conn.send(i)
+ low_conn.send(b'\r\n')
+ low_conn.send(b'0\r\n\r\n')
+
+ r = low_conn.getresponse()
+ resp = HTTPResponse.from_httplib(
+ r,
+ pool=conn,
+ connection=low_conn,
+ preload_content=False,
+ decode_content=False
+ )
+ except:
+ # If we hit any problems here, clean up the connection.
+ # Then, reraise so that we can handle the actual exception.
+ low_conn.close()
+ raise
+ else:
+ # All is well, return the connection to the pool.
+ conn._put_conn(low_conn)
except socket.error as sockerr:
raise ConnectionError(sockerr)
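
The rewritten block above is the chunked-upload path, taken when the body has no known length; wrapping it in try/except/else means a failure mid-send closes the low-level connection instead of leaking it, and a success returns it to the pool. A hedged reminder of how a caller ends up here (URL and payload are illustrative):

import requests

def body():
    # A generator body has no Content-Length, so the adapter streams it
    # with chunked transfer encoding via the code shown above.
    yield b'part one, '
    yield b'part two'

resp = requests.post('http://httpbin.org/post', data=body())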
@@ -353,6 +377,9 @@ class HTTPAdapter(BaseAdapter):
except MaxRetryError as e:
raise ConnectionError(e)
+ except _ProxyError as e:
+ raise ProxyError(e)
+
except (_SSLError, _HTTPError) as e:
if isinstance(e, _SSLError):
raise SSLError(e)
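
Mapping urllib3's ProxyError onto requests.exceptions.ProxyError lets callers tell a misbehaving proxy apart from an ordinary connection failure. A small sketch (the proxy address is made up; ProxyError subclasses ConnectionError, so it is caught first):

import requests
from requests.exceptions import ProxyError, ConnectionError

try:
    requests.get('http://example.com/', proxies={'http': 'http://127.0.0.1:9'})
except ProxyError as e:
    print('proxy problem:', e)
except ConnectionError as e:
    print('connection failure:', e)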