Diffstat (limited to 'test')
-rw-r--r--   test/__init__.py                                  35
-rw-r--r--   test/appengine/__init__.py                        71
-rw-r--r--   test/appengine/app.yaml                           11
-rw-r--r--   test/appengine/nose.cfg                            4
-rw-r--r--   test/appengine/requirements.txt                    1
-rw-r--r--   test/appengine/test_urlfetch.py                   49
-rw-r--r--   test/contrib/test_gae_manager.py                 185
-rw-r--r--   test/test_collections.py                           3
-rw-r--r--   test/test_connectionpool.py                       14
-rw-r--r--   test/test_exceptions.py                           10
-rw-r--r--   test/test_no_ssl.py                                2
-rw-r--r--   test/test_proxymanager.py                          4
-rw-r--r--   test/test_response.py                              4
-rw-r--r--   test/with_dummyserver/test_connectionpool.py      25
-rw-r--r--   test/with_dummyserver/test_https.py                5
-rw-r--r--   test/with_dummyserver/test_poolmanager.py         13
-rw-r--r--   test/with_dummyserver/test_proxy_poolmanager.py   42
-rw-r--r--   test/with_dummyserver/test_socketlevel.py         159
18 files changed, 619 insertions(+), 18 deletions(-)
diff --git a/test/__init__.py b/test/__init__.py
index 2fce71c..172493c 100644
--- a/test/__init__.py
+++ b/test/__init__.py
@@ -2,6 +2,7 @@ import warnings
import sys
import errno
import functools
+import logging
import socket
from nose.plugins.skip import SkipTest
@@ -91,3 +92,37 @@ def requires_network(test):
raise SkipTest(msg)
raise
return wrapper
+
+
+class _ListHandler(logging.Handler):
+ def __init__(self):
+ super(_ListHandler, self).__init__()
+ self.records = []
+
+ def emit(self, record):
+ self.records.append(record)
+
+
+class LogRecorder(object):
+ def __init__(self, target=logging.root):
+ super(LogRecorder, self).__init__()
+ self._target = target
+ self._handler = _ListHandler()
+
+ @property
+ def records(self):
+ return self._handler.records
+
+ def install(self):
+ self._target.addHandler(self._handler)
+
+ def uninstall(self):
+ self._target.removeHandler(self._handler)
+
+ def __enter__(self):
+ self.install()
+ return self.records
+
+ def __exit__(self, exc_type, exc_value, traceback):
+ self.uninstall()
+ return False
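
A minimal usage sketch of the LogRecorder helper added above: it is meant to be used as a
context manager around code that logs, with assertions made against the captured records
afterwards (this mirrors its use in test_socketlevel.py further down; the logging call and
URL here are purely illustrative):

    import logging

    # Assumes the test package above is importable as `test`.
    from test import LogRecorder

    with LogRecorder() as logs:
        logging.warning('Failed to parse headers (url=%s)', 'http://example.invalid/')

    # Each entry is a logging.LogRecord appended by the installed handler.
    assert any('Failed to parse headers' in record.msg for record in logs)
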
diff --git a/test/appengine/__init__.py b/test/appengine/__init__.py
new file mode 100644
index 0000000..917544d
--- /dev/null
+++ b/test/appengine/__init__.py
@@ -0,0 +1,71 @@
+import os
+import sys
+import unittest
+from nose.plugins.skip import SkipTest
+
+
+def activate_sandbox():
+ """
+    Enables the parts of the GAE sandbox that are relevant to these tests.
+
+    Inserts the stub module import hook, which forces imports of httplib,
+    httplib2, socket, etc. to resolve to their App Engine-specific versions.
+ """
+ from google.appengine.tools.devappserver2.python import sandbox
+
+ for name in list(sys.modules):
+ if name in sandbox.dist27.MODULE_OVERRIDES:
+ del sys.modules[name]
+ sys.meta_path.insert(0, sandbox.StubModuleImportHook())
+ sys.path_importer_cache = {}
+
+
+def deactivate_sandbox():
+ from google.appengine.tools.devappserver2.python import sandbox
+
+ sys.meta_path = [
+ x for x in sys.meta_path if not isinstance(x, sandbox.StubModuleImportHook)]
+ sys.path_importer_cache = {}
+
+ # Delete any instances of sandboxed modules.
+ for name in list(sys.modules):
+ if name in sandbox.dist27.MODULE_OVERRIDES:
+ del sys.modules[name]
+
+
+class AppEngineSandboxTest(unittest.TestCase):
+
+ @classmethod
+ def setUpClass(cls):
+
+ if sys.version_info[:2] != (2, 7):
+ raise SkipTest("App Engine only tests on py2.7")
+
+ if 'APPLICATION_ID' not in os.environ:
+ raise SkipTest("NoseGAE plugin not used.")
+
+ try:
+ activate_sandbox()
+ except ImportError:
+ raise SkipTest("App Engine SDK not available.")
+
+ @classmethod
+    def tearDownClass(cls):
+ try:
+ deactivate_sandbox()
+ except ImportError:
+ pass
+
+
+class MockResponse(object):
+ def __init__(self, content, status_code, content_was_truncated, final_url, headers):
+ import httplib
+ from StringIO import StringIO
+
+ self.content = content
+ self.status_code = status_code
+ self.content_was_truncated = content_was_truncated
+ self.final_url = final_url
+ self.header_msg = httplib.HTTPMessage(StringIO(''.join(
+ ["%s: %s\n" % (k, v) for k, v in headers.iteritems()] + ["\n"])))
+ self.headers = self.header_msg.items()
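
MockResponse above mimics the result object returned by google.appengine.api.urlfetch.fetch,
so it can be patched in as the fetch return value in the tests below. A minimal sketch of
constructing one, assuming a Python 2 / App Engine environment (all values are illustrative):

    # Assumes the test package above is importable as `test`.
    from test.appengine import MockResponse

    resp = MockResponse(
        content='OK',
        status_code=200,
        content_was_truncated=False,
        final_url='http://www.example.com',
        headers={'content-type': 'text/plain'})

    # The headers dict is parsed through httplib.HTTPMessage into (name, value) pairs.
    assert resp.status_code == 200
    assert ('content-type', 'text/plain') in resp.headers
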
diff --git a/test/appengine/app.yaml b/test/appengine/app.yaml
new file mode 100644
index 0000000..907c57f
--- /dev/null
+++ b/test/appengine/app.yaml
@@ -0,0 +1,11 @@
+# dummy app.yaml for nosegae
+
+api_version: 1
+runtime: python27
+threadsafe: true
+
+handlers:
+- url: /
+ static_files: README.md
+ upload: README.md
+ mime_type: text/plain
diff --git a/test/appengine/nose.cfg b/test/appengine/nose.cfg
new file mode 100644
index 0000000..8d8b3f1
--- /dev/null
+++ b/test/appengine/nose.cfg
@@ -0,0 +1,4 @@
+[nosetests]
+cover-min-percentage=0
+with-gae=1
+gae-application=test/appengine/app.yaml
diff --git a/test/appengine/requirements.txt b/test/appengine/requirements.txt
new file mode 100644
index 0000000..b6d79e0
--- /dev/null
+++ b/test/appengine/requirements.txt
@@ -0,0 +1 @@
+NoseGAE==0.5.7
diff --git a/test/appengine/test_urlfetch.py b/test/appengine/test_urlfetch.py
new file mode 100644
index 0000000..3f72023
--- /dev/null
+++ b/test/appengine/test_urlfetch.py
@@ -0,0 +1,49 @@
+from . import AppEngineSandboxTest, MockResponse
+
+from mock import patch
+from nose.plugins.skip import SkipTest
+from ..test_no_ssl import TestWithoutSSL
+
+
+class TestHTTP(AppEngineSandboxTest, TestWithoutSSL):
+ nosegae_urlfetch = True
+
+ def test_urlfetch_called_with_http(self):
+ """
+ Check that URLFetch is used to fetch non-https resources
+ """
+ resp = MockResponse(
+ 'OK',
+ 200,
+ False,
+ 'http://www.google.com',
+ {'content-type': 'text/plain'})
+ with patch('google.appengine.api.urlfetch.fetch', return_value=resp) as fetchmock:
+ import urllib3
+ pool = urllib3.HTTPConnectionPool('www.google.com', '80')
+ r = pool.request('GET', '/')
+ self.assertEqual(r.status, 200, r.data)
+ self.assertEqual(fetchmock.call_count, 1)
+
+
+class TestHTTPS(AppEngineSandboxTest):
+ nosegae_urlfetch = True
+
+ def test_urlfetch_called_with_https(self):
+ """
+ Check that URLFetch is used when fetching https resources
+ """
+ raise SkipTest() # Skipped for now because it fails.
+ resp = MockResponse(
+ 'OK',
+ 200,
+ False,
+ 'https://www.google.com',
+ {'content-type': 'text/plain'})
+ with patch('google.appengine.api.urlfetch.fetch', return_value=resp) as fetchmock:
+ import urllib3
+ pool = urllib3.HTTPSConnectionPool('www.google.com', '443')
+ pool.ConnectionCls = urllib3.connection.UnverifiedHTTPSConnection
+ r = pool.request('GET', '/')
+ self.assertEqual(r.status, 200, r.data)
+ self.assertEqual(fetchmock.call_count, 1)
diff --git a/test/contrib/test_gae_manager.py b/test/contrib/test_gae_manager.py
new file mode 100644
index 0000000..aa909e9
--- /dev/null
+++ b/test/contrib/test_gae_manager.py
@@ -0,0 +1,185 @@
+import unittest
+
+from dummyserver.testcase import HTTPSDummyServerTestCase
+from nose.plugins.skip import SkipTest
+
+try:
+ from google.appengine.api import urlfetch
+ (urlfetch)
+except ImportError:
+ raise SkipTest("App Engine SDK not available.")
+
+from urllib3.contrib.appengine import AppEngineManager, AppEnginePlatformError
+from urllib3.exceptions import (
+ TimeoutError,
+ ProtocolError,
+ SSLError)
+from urllib3.util.url import Url
+from urllib3.util.retry import Retry
+
+from test.with_dummyserver.test_connectionpool import (
+ TestConnectionPool, TestRetry)
+
+
+# Prevent nose from running these tests.
+TestConnectionPool.__test__ = False
+TestRetry.__test__ = False
+
+
+# This class is used so we can re-use the tests from the connection pool.
+# It proxies all requests to the manager.
+class MockPool(object):
+ def __init__(self, host, port, manager, scheme='http'):
+ self.host = host
+ self.port = port
+ self.manager = manager
+ self.scheme = scheme
+
+ def request(self, method, url, *args, **kwargs):
+ url = self._absolute_url(url)
+ return self.manager.request(method, url, *args, **kwargs)
+
+ def urlopen(self, method, url, *args, **kwargs):
+ url = self._absolute_url(url)
+ return self.manager.urlopen(method, url, *args, **kwargs)
+
+ def _absolute_url(self, path):
+ return Url(
+ scheme=self.scheme,
+ host=self.host,
+ port=self.port,
+ path=path).url
+
+
+# Note that this doesn't run in the sandbox; it only runs with the URLFetch
+# API stub enabled. There's no need to enable the sandbox as we know for a fact
+# that URLFetch is used by the connection manager.
+class TestGAEConnectionManager(TestConnectionPool):
+ __test__ = True
+
+ # Magic class variable that tells NoseGAE to enable the URLFetch stub.
+ nosegae_urlfetch = True
+
+ def setUp(self):
+ self.manager = AppEngineManager()
+ self.pool = MockPool(self.host, self.port, self.manager)
+
+ # Tests specific to AppEngineManager
+
+ def test_exceptions(self):
+ # DeadlineExceededError -> TimeoutError
+ self.assertRaises(
+ TimeoutError,
+ self.pool.request,
+ 'GET',
+ '/sleep?seconds=0.005',
+ timeout=0.001)
+
+ # InvalidURLError -> ProtocolError
+ self.assertRaises(
+ ProtocolError,
+ self.manager.request,
+ 'GET',
+ 'ftp://invalid/url')
+
+ # DownloadError -> ProtocolError
+ self.assertRaises(
+ ProtocolError,
+ self.manager.request,
+ 'GET',
+ 'http://0.0.0.0')
+
+ # ResponseTooLargeError -> AppEnginePlatformError
+ self.assertRaises(
+ AppEnginePlatformError,
+ self.pool.request,
+ 'GET',
+            '/nbytes?length=33554433')  # One byte over 32 megabytes.
+
+        # URLFetch reports the request-too-large error as an InvalidURLError,
+        # which maps to an AppEnginePlatformError.
+ body = b'1' * 10485761 # One byte over 10 megabytes.
+ self.assertRaises(
+ AppEnginePlatformError,
+ self.manager.request,
+ 'POST',
+ '/',
+ body=body)
+
+ # Re-used tests below this line.
+ # Subsumed tests
+ test_timeout_float = None # Covered by test_exceptions.
+
+ # Non-applicable tests
+ test_conn_closed = None
+ test_nagle = None
+ test_socket_options = None
+ test_disable_default_socket_options = None
+ test_defaults_are_applied = None
+ test_tunnel = None
+ test_keepalive = None
+ test_keepalive_close = None
+ test_connection_count = None
+ test_connection_count_bigpool = None
+ test_for_double_release = None
+ test_release_conn_parameter = None
+ test_stream_keepalive = None
+ test_cleanup_on_connection_error = None
+
+ # Tests that should likely be modified for appengine specific stuff
+ test_timeout = None
+ test_connect_timeout = None
+ test_connection_error_retries = None
+ test_total_timeout = None
+ test_none_total_applies_connect = None
+ test_timeout_success = None
+ test_source_address_error = None
+ test_bad_connect = None
+ test_partial_response = None
+ test_dns_error = None
+
+
+class TestGAEConnectionManagerWithSSL(HTTPSDummyServerTestCase):
+ nosegae_urlfetch = True
+
+ def setUp(self):
+ self.manager = AppEngineManager()
+ self.pool = MockPool(self.host, self.port, self.manager, 'https')
+
+ def test_exceptions(self):
+ # SSLCertificateError -> SSLError
+ # SSLError is raised with dummyserver because URLFetch doesn't allow
+ # self-signed certs.
+ self.assertRaises(
+ SSLError,
+ self.pool.request,
+ 'GET',
+ '/')
+
+
+class TestGAERetry(TestRetry):
+ __test__ = True
+
+ # Magic class variable that tells NoseGAE to enable the URLFetch stub.
+ nosegae_urlfetch = True
+
+ def setUp(self):
+ self.manager = AppEngineManager()
+ self.pool = MockPool(self.host, self.port, self.manager)
+
+ def test_default_method_whitelist_retried(self):
+ """ urllib3 should retry methods in the default method whitelist """
+ retry = Retry(total=1, status_forcelist=[418])
+ # Use HEAD instead of OPTIONS, as URLFetch doesn't support OPTIONS
+ resp = self.pool.request(
+ 'HEAD', '/successful_retry',
+ headers={'test-name': 'test_default_whitelist'},
+ retries=retry)
+ self.assertEqual(resp.status, 200)
+
+ #test_max_retry = None
+ #test_disabled_retry = None
+
+
+if __name__ == '__main__':
+ unittest.main()
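
The MockPool/AppEngineManager pairing above boils down to the following: the wrapper only
turns a path into an absolute URL (via urllib3.util.url.Url) and forwards the call to the
manager, which is what lets the stock TestConnectionPool/TestRetry suites exercise URLFetch.
A minimal sketch, assuming the App Engine SDK and URLFetch stub are available (host and port
are illustrative, and MockPool is the helper class defined earlier in test_gae_manager.py):

    from urllib3.contrib.appengine import AppEngineManager

    manager = AppEngineManager()
    pool = MockPool('localhost', 8081, manager)

    # _absolute_url('/headers') -> 'http://localhost:8081/headers', so this is
    # equivalent to manager.request('GET', 'http://localhost:8081/headers').
    r = pool.request('GET', '/headers')
    print(r.status)
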
diff --git a/test/test_collections.py b/test/test_collections.py
index 0b36512..9d72939 100644
--- a/test/test_collections.py
+++ b/test/test_collections.py
@@ -237,7 +237,7 @@ class TestHTTPHeaderDict(unittest.TestCase):
def test_copy(self):
h = self.d.copy()
self.assertTrue(self.d is not h)
- self.assertEqual(self.d, h)
+ self.assertEqual(self.d, h)
def test_getlist(self):
self.assertEqual(self.d.getlist('cookie'), ['foo', 'bar'])
@@ -302,6 +302,7 @@ class TestHTTPHeaderDict(unittest.TestCase):
hdict = {'Content-Length': '0', 'Content-type': 'text/plain', 'Server': 'TornadoServer/1.2.3'}
h = dict(HTTPHeaderDict(hdict).items())
self.assertEqual(hdict, h)
+ self.assertEqual(hdict, dict(HTTPHeaderDict(hdict)))
def test_string_enforcement(self):
# This currently throws AttributeError on key.lower(), should probably be something nicer
diff --git a/test/test_connectionpool.py b/test/test_connectionpool.py
index 0718b0f..ee37913 100644
--- a/test/test_connectionpool.py
+++ b/test/test_connectionpool.py
@@ -217,15 +217,23 @@ class TestConnectionPool(unittest.TestCase):
old_pool_queue = pool.pool
self.assertEqual(pool.pool, None)
-
self.assertRaises(ClosedPoolError, pool._get_conn)
pool._put_conn(conn3)
-
self.assertRaises(ClosedPoolError, pool._get_conn)
-
self.assertRaises(Empty, old_pool_queue.get, block=False)
+ def test_absolute_url(self):
+ c = connection_from_url('http://google.com:80')
+ self.assertEqual(
+ 'http://google.com:80/path?query=foo',
+ c._absolute_url('path?query=foo'))
+
+ def test_ca_certs_default_cert_required(self):
+ with connection_from_url('https://google.com:80', ca_certs='/etc/ssl/certs/custom.pem') as pool:
+ conn = pool._get_conn()
+ self.assertEqual(conn.cert_reqs, 'CERT_REQUIRED')
+
if __name__ == '__main__':
unittest.main()
diff --git a/test/test_exceptions.py b/test/test_exceptions.py
index 4190a61..b5bb93e 100644
--- a/test/test_exceptions.py
+++ b/test/test_exceptions.py
@@ -4,7 +4,7 @@ import pickle
from urllib3.exceptions import (HTTPError, MaxRetryError, LocationParseError,
ClosedPoolError, EmptyPoolError,
HostChangedError, ReadTimeoutError,
- ConnectTimeoutError)
+ ConnectTimeoutError, HeaderParsingError)
from urllib3.connectionpool import HTTPConnectionPool
@@ -44,3 +44,11 @@ class TestPickle(unittest.TestCase):
assert self.verify_pickling(
ReadTimeoutError(HTTPConnectionPool('localhost'), '/', None))
+
+
+class TestFormat(unittest.TestCase):
+ def test_header_parsing_errors(self):
+ hpe = HeaderParsingError('defects', 'unparsed_data')
+
+ self.assertTrue('defects' in str(hpe))
+ self.assertTrue('unparsed_data' in str(hpe))
diff --git a/test/test_no_ssl.py b/test/test_no_ssl.py
index b5961b8..79058f7 100644
--- a/test/test_no_ssl.py
+++ b/test/test_no_ssl.py
@@ -71,7 +71,7 @@ class TestWithoutSSL(unittest.TestCase):
sys.meta_path.insert(0, ssl_blocker)
def tearDown(self):
- assert sys.meta_path.pop(0) == ssl_blocker
+ sys.meta_path.remove(ssl_blocker)
module_stash.pop()
diff --git a/test/test_proxymanager.py b/test/test_proxymanager.py
index e7b5c48..7b19334 100644
--- a/test/test_proxymanager.py
+++ b/test/test_proxymanager.py
@@ -38,6 +38,10 @@ class TestProxyManager(unittest.TestCase):
p = ProxyManager('https://something')
self.assertEqual(p.proxy.port, 443)
+ def test_invalid_scheme(self):
+ self.assertRaises(AssertionError, ProxyManager, 'invalid://host/p')
+ self.assertRaises(ValueError, ProxyManager, 'invalid://host/p')
+
if __name__ == '__main__':
unittest.main()
diff --git a/test/test_response.py b/test/test_response.py
index 2e2be0e..47d0521 100644
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -7,7 +7,7 @@ try:
except ImportError:
import httplib
from urllib3.response import HTTPResponse
-from urllib3.exceptions import DecodeError, ResponseNotChunked
+from urllib3.exceptions import DecodeError, ResponseNotChunked, ProtocolError
from base64 import b64decode
@@ -487,7 +487,7 @@ class TestResponse(unittest.TestCase):
r.chunked = True
r.chunk_left = None
resp = HTTPResponse(r, preload_content=False, headers={'transfer-encoding': 'chunked'})
- self.assertRaises(httplib.IncompleteRead, next, resp.read_chunked())
+ self.assertRaises(ProtocolError, next, resp.read_chunked())
def test_chunked_response_without_crlf_on_end(self):
stream = [b"foo", b"bar", b"baz"]
diff --git a/test/with_dummyserver/test_connectionpool.py b/test/with_dummyserver/test_connectionpool.py
index d6cb162..741ae7b 100644
--- a/test/with_dummyserver/test_connectionpool.py
+++ b/test/with_dummyserver/test_connectionpool.py
@@ -36,7 +36,7 @@ from urllib3.util.timeout import Timeout
import tornado
from dummyserver.testcase import HTTPDummyServerTestCase
-from dummyserver.server import NoIPv6Warning
+from dummyserver.server import NoIPv6Warning, HAS_IPV6_AND_DNS
from nose.tools import timed
@@ -600,7 +600,7 @@ class TestConnectionPool(HTTPDummyServerTestCase):
def test_source_address(self):
for addr, is_ipv6 in VALID_SOURCE_ADDRESSES:
- if is_ipv6 and not socket.has_ipv6:
+ if is_ipv6 and not HAS_IPV6_AND_DNS:
warnings.warn("No IPv6 support: skipping.",
NoIPv6Warning)
continue
@@ -647,6 +647,27 @@ class TestConnectionPool(HTTPDummyServerTestCase):
self.assertEqual(b'123' * 4, response.read())
+ def test_cleanup_on_connection_error(self):
+ '''
+ Test that connections are recycled to the pool on
+ connection errors where no http response is received.
+ '''
+ poolsize = 3
+ with HTTPConnectionPool(self.host, self.port, maxsize=poolsize, block=True) as http:
+ self.assertEqual(http.pool.qsize(), poolsize)
+
+            # Force an error (MaxRetryError) by requesting a redirect with
+            # retries=0 and release_conn=False, so the connection is not
+            # implicitly released back to the pool.
+ self.assertRaises(MaxRetryError,
+ http.request, 'GET', '/redirect', fields={'target': '/'}, release_conn=False, retries=0)
+
+ r = http.request('GET', '/redirect', fields={'target': '/'}, release_conn=False, retries=1)
+ r.release_conn()
+
+ # the pool should still contain poolsize elements
+ self.assertEqual(http.pool.qsize(), http.pool.maxsize)
+
class TestRetry(HTTPDummyServerTestCase):
def setUp(self):
diff --git a/test/with_dummyserver/test_https.py b/test/with_dummyserver/test_https.py
index 992b8ef..63aea66 100644
--- a/test/with_dummyserver/test_https.py
+++ b/test/with_dummyserver/test_https.py
@@ -419,6 +419,11 @@ class TestHTTPS_TLSv1(HTTPSDummyServerTestCase):
self._pool.ca_certs = DEFAULT_CA
self._pool.request('GET', '/')
+ def test_set_cert_default_cert_required(self):
+ conn = VerifiedHTTPSConnection(self.host, self.port)
+ conn.set_cert(ca_certs='/etc/ssl/certs/custom.pem')
+ self.assertEqual(conn.cert_reqs, 'CERT_REQUIRED')
+
class TestHTTPS_NoSAN(HTTPSDummyServerTestCase):
certs = NO_SAN_CERTS
diff --git a/test/with_dummyserver/test_poolmanager.py b/test/with_dummyserver/test_poolmanager.py
index 7e51c73..099ac52 100644
--- a/test/with_dummyserver/test_poolmanager.py
+++ b/test/with_dummyserver/test_poolmanager.py
@@ -1,6 +1,8 @@
import unittest
import json
+from nose.plugins.skip import SkipTest
+from dummyserver.server import HAS_IPV6
from dummyserver.testcase import (HTTPDummyServerTestCase,
IPv6HTTPDummyServerTestCase)
from urllib3.poolmanager import PoolManager
@@ -128,6 +130,14 @@ class TestPoolManager(HTTPDummyServerTestCase):
def test_headers(self):
http = PoolManager(headers={'Foo': 'bar'})
+ r = http.request('GET', '%s/headers' % self.base_url)
+ returned_headers = json.loads(r.data.decode())
+ self.assertEqual(returned_headers.get('Foo'), 'bar')
+
+ r = http.request('POST', '%s/headers' % self.base_url)
+ returned_headers = json.loads(r.data.decode())
+ self.assertEqual(returned_headers.get('Foo'), 'bar')
+
r = http.request_encode_url('GET', '%s/headers' % self.base_url)
returned_headers = json.loads(r.data.decode())
self.assertEqual(returned_headers.get('Foo'), 'bar')
@@ -154,6 +164,9 @@ class TestPoolManager(HTTPDummyServerTestCase):
class TestIPv6PoolManager(IPv6HTTPDummyServerTestCase):
+ if not HAS_IPV6:
+ raise SkipTest("IPv6 is not supported on this system.")
+
def setUp(self):
self.base_url = 'http://[%s]:%d' % (self.host, self.port)
diff --git a/test/with_dummyserver/test_proxy_poolmanager.py b/test/with_dummyserver/test_proxy_poolmanager.py
index df300fe..c593f2d 100644
--- a/test/with_dummyserver/test_proxy_poolmanager.py
+++ b/test/with_dummyserver/test_proxy_poolmanager.py
@@ -4,11 +4,12 @@ import unittest
from nose.tools import timed
-from dummyserver.testcase import HTTPDummyProxyTestCase
+from dummyserver.testcase import HTTPDummyProxyTestCase, IPv6HTTPDummyProxyTestCase
from dummyserver.server import (
DEFAULT_CA, DEFAULT_CA_BAD, get_unreachable_address)
from .. import TARPIT_HOST
+from urllib3._collections import HTTPHeaderDict
from urllib3.poolmanager import proxy_from_url, ProxyManager
from urllib3.exceptions import (
MaxRetryError, SSLError, ProxyError, ConnectTimeoutError)
@@ -48,7 +49,7 @@ class TestHTTPProxyManager(HTTPDummyProxyTestCase):
def test_proxy_conn_fail(self):
host, port = get_unreachable_address()
- http = proxy_from_url('http://%s:%s/' % (host, port), retries=1)
+ http = proxy_from_url('http://%s:%s/' % (host, port), retries=1, timeout=0.05)
self.assertRaises(MaxRetryError, http.request, 'GET',
'%s/' % self.https_url)
self.assertRaises(MaxRetryError, http.request, 'GET',
@@ -223,6 +224,22 @@ class TestHTTPProxyManager(HTTPDummyProxyTestCase):
self.assertEqual(returned_headers.get('Host'),
'%s:%s'%(self.https_host,self.https_port))
+ def test_headerdict(self):
+ default_headers = HTTPHeaderDict(a='b')
+ proxy_headers = HTTPHeaderDict()
+ proxy_headers.add('foo', 'bar')
+
+ http = proxy_from_url(
+ self.proxy_url,
+ headers=default_headers,
+ proxy_headers=proxy_headers)
+
+ request_headers = HTTPHeaderDict(baz='quux')
+ r = http.request('GET', '%s/headers' % self.http_url, headers=request_headers)
+ returned_headers = json.loads(r.data.decode())
+ self.assertEqual(returned_headers.get('Foo'), 'bar')
+ self.assertEqual(returned_headers.get('Baz'), 'quux')
+
def test_proxy_pooling(self):
http = proxy_from_url(self.proxy_url)
@@ -283,5 +300,26 @@ class TestHTTPProxyManager(HTTPDummyProxyTestCase):
except MaxRetryError as e:
assert isinstance(e.reason, ConnectTimeoutError)
+
+class TestIPv6HTTPProxyManager(IPv6HTTPDummyProxyTestCase):
+
+ def setUp(self):
+ self.http_url = 'http://%s:%d' % (self.http_host, self.http_port)
+ self.http_url_alt = 'http://%s:%d' % (self.http_host_alt,
+ self.http_port)
+ self.https_url = 'https://%s:%d' % (self.https_host, self.https_port)
+ self.https_url_alt = 'https://%s:%d' % (self.https_host_alt,
+ self.https_port)
+ self.proxy_url = 'http://[%s]:%d' % (self.proxy_host, self.proxy_port)
+
+ def test_basic_ipv6_proxy(self):
+ http = proxy_from_url(self.proxy_url)
+
+ r = http.request('GET', '%s/' % self.http_url)
+ self.assertEqual(r.status, 200)
+
+ r = http.request('GET', '%s/' % self.https_url)
+ self.assertEqual(r.status, 200)
+
if __name__ == '__main__':
unittest.main()
diff --git a/test/with_dummyserver/test_socketlevel.py b/test/with_dummyserver/test_socketlevel.py
index 6c99653..5af00e0 100644
--- a/test/with_dummyserver/test_socketlevel.py
+++ b/test/with_dummyserver/test_socketlevel.py
@@ -10,17 +10,24 @@ from urllib3.exceptions import (
SSLError,
ProtocolError,
)
+from urllib3.response import httplib
from urllib3.util.ssl_ import HAS_SNI
from urllib3.util.timeout import Timeout
from urllib3.util.retry import Retry
+from urllib3._collections import HTTPHeaderDict
from dummyserver.testcase import SocketDummyServerTestCase
from dummyserver.server import (
DEFAULT_CERTS, DEFAULT_CA, get_unreachable_address)
-from .. import onlyPy3
+from .. import onlyPy3, LogRecorder
from nose.plugins.skip import SkipTest
+try:
+ from mimetools import Message as MimeToolMessage
+except ImportError:
+ class MimeToolMessage(object):
+ pass
from threading import Event
import socket
import ssl
@@ -119,8 +126,9 @@ class TestSocketClosing(SocketDummyServerTestCase):
def test_connection_refused(self):
# Does the pool retry if there is no listener on the port?
host, port = get_unreachable_address()
- pool = HTTPConnectionPool(host, port)
- self.assertRaises(MaxRetryError, pool.request, 'GET', '/', retries=0)
+ http = HTTPConnectionPool(host, port, maxsize=3, block=True)
+ self.assertRaises(MaxRetryError, http.request, 'GET', '/', retries=0, release_conn=False)
+ self.assertEqual(http.pool.qsize(), http.pool.maxsize)
def test_connection_read_timeout(self):
timed_out = Event()
@@ -133,13 +141,15 @@ class TestSocketClosing(SocketDummyServerTestCase):
sock.close()
self._start_server(socket_handler)
- pool = HTTPConnectionPool(self.host, self.port, timeout=0.001, retries=False)
+ http = HTTPConnectionPool(self.host, self.port, timeout=0.001, retries=False, maxsize=3, block=True)
try:
- self.assertRaises(ReadTimeoutError, pool.request, 'GET', '/')
+ self.assertRaises(ReadTimeoutError, http.request, 'GET', '/', release_conn=False)
finally:
timed_out.set()
+ self.assertEqual(http.pool.qsize(), http.pool.maxsize)
+
def test_https_connection_read_timeout(self):
""" Handshake timeouts should fail with a Timeout"""
timed_out = Event()
@@ -297,6 +307,63 @@ class TestSocketClosing(SocketDummyServerTestCase):
self.assertEqual(response.status, 200)
self.assertEqual(response.data, b'foo')
+ def test_connection_cleanup_on_read_timeout(self):
+ timed_out = Event()
+
+ def socket_handler(listener):
+ sock = listener.accept()[0]
+ buf = b''
+ body = 'Hi'
+ while not buf.endswith(b'\r\n\r\n'):
+ buf = sock.recv(65536)
+ sock.send(('HTTP/1.1 200 OK\r\n'
+ 'Content-Type: text/plain\r\n'
+ 'Content-Length: %d\r\n'
+ '\r\n' % len(body)).encode('utf-8'))
+
+ timed_out.wait()
+ sock.close()
+
+ self._start_server(socket_handler)
+ with HTTPConnectionPool(self.host, self.port) as pool:
+ poolsize = pool.pool.qsize()
+ response = pool.urlopen('GET', '/', retries=0, preload_content=False,
+ timeout=Timeout(connect=1, read=0.001))
+ try:
+ self.assertRaises(ReadTimeoutError, response.read)
+ self.assertEqual(poolsize, pool.pool.qsize())
+ finally:
+ timed_out.set()
+
+ def test_connection_cleanup_on_protocol_error_during_read(self):
+ body = 'Response'
+ partial_body = body[:2]
+
+ def socket_handler(listener):
+ sock = listener.accept()[0]
+
+ # Consume request
+ buf = b''
+ while not buf.endswith(b'\r\n\r\n'):
+ buf = sock.recv(65536)
+
+ # Send partial response and close socket.
+ sock.send((
+ 'HTTP/1.1 200 OK\r\n'
+ 'Content-Type: text/plain\r\n'
+ 'Content-Length: %d\r\n'
+ '\r\n'
+ '%s' % (len(body), partial_body)).encode('utf-8')
+ )
+ sock.close()
+
+ self._start_server(socket_handler)
+ with HTTPConnectionPool(self.host, self.port) as pool:
+ poolsize = pool.pool.qsize()
+ response = pool.request('GET', '/', retries=0, preload_content=False)
+
+ self.assertRaises(ProtocolError, response.read)
+ self.assertEqual(poolsize, pool.pool.qsize())
class TestProxyManager(SocketDummyServerTestCase):
@@ -355,7 +422,7 @@ class TestProxyManager(SocketDummyServerTestCase):
base_url = 'http://%s:%d' % (self.host, self.port)
# Define some proxy headers.
- proxy_headers = {'For The Proxy': 'YEAH!'}
+ proxy_headers = HTTPHeaderDict({'For The Proxy': 'YEAH!'})
proxy = proxy_from_url(base_url, proxy_headers=proxy_headers)
conn = proxy.connection_from_url('http://www.google.com/')
@@ -617,6 +684,86 @@ class TestHeaders(SocketDummyServerTestCase):
r = pool.request('GET', '/')
self.assertEqual(HEADERS, dict(r.headers.items())) # to preserve case sensitivity
+ def test_headers_are_sent_with_the_original_case(self):
+ headers = {'foo': 'bar', 'bAz': 'quux'}
+ parsed_headers = {}
+
+ def socket_handler(listener):
+ sock = listener.accept()[0]
+
+ buf = b''
+ while not buf.endswith(b'\r\n\r\n'):
+ buf += sock.recv(65536)
+
+ headers_list = [header for header in buf.split(b'\r\n')[1:] if header]
+
+ for header in headers_list:
+ (key, value) = header.split(b': ')
+ parsed_headers[key.decode()] = value.decode()
+
+            # Send a complete, body-less response (note Content-Length: 0).
+ sock.send((
+ 'HTTP/1.1 204 No Content\r\n'
+ 'Content-Length: 0\r\n'
+ '\r\n').encode('utf-8'))
+
+ sock.close()
+
+ self._start_server(socket_handler)
+ expected_headers = {'Accept-Encoding': 'identity',
+ 'Host': '{0}:{1}'.format(self.host, self.port)}
+ expected_headers.update(headers)
+
+ pool = HTTPConnectionPool(self.host, self.port, retries=False)
+ pool.request('GET', '/', headers=HTTPHeaderDict(headers))
+ self.assertEqual(expected_headers, parsed_headers)
+
+
+class TestBrokenHeaders(SocketDummyServerTestCase):
+ def setUp(self):
+ if issubclass(httplib.HTTPMessage, MimeToolMessage):
+ raise SkipTest('Header parsing errors not available')
+
+ super(TestBrokenHeaders, self).setUp()
+
+ def _test_broken_header_parsing(self, headers):
+ handler = create_response_handler((
+ b'HTTP/1.1 200 OK\r\n'
+ b'Content-Length: 0\r\n'
+ b'Content-type: text/plain\r\n'
+ ) + b'\r\n'.join(headers) + b'\r\n'
+ )
+
+ self._start_server(handler)
+ pool = HTTPConnectionPool(self.host, self.port, retries=False)
+
+ with LogRecorder() as logs:
+ pool.request('GET', '/')
+
+ for record in logs:
+ if 'Failed to parse headers' in record.msg and \
+ pool._absolute_url('/') == record.args[0]:
+ return
+ self.fail('Missing log about unparsed headers')
+
+ def test_header_without_name(self):
+ self._test_broken_header_parsing([
+ b': Value\r\n',
+ b'Another: Header\r\n',
+ ])
+
+ def test_header_without_name_or_value(self):
+ self._test_broken_header_parsing([
+ b':\r\n',
+ b'Another: Header\r\n',
+ ])
+
+ def test_header_without_colon_or_value(self):
+ self._test_broken_header_parsing([
+ b'Broken Header',
+ b'Another: Header',
+ ])
+
class TestHEAD(SocketDummyServerTestCase):
def test_chunked_head_response_does_not_hang(self):