author     SVN-Git Migration <python-modules-team@lists.alioth.debian.org>  2015-10-08 13:19:35 -0700
committer  SVN-Git Migration <python-modules-team@lists.alioth.debian.org>  2015-10-08 13:19:35 -0700
commit     52980ebd0a4eb75acf055a2256e095772c1fa7c6 (patch)
tree       f339a7e66934caa7948c15e8c5bd3ea04ee72e5a /test
parent     92b84b67f7b187b81dacbf1ae46d59a1d0b5b125 (diff)
Imported Upstream version 1.7.1
Diffstat (limited to 'test')
-rw-r--r--  test/__init__.py             |   0
-rw-r--r--  test/benchmark.py            |  77
-rw-r--r--  test/test_connectionpool.py  |  29
-rw-r--r--  test/test_exceptions.py      |  30
-rw-r--r--  test/test_fields.py          |  44
-rw-r--r--  test/test_filepost.py        |  14
-rw-r--r--  test/test_proxymanager.py    |  18
-rw-r--r--  test/test_response.py        | 135
-rw-r--r--  test/test_util.py            | 155
9 files changed, 474 insertions(+), 28 deletions(-)
diff --git a/test/__init__.py b/test/__init__.py
new file mode 100644
index 0000000..e69de29
--- /dev/null
+++ b/test/__init__.py
diff --git a/test/benchmark.py b/test/benchmark.py
new file mode 100644
index 0000000..e7049c4
--- /dev/null
+++ b/test/benchmark.py
@@ -0,0 +1,77 @@
+#!/usr/bin/env python
+
+"""
+Rudimentary benchmark comparing ConnectionPool against standard urllib to
+demonstrate the usefulness of connection re-use.
+"""
+from __future__ import print_function
+
+import sys
+import time
+import urllib
+
+sys.path.append('../')
+import urllib3
+
+
+# URLs to download. The specific URLs don't matter, as long as they're from the
+# same host, so we can take advantage of connection re-use.
+TO_DOWNLOAD = [
+    'http://code.google.com/apis/apps/',
+    'http://code.google.com/apis/base/',
+    'http://code.google.com/apis/blogger/',
+    'http://code.google.com/apis/calendar/',
+    'http://code.google.com/apis/codesearch/',
+    'http://code.google.com/apis/contact/',
+    'http://code.google.com/apis/books/',
+    'http://code.google.com/apis/documents/',
+    'http://code.google.com/apis/finance/',
+    'http://code.google.com/apis/health/',
+    'http://code.google.com/apis/notebook/',
+    'http://code.google.com/apis/picasaweb/',
+    'http://code.google.com/apis/spreadsheets/',
+    'http://code.google.com/apis/webmastertools/',
+    'http://code.google.com/apis/youtube/',
+]
+
+
+def urllib_get(url_list):
+    assert url_list
+    for url in url_list:
+        now = time.time()
+        r = urllib.urlopen(url)
+        elapsed = time.time() - now
+        print("Got in %0.3fs: %s" % (elapsed, url))
+
+
+def pool_get(url_list):
+    assert url_list
+    pool = urllib3.connection_from_url(url_list[0])
+    for url in url_list:
+        now = time.time()
+        r = pool.get_url(url)
+        elapsed = time.time() - now
+        print("Got in %0.3fs: %s" % (elapsed, url))
+
+
+if __name__ == '__main__':
+    print("Running pool_get ...")
+    now = time.time()
+    pool_get(TO_DOWNLOAD)
+    pool_elapsed = time.time() - now
+
+    print("Running urllib_get ...")
+    now = time.time()
+    urllib_get(TO_DOWNLOAD)
+    urllib_elapsed = time.time() - now
+
+    print("Completed pool_get in %0.3fs" % pool_elapsed)
+    print("Completed urllib_get in %0.3fs" % urllib_elapsed)
+
+
+"""
+Example results:
+
+Completed pool_get in 1.163s
+Completed urllib_get in 2.318s
+"""
diff --git a/test/test_connectionpool.py b/test/test_connectionpool.py
index a7e104a..ac1768e 100644
--- a/test/test_connectionpool.py
+++ b/test/test_connectionpool.py
@@ -1,6 +1,11 @@
import unittest
-from urllib3.connectionpool import connection_from_url, HTTPConnectionPool
+from urllib3.connectionpool import (
+ connection_from_url,
+ HTTPConnection,
+ HTTPConnectionPool,
+)
+from urllib3.util import Timeout
from urllib3.packages.ssl_match_hostname import CertificateError
from urllib3.exceptions import (
ClosedPoolError,
@@ -8,7 +13,7 @@ from urllib3.exceptions import (
HostChangedError,
MaxRetryError,
SSLError,
- TimeoutError,
+ ReadTimeoutError,
)
from socket import error as SocketError, timeout as SocketTimeout
@@ -52,6 +57,7 @@ class TestConnectionPool(unittest.TestCase):
c = connection_from_url(a)
self.assertFalse(c.is_same_host(b), "%s =? %s" % (a, b))
+
def test_max_connections(self):
pool = HTTPConnectionPool(host='localhost', maxsize=1, block=True)
@@ -108,6 +114,7 @@ class TestConnectionPool(unittest.TestCase):
"Max retries exceeded with url: Test. "
"(Caused by {0}: Test)".format(str(err.__class__)))
+
def test_pool_size(self):
POOL_SIZE = 1
pool = HTTPConnectionPool(host='localhost', maxsize=POOL_SIZE, block=True)
@@ -122,8 +129,8 @@ class TestConnectionPool(unittest.TestCase):
self.assertEqual(pool.pool.qsize(), POOL_SIZE)
#make sure that all of the exceptions return the connection to the pool
- _test(Empty, TimeoutError)
- _test(SocketTimeout, TimeoutError)
+ _test(Empty, ReadTimeoutError)
+ _test(SocketTimeout, ReadTimeoutError)
_test(BaseSSLError, SSLError)
_test(CertificateError, SSLError)
@@ -166,6 +173,20 @@ class TestConnectionPool(unittest.TestCase):
self.assertRaises(Empty, old_pool_queue.get, block=False)
+ def test_pool_timeouts(self):
+ pool = HTTPConnectionPool(host='localhost')
+ conn = pool._new_conn()
+ self.assertEqual(conn.__class__, HTTPConnection)
+ self.assertEqual(pool.timeout.__class__, Timeout)
+ self.assertEqual(pool.timeout._read, Timeout.DEFAULT_TIMEOUT)
+ self.assertEqual(pool.timeout._connect, Timeout.DEFAULT_TIMEOUT)
+ self.assertEqual(pool.timeout.total, None)
+
+ pool = HTTPConnectionPool(host='localhost', timeout=3)
+ self.assertEqual(pool.timeout._read, 3)
+ self.assertEqual(pool.timeout._connect, 3)
+ self.assertEqual(pool.timeout.total, None)
+
if __name__ == '__main__':
unittest.main()
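
test_pool_timeouts above checks that a bare number passed as timeout= applies to both the
connect and read phases. A short sketch of the more explicit form, assuming the Timeout
class from urllib3.util that the test imports:

    from urllib3 import HTTPConnectionPool
    from urllib3.util import Timeout

    # Separate budgets for the connect and read phases; a plain
    # HTTPConnectionPool(host='localhost', timeout=3) applies 3s to both.
    timeout = Timeout(connect=2.0, read=7.0)
    pool = HTTPConnectionPool(host='localhost', timeout=timeout)
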
diff --git a/test/test_exceptions.py b/test/test_exceptions.py
index 3e02ca6..e20649b 100644
--- a/test/test_exceptions.py
+++ b/test/test_exceptions.py
@@ -1,19 +1,35 @@
import unittest
import pickle
-from urllib3.exceptions import HTTPError, MaxRetryError, LocationParseError
+from urllib3.exceptions import (HTTPError, MaxRetryError, LocationParseError,
+ ClosedPoolError, EmptyPoolError,
+ HostChangedError, ReadTimeoutError,
+ ConnectTimeoutError)
from urllib3.connectionpool import HTTPConnectionPool
class TestPickle(unittest.TestCase):
+ def cycle(self, item):
+ return pickle.loads(pickle.dumps(item))
+
def test_exceptions(self):
- assert pickle.dumps(HTTPError(None))
- assert pickle.dumps(MaxRetryError(None, None, None))
- assert pickle.dumps(LocationParseError(None))
+ assert self.cycle(HTTPError(None))
+ assert self.cycle(MaxRetryError(None, None, None))
+ assert self.cycle(LocationParseError(None))
+ assert self.cycle(ConnectTimeoutError(None))
def test_exceptions_with_objects(self):
- assert pickle.dumps(HTTPError('foo'))
- assert pickle.dumps(MaxRetryError(HTTPConnectionPool('localhost'), '/', None))
- assert pickle.dumps(LocationParseError('fake location'))
+ assert self.cycle(HTTPError('foo'))
+ assert self.cycle(MaxRetryError(HTTPConnectionPool('localhost'),
+ '/', None))
+ assert self.cycle(LocationParseError('fake location'))
+ assert self.cycle(ClosedPoolError(HTTPConnectionPool('localhost'),
+ None))
+ assert self.cycle(EmptyPoolError(HTTPConnectionPool('localhost'),
+ None))
+ assert self.cycle(HostChangedError(HTTPConnectionPool('localhost'),
+ '/', None))
+ assert self.cycle(ReadTimeoutError(HTTPConnectionPool('localhost'),
+ '/', None))
diff --git a/test/test_fields.py b/test/test_fields.py
new file mode 100644
index 0000000..888c2d5
--- /dev/null
+++ b/test/test_fields.py
@@ -0,0 +1,44 @@
+import unittest
+
+from urllib3.fields import guess_content_type, RequestField
+from urllib3.packages.six import b, u
+
+
+class TestRequestField(unittest.TestCase):
+
+    def test_guess_content_type(self):
+        self.assertEqual(guess_content_type('image.jpg'), 'image/jpeg')
+        self.assertEqual(guess_content_type('notsure'), 'application/octet-stream')
+        self.assertEqual(guess_content_type(None), 'application/octet-stream')
+
+    def test_create(self):
+        simple_field = RequestField('somename', 'data')
+        self.assertEqual(simple_field.render_headers(), '\r\n')
+        filename_field = RequestField('somename', 'data', filename='somefile.txt')
+        self.assertEqual(filename_field.render_headers(), '\r\n')
+        headers_field = RequestField('somename', 'data', headers={'Content-Length': 4})
+        self.assertEqual(headers_field.render_headers(),
+                         'Content-Length: 4\r\n'
+                         '\r\n')
+
+    def test_make_multipart(self):
+        field = RequestField('somename', 'data')
+        field.make_multipart(content_type='image/jpg', content_location='/test')
+        self.assertEqual(field.render_headers(),
+                         'Content-Disposition: form-data; name="somename"\r\n'
+                         'Content-Type: image/jpg\r\n'
+                         'Content-Location: /test\r\n'
+                         '\r\n')
+
+    def test_render_parts(self):
+        field = RequestField('somename', 'data')
+        parts = field._render_parts({'name': 'value', 'filename': 'value'})
+        self.assertTrue('name="value"' in parts)
+        self.assertTrue('filename="value"' in parts)
+        parts = field._render_parts([('name', 'value'), ('filename', 'value')])
+        self.assertEqual(parts, 'name="value"; filename="value"')
+
+    def test_render_part(self):
+        field = RequestField('somename', 'data')
+        param = field._render_part('filename', u('n\u00e4me'))
+        self.assertEqual(param, "filename*=utf-8''n%C3%A4me")
diff --git a/test/test_filepost.py b/test/test_filepost.py
index 70ab100..ca33d61 100644
--- a/test/test_filepost.py
+++ b/test/test_filepost.py
@@ -1,6 +1,7 @@
import unittest
from urllib3.filepost import encode_multipart_formdata, iter_fields
+from urllib3.fields import RequestField
from urllib3.packages.six import b, u
@@ -117,3 +118,16 @@ class TestMultipartEncoding(unittest.TestCase):
self.assertEqual(content_type,
'multipart/form-data; boundary=' + str(BOUNDARY))
+
+ def test_request_fields(self):
+ fields = [RequestField('k', b'v', filename='somefile.txt', headers={'Content-Type': 'image/jpeg'})]
+
+ encoded, content_type = encode_multipart_formdata(fields, boundary=BOUNDARY)
+
+ self.assertEquals(encoded,
+ b'--' + b(BOUNDARY) + b'\r\n'
+ b'Content-Type: image/jpeg\r\n'
+ b'\r\n'
+ b'v\r\n'
+ b'--' + b(BOUNDARY) + b'--\r\n'
+ )
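
test_request_fields shows that encode_multipart_formdata accepts prepared RequestField
objects, not only (name, value) tuples. A small sketch of building one part by hand,
assuming the make_multipart helper exercised in test_fields.py; names and content types
are illustrative:

    from urllib3.fields import RequestField
    from urllib3.filepost import encode_multipart_formdata

    # make_multipart fills in the Content-Disposition / Content-Type
    # headers that render_headers() emits for the part.
    field = RequestField('somefile', b'v', filename='somefile.txt')
    field.make_multipart(content_type='image/jpeg')
    body, content_type = encode_multipart_formdata([field])
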
diff --git a/test/test_proxymanager.py b/test/test_proxymanager.py
index 64c86e8..e7b5c48 100644
--- a/test/test_proxymanager.py
+++ b/test/test_proxymanager.py
@@ -5,7 +5,7 @@ from urllib3.poolmanager import ProxyManager
class TestProxyManager(unittest.TestCase):
def test_proxy_headers(self):
- p = ProxyManager(None)
+ p = ProxyManager('http://something:1234')
url = 'http://pypi.python.org/test'
# Verify default headers
@@ -23,5 +23,21 @@ class TestProxyManager(unittest.TestCase):
self.assertEqual(headers, provided_headers)
+ # Verify proxy with nonstandard port
+ provided_headers = {'Accept': 'application/json'}
+ expected_headers = provided_headers.copy()
+ expected_headers.update({'Host': 'pypi.python.org:8080'})
+ url_with_port = 'http://pypi.python.org:8080/test'
+ headers = p._set_proxy_headers(url_with_port, provided_headers)
+
+ self.assertEqual(headers, expected_headers)
+
+ def test_default_port(self):
+ p = ProxyManager('http://something')
+ self.assertEqual(p.proxy.port, 80)
+ p = ProxyManager('https://something')
+ self.assertEqual(p.proxy.port, 443)
+
+
if __name__ == '__main__':
unittest.main()
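
ProxyManager now requires the proxy URL up front (the old ProxyManager(None) form is gone)
and derives a default port from the scheme. A minimal sketch of routing a request through
it, assuming its request() method; the proxy address is illustrative:

    from urllib3 import ProxyManager

    # Requests for absolute http:// URLs are sent via the proxy, with a Host
    # header set for the target (including a nonstandard port, as tested above).
    proxy = ProxyManager('http://localhost:3128')
    r = proxy.request('GET', 'http://pypi.python.org:8080/test')
    print(r.status)
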
diff --git a/test/test_response.py b/test/test_response.py
index 199e379..90d34eb 100644
--- a/test/test_response.py
+++ b/test/test_response.py
@@ -1,6 +1,6 @@
import unittest
-from io import BytesIO
+from io import BytesIO, BufferedReader
from urllib3.response import HTTPResponse
from urllib3.exceptions import DecodeError
@@ -112,5 +112,138 @@ class TestResponse(unittest.TestCase):
self.assertEqual(r.read(1), b'f')
self.assertEqual(r.read(2), b'oo')
+ def test_io(self):
+ import socket
+ try:
+ from http.client import HTTPResponse as OldHTTPResponse
+ except:
+ from httplib import HTTPResponse as OldHTTPResponse
+
+ fp = BytesIO(b'foo')
+ resp = HTTPResponse(fp, preload_content=False)
+
+ self.assertEqual(resp.closed, False)
+ self.assertEqual(resp.readable(), True)
+ self.assertEqual(resp.writable(), False)
+ self.assertRaises(IOError, resp.fileno)
+
+ resp.close()
+ self.assertEqual(resp.closed, True)
+
+ # Try closing with an `httplib.HTTPResponse`, because it has an
+ # `isclosed` method.
+ hlr = OldHTTPResponse(socket.socket())
+ resp2 = HTTPResponse(hlr, preload_content=False)
+ self.assertEqual(resp2.closed, False)
+ resp2.close()
+ self.assertEqual(resp2.closed, True)
+
+ # Also try when only data is present.
+ resp3 = HTTPResponse('foodata')
+ self.assertRaises(IOError, resp3.fileno)
+
+ resp3._fp = 2
+ # A corner case where _fp is present but doesn't have `closed`,
+ # `isclosed`, or `fileno`. Unlikely, but possible.
+ self.assertEqual(resp3.closed, True)
+ self.assertRaises(IOError, resp3.fileno)
+
+ def test_io_bufferedreader(self):
+ fp = BytesIO(b'foo')
+ resp = HTTPResponse(fp, preload_content=False)
+ br = BufferedReader(resp)
+
+ self.assertEqual(br.read(), b'foo')
+
+ br.close()
+ self.assertEqual(resp.closed, True)
+
+ def test_streaming(self):
+ fp = BytesIO(b'foo')
+ resp = HTTPResponse(fp, preload_content=False)
+ stream = resp.stream(2, decode_content=False)
+
+ self.assertEqual(next(stream), b'fo')
+ self.assertEqual(next(stream), b'o')
+ self.assertRaises(StopIteration, next, stream)
+
+ def test_gzipped_streaming(self):
+ import zlib
+ compress = zlib.compressobj(6, zlib.DEFLATED, 16 + zlib.MAX_WBITS)
+ data = compress.compress(b'foo')
+ data += compress.flush()
+
+ fp = BytesIO(data)
+ resp = HTTPResponse(fp, headers={'content-encoding': 'gzip'},
+ preload_content=False)
+ stream = resp.stream(2)
+
+ self.assertEqual(next(stream), b'f')
+ self.assertEqual(next(stream), b'oo')
+ self.assertRaises(StopIteration, next, stream)
+
+ def test_deflate_streaming(self):
+ import zlib
+ data = zlib.compress(b'foo')
+
+ fp = BytesIO(data)
+ resp = HTTPResponse(fp, headers={'content-encoding': 'deflate'},
+ preload_content=False)
+ stream = resp.stream(2)
+
+ self.assertEqual(next(stream), b'f')
+ self.assertEqual(next(stream), b'oo')
+ self.assertRaises(StopIteration, next, stream)
+
+ def test_deflate2_streaming(self):
+ import zlib
+ compress = zlib.compressobj(6, zlib.DEFLATED, -zlib.MAX_WBITS)
+ data = compress.compress(b'foo')
+ data += compress.flush()
+
+ fp = BytesIO(data)
+ resp = HTTPResponse(fp, headers={'content-encoding': 'deflate'},
+ preload_content=False)
+ stream = resp.stream(2)
+
+ self.assertEqual(next(stream), b'f')
+ self.assertEqual(next(stream), b'oo')
+ self.assertRaises(StopIteration, next, stream)
+
+ def test_empty_stream(self):
+ fp = BytesIO(b'')
+ resp = HTTPResponse(fp, preload_content=False)
+ stream = resp.stream(2, decode_content=False)
+
+ self.assertRaises(StopIteration, next, stream)
+
+ def test_mock_httpresponse_stream(self):
+ # Mock out an HTTP request that does enough to make it through urllib3's
+ # read() and close() calls, and also exhausts an underlying file
+ # object.
+ class MockHTTPRequest(object):
+ self.fp = None
+
+ def read(self, amt):
+ data = self.fp.read(amt)
+ if not data:
+ self.fp = None
+
+ return data
+
+ def close(self):
+ self.fp = None
+
+ bio = BytesIO(b'foo')
+ fp = MockHTTPRequest()
+ fp.fp = bio
+ resp = HTTPResponse(fp, preload_content=False)
+ stream = resp.stream(2)
+
+ self.assertEqual(next(stream), b'fo')
+ self.assertEqual(next(stream), b'o')
+ self.assertRaises(StopIteration, next, stream)
+
+
if __name__ == '__main__':
unittest.main()
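
The streaming tests above all follow the same pattern: keep the body in the underlying
file object with preload_content=False, then pull chunks on demand. A condensed sketch of
that consuming side, using the same BytesIO stand-in as the tests:

    from io import BytesIO
    from urllib3.response import HTTPResponse

    # stream() yields chunks until the underlying file object is exhausted;
    # decode_content=True would also gunzip/inflate based on the
    # Content-Encoding header, as the gzip/deflate tests check.
    resp = HTTPResponse(BytesIO(b'foobar'), preload_content=False)
    for chunk in resp.stream(2, decode_content=False):
        print(chunk)
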
diff --git a/test/test_util.py b/test/test_util.py
index a989da6..b465fef 100644
--- a/test/test_util.py
+++ b/test/test_util.py
@@ -1,10 +1,23 @@
-import unittest
import logging
+import unittest
+
+from mock import patch
from urllib3 import add_stderr_logger
-from urllib3.util import get_host, make_headers, split_first, parse_url, Url
-from urllib3.exceptions import LocationParseError
+from urllib3.util import (
+ get_host,
+ make_headers,
+ split_first,
+ parse_url,
+ Timeout,
+ Url,
+)
+from urllib3.exceptions import LocationParseError, TimeoutStateError
+# This number represents a time in seconds; it doesn't mean anything in
+# isolation. It is set to a high-ish value to avoid conflicts with the smaller
+# numbers used for timeouts.
+TIMEOUT_EPOCH = 1000
class TestUtil(unittest.TestCase):
def test_get_host(self):
@@ -34,20 +47,20 @@ class TestUtil(unittest.TestCase):
'http://173.194.35.7:80/test': ('http', '173.194.35.7', 80),
# IPv6
- '[2a00:1450:4001:c01::67]': ('http', '2a00:1450:4001:c01::67', None),
- 'http://[2a00:1450:4001:c01::67]': ('http', '2a00:1450:4001:c01::67', None),
- 'http://[2a00:1450:4001:c01::67]/test': ('http', '2a00:1450:4001:c01::67', None),
- 'http://[2a00:1450:4001:c01::67]:80': ('http', '2a00:1450:4001:c01::67', 80),
- 'http://[2a00:1450:4001:c01::67]:80/test': ('http', '2a00:1450:4001:c01::67', 80),
+ '[2a00:1450:4001:c01::67]': ('http', '[2a00:1450:4001:c01::67]', None),
+ 'http://[2a00:1450:4001:c01::67]': ('http', '[2a00:1450:4001:c01::67]', None),
+ 'http://[2a00:1450:4001:c01::67]/test': ('http', '[2a00:1450:4001:c01::67]', None),
+ 'http://[2a00:1450:4001:c01::67]:80': ('http', '[2a00:1450:4001:c01::67]', 80),
+ 'http://[2a00:1450:4001:c01::67]:80/test': ('http', '[2a00:1450:4001:c01::67]', 80),
# More IPv6 from http://www.ietf.org/rfc/rfc2732.txt
- 'http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:8000/index.html': ('http', 'FEDC:BA98:7654:3210:FEDC:BA98:7654:3210', 8000),
- 'http://[1080:0:0:0:8:800:200C:417A]/index.html': ('http', '1080:0:0:0:8:800:200C:417A', None),
- 'http://[3ffe:2a00:100:7031::1]': ('http', '3ffe:2a00:100:7031::1', None),
- 'http://[1080::8:800:200C:417A]/foo': ('http', '1080::8:800:200C:417A', None),
- 'http://[::192.9.5.5]/ipng': ('http', '::192.9.5.5', None),
- 'http://[::FFFF:129.144.52.38]:42/index.html': ('http', '::FFFF:129.144.52.38', 42),
- 'http://[2010:836B:4179::836B:4179]': ('http', '2010:836B:4179::836B:4179', None),
+ 'http://[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]:8000/index.html': ('http', '[FEDC:BA98:7654:3210:FEDC:BA98:7654:3210]', 8000),
+ 'http://[1080:0:0:0:8:800:200C:417A]/index.html': ('http', '[1080:0:0:0:8:800:200C:417A]', None),
+ 'http://[3ffe:2a00:100:7031::1]': ('http', '[3ffe:2a00:100:7031::1]', None),
+ 'http://[1080::8:800:200C:417A]/foo': ('http', '[1080::8:800:200C:417A]', None),
+ 'http://[::192.9.5.5]/ipng': ('http', '[::192.9.5.5]', None),
+ 'http://[::FFFF:129.144.52.38]:42/index.html': ('http', '[::FFFF:129.144.52.38]', 42),
+ 'http://[2010:836B:4179::836B:4179]': ('http', '[2010:836B:4179::836B:4179]', None),
}
for url, expected_host in url_host_map.items():
returned_host = get_host(url)
@@ -57,6 +70,8 @@ class TestUtil(unittest.TestCase):
# TODO: Add more tests
invalid_host = [
'http://google.com:foo',
+ 'http://::1/',
+ 'http://::1:80/',
]
for location in invalid_host:
@@ -83,6 +98,9 @@ class TestUtil(unittest.TestCase):
returned_url = parse_url(url)
self.assertEquals(returned_url, expected_url)
+ def test_parse_url_invalid_IPv6(self):
+ self.assertRaises(ValueError, parse_url, '[::1')
+
def test_request_uri(self):
url_host_map = {
'http://google.com/mail': '/mail',
@@ -99,6 +117,17 @@ class TestUtil(unittest.TestCase):
returned_url = parse_url(url)
self.assertEquals(returned_url.request_uri, expected_request_uri)
+ def test_netloc(self):
+ url_netloc_map = {
+ 'http://google.com/mail': 'google.com',
+ 'http://google.com:80/mail': 'google.com:80',
+ 'google.com/foobar': 'google.com',
+ 'google.com:12345': 'google.com:12345',
+ }
+
+ for url, expected_netloc in url_netloc_map.items():
+ self.assertEquals(parse_url(url).netloc, expected_netloc)
+
def test_make_headers(self):
self.assertEqual(
make_headers(accept_encoding=True),
@@ -148,3 +177,99 @@ class TestUtil(unittest.TestCase):
logger.debug('Testing add_stderr_logger')
logger.removeHandler(handler)
+
+ def _make_time_pass(self, seconds, timeout, time_mock):
+ """ Make some time pass for the timeout object """
+ time_mock.return_value = TIMEOUT_EPOCH
+ timeout.start_connect()
+ time_mock.return_value = TIMEOUT_EPOCH + seconds
+ return timeout
+
+ def test_invalid_timeouts(self):
+ try:
+ Timeout(total=-1)
+ self.fail("negative value should throw exception")
+ except ValueError as e:
+ self.assertTrue('less than' in str(e))
+ try:
+ Timeout(connect=2, total=-1)
+ self.fail("negative value should throw exception")
+ except ValueError as e:
+ self.assertTrue('less than' in str(e))
+
+ try:
+ Timeout(read=-1)
+ self.fail("negative value should throw exception")
+ except ValueError as e:
+ self.assertTrue('less than' in str(e))
+
+ # Booleans are also allowed by socket.settimeout and are converted to the
+ # equivalent float (1.0 for True, 0.0 for False).
+ Timeout(connect=False, read=True)
+
+ try:
+ Timeout(read="foo")
+ self.fail("string value should not be allowed")
+ except ValueError as e:
+ self.assertTrue('int or float' in str(e))
+
+
+ @patch('urllib3.util.current_time')
+ def test_timeout(self, current_time):
+ timeout = Timeout(total=3)
+
+ # make 'no time' elapse
+ timeout = self._make_time_pass(seconds=0, timeout=timeout,
+ time_mock=current_time)
+ self.assertEqual(timeout.read_timeout, 3)
+ self.assertEqual(timeout.connect_timeout, 3)
+
+ timeout = Timeout(total=3, connect=2)
+ self.assertEqual(timeout.connect_timeout, 2)
+
+ timeout = Timeout()
+ self.assertEqual(timeout.connect_timeout, Timeout.DEFAULT_TIMEOUT)
+
+ # Connect takes 5 seconds, leaving 5 seconds for read
+ timeout = Timeout(total=10, read=7)
+ timeout = self._make_time_pass(seconds=5, timeout=timeout,
+ time_mock=current_time)
+ self.assertEqual(timeout.read_timeout, 5)
+
+ # Connect takes 2 seconds, read timeout still 7 seconds
+ timeout = Timeout(total=10, read=7)
+ timeout = self._make_time_pass(seconds=2, timeout=timeout,
+ time_mock=current_time)
+ self.assertEqual(timeout.read_timeout, 7)
+
+ timeout = Timeout(total=10, read=7)
+ self.assertEqual(timeout.read_timeout, 7)
+
+ timeout = Timeout(total=None, read=None, connect=None)
+ self.assertEqual(timeout.connect_timeout, None)
+ self.assertEqual(timeout.read_timeout, None)
+ self.assertEqual(timeout.total, None)
+
+
+ def test_timeout_str(self):
+ timeout = Timeout(connect=1, read=2, total=3)
+ self.assertEqual(str(timeout), "Timeout(connect=1, read=2, total=3)")
+ timeout = Timeout(connect=1, read=None, total=3)
+ self.assertEqual(str(timeout), "Timeout(connect=1, read=None, total=3)")
+
+
+ @patch('urllib3.util.current_time')
+ def test_timeout_elapsed(self, current_time):
+ current_time.return_value = TIMEOUT_EPOCH
+ timeout = Timeout(total=3)
+ self.assertRaises(TimeoutStateError, timeout.get_connect_duration)
+
+ timeout.start_connect()
+ self.assertRaises(TimeoutStateError, timeout.start_connect)
+
+ current_time.return_value = TIMEOUT_EPOCH + 2
+ self.assertEqual(timeout.get_connect_duration(), 2)
+ current_time.return_value = TIMEOUT_EPOCH + 37
+ self.assertEqual(timeout.get_connect_duration(), 37)
+
+
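
The arithmetic that test_timeout mocks out is simple: once the connect phase starts, the
read timeout is capped at total minus the connect time already spent. A worked sketch,
assuming the same Timeout class with real wall-clock time instead of the mocked
current_time:

    from urllib3.util import Timeout

    # total=10 with read=7: a connect phase that used 5 seconds leaves
    # min(7, 10 - 5) = 5 seconds of read budget.
    t = Timeout(total=10, read=7)
    t.start_connect()
    # ... connect the socket here ...
    remaining = t.read_timeout   # min(read, total - elapsed connect time)
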