aboutsummaryrefslogtreecommitdiff
path: root/test/benchmark.py
diff options
context:
space:
mode:
authorSVN-Git Migration <python-modules-team@lists.alioth.debian.org>2015-10-08 13:19:36 -0700
committerSVN-Git Migration <python-modules-team@lists.alioth.debian.org>2015-10-08 13:19:36 -0700
commit5f949ee35667a6065ab02a3e7ab8c98c9fcdcaed (patch)
tree35ce945e6f6fe74276a6745c96e6a48f6e5d3c68 /test/benchmark.py
parent52980ebd0a4eb75acf055a2256e095772c1fa7c6 (diff)
downloadpython-urllib3-5f949ee35667a6065ab02a3e7ab8c98c9fcdcaed.tar
python-urllib3-5f949ee35667a6065ab02a3e7ab8c98c9fcdcaed.tar.gz
Imported Upstream version 1.8
Diffstat (limited to 'test/benchmark.py')
-rw-r--r--test/benchmark.py77
1 files changed, 0 insertions, 77 deletions
diff --git a/test/benchmark.py b/test/benchmark.py
deleted file mode 100644
index e7049c4..0000000
--- a/test/benchmark.py
+++ /dev/null
@@ -1,77 +0,0 @@
-#!/usr/bin/env python
-
-"""
-Really simple rudimentary benchmark to compare ConnectionPool versus standard
-urllib to demonstrate the usefulness of connection re-using.
-"""
-from __future__ import print_function
-
-import sys
-import time
-import urllib
-
-sys.path.append('../')
-import urllib3
-
-
-# URLs to download. Doesn't matter as long as they're from the same host, so we
-# can take advantage of connection re-using.
# All URLs live under one base path on a single host, so the pooled
# benchmark can reuse a single connection for every request.
_API_BASE = 'http://code.google.com/apis/%s/'
TO_DOWNLOAD = [
    _API_BASE % service
    for service in (
        'apps', 'base', 'blogger', 'calendar', 'codesearch',
        'contact', 'books', 'documents', 'finance', 'health',
        'notebook', 'picasaweb', 'spreadsheets', 'webmastertools',
        'youtube',
    )
]
-
-
def urllib_get(url_list):
    """Fetch every URL with plain urllib, opening a fresh connection each time.

    Counterpart to ``pool_get``: the per-request connection setup is the
    overhead this benchmark is meant to expose.

    :param url_list: non-empty list of URL strings to download.
    """
    assert url_list
    for url in url_list:
        now = time.time()
        # NOTE(review): ``urllib.urlopen`` is the Python 2 API
        # (``urllib.request.urlopen`` on Python 3); kept to match the
        # era of this script. The response body is not read, so only
        # connection setup + headers are timed — same as pool_get.
        urllib.urlopen(url)
        elapsed = time.time() - now
        # Fix: "%0.3fs" (seconds unit) to match pool_get's output format.
        print("Got in %0.3fs: %s" % (elapsed, url))
-
-
def pool_get(url_list):
    """Fetch every URL through one shared urllib3 connection pool.

    A single pool is keyed off the first URL's host; every request then
    reuses that pool's connection, timing each fetch individually.

    :param url_list: non-empty list of URL strings to download.
    """
    assert url_list
    pool = urllib3.connection_from_url(url_list[0])
    for target in url_list:
        started = time.time()
        pool.get_url(target)
        print("Got in %0.3fs: %s" % (time.time() - started, target))
-
-
if __name__ == '__main__':
    # Run both variants over the same URL set and record total wall time.
    timings = {}
    for label, fetch in (('pool_get', pool_get), ('urllib_get', urllib_get)):
        print("Running %s ..." % label)
        started = time.time()
        fetch(TO_DOWNLOAD)
        timings[label] = time.time() - started

    print("Completed pool_get in %0.3fs" % timings['pool_get'])
    print("Completed urllib_get in %0.3fs" % timings['urllib_get'])
-
-
-"""
-Example results:
-
-Completed pool_get in 1.163s
-Completed urllib_get in 2.318s
-"""