author     Karel Kočí <cynerd@email.cz>  2016-06-30 16:11:56 +0200
committer  Karel Kočí <cynerd@email.cz>  2016-06-30 16:11:56 +0200
commit     9931e0888b2419326ae10ebbfae532261c5c125f (patch)
tree       7504be5daccbb7b7d1ea396754de47b11ed790e5 /vim/bundle/YouCompleteMe/third_party/pythonfutures/crawl.py
parent     e573b3020c032400eed60b649a2cbf55266e6bb0 (diff)
download   myconfigs-9931e0888b2419326ae10ebbfae532261c5c125f.tar.gz
           myconfigs-9931e0888b2419326ae10ebbfae532261c5c125f.tar.bz2
           myconfigs-9931e0888b2419326ae10ebbfae532261c5c125f.zip
Fix submodules
Diffstat (limited to 'vim/bundle/YouCompleteMe/third_party/pythonfutures/crawl.py')
m---------  vim/bundle/YouCompleteMe                                       0
-rwxr-xr-x  vim/bundle/YouCompleteMe/third_party/pythonfutures/crawl.py   74
2 files changed, 0 insertions, 74 deletions
diff --git a/vim/bundle/YouCompleteMe b/vim/bundle/YouCompleteMe
new file mode 160000
+Subproject 0de1c0c9bb13ce82172b472c676035cd47cf6a6
diff --git a/vim/bundle/YouCompleteMe/third_party/pythonfutures/crawl.py b/vim/bundle/YouCompleteMe/third_party/pythonfutures/crawl.py
deleted file mode 100755
index 86e0af7..0000000
--- a/vim/bundle/YouCompleteMe/third_party/pythonfutures/crawl.py
+++ /dev/null
@@ -1,74 +0,0 @@
-"""Compare the speed of downloading URLs sequentially vs. using futures."""
-
-import functools
-import time
-import timeit
-import sys
-
-try:
- from urllib2 import urlopen
-except ImportError:
- from urllib.request import urlopen
-
-from concurrent.futures import (as_completed, ThreadPoolExecutor,
- ProcessPoolExecutor)
-
-URLS = ['http://www.google.com/',
- 'http://www.apple.com/',
- 'http://www.ibm.com',
- 'http://www.thisurlprobablydoesnotexist.com',
- 'http://www.slashdot.org/',
- 'http://www.python.org/',
- 'http://www.bing.com/',
- 'http://www.facebook.com/',
- 'http://www.yahoo.com/',
- 'http://www.youtube.com/',
- 'http://www.blogger.com/']
-
-def load_url(url, timeout):
- kwargs = {'timeout': timeout} if sys.version_info >= (2, 6) else {}
- return urlopen(url, **kwargs).read()
-
-def download_urls_sequential(urls, timeout=60):
- url_to_content = {}
- for url in urls:
- try:
- url_to_content[url] = load_url(url, timeout=timeout)
- except:
- pass
- return url_to_content
-
-def download_urls_with_executor(urls, executor, timeout=60):
- try:
- url_to_content = {}
- future_to_url = dict((executor.submit(load_url, url, timeout), url)
- for url in urls)
-
- for future in as_completed(future_to_url):
- try:
- url_to_content[future_to_url[future]] = future.result()
- except:
- pass
- return url_to_content
- finally:
- executor.shutdown()
-
-def main():
- for name, fn in [('sequential',
- functools.partial(download_urls_sequential, URLS)),
- ('processes',
- functools.partial(download_urls_with_executor,
- URLS,
- ProcessPoolExecutor(10))),
- ('threads',
- functools.partial(download_urls_with_executor,
- URLS,
- ThreadPoolExecutor(10)))]:
- sys.stdout.write('%s: ' % name.ljust(12))
- start = time.time()
- url_map = fn()
- sys.stdout.write('%.2f seconds (%d of %d downloaded)\n' %
- (time.time() - start, len(url_map), len(URLS)))
-
-if __name__ == '__main__':
- main()
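
Note: the deleted crawl.py targeted the backported pythonfutures package and still supported Python 2. A rough modern equivalent of its thread-pool path against the stdlib concurrent.futures in Python 3 is sketched below; this is illustrative only and not part of the repository, and the URL list, timeout, and worker count are arbitrary placeholders.

"""Sketch of a modern equivalent of the deleted crawl.py thread-pool path."""

import time
from concurrent.futures import ThreadPoolExecutor, as_completed
from urllib.request import urlopen

# Illustrative URL list; the original script used a longer hard-coded set.
URLS = ['http://www.python.org/', 'http://www.example.com/']

def load_url(url, timeout=60):
    # Read the whole response body, as the original load_url() did.
    with urlopen(url, timeout=timeout) as conn:
        return conn.read()

def download_urls(urls, max_workers=10, timeout=60):
    url_to_content = {}
    # The context manager shuts the pool down on exit, replacing the explicit
    # executor.shutdown() in the deleted download_urls_with_executor().
    with ThreadPoolExecutor(max_workers=max_workers) as executor:
        future_to_url = {executor.submit(load_url, url, timeout): url
                         for url in urls}
        for future in as_completed(future_to_url):
            try:
                url_to_content[future_to_url[future]] = future.result()
            except OSError:
                # Covers URLError/timeouts; narrower than the original bare except.
                pass
    return url_to_content

if __name__ == '__main__':
    start = time.time()
    results = download_urls(URLS)
    print('%.2f seconds (%d of %d downloaded)'
          % (time.time() - start, len(results), len(URLS)))

As in the deleted main(), a ProcessPoolExecutor variant additionally requires the submitted callable to be picklable and the driver code to be guarded by if __name__ == '__main__'.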