# download3.py - Download many URLs using multiple threads, with the ``map`` method.
import os
import urllib
import workerpool
def download(url):
url = url.strip()
save_to = os.path.basename(url)
urllib.urlretrieve(url, save_to)
print "Downloaded %s" % url
# Initialize a pool, 5 threads in this case.
pool = workerpool.WorkerPool(size=5)

# ``download`` is called with one line of urls.txt per job.  Read the
# URL list inside a ``with`` block so the file handle is closed
# deterministically instead of leaking until garbage collection.
with open("urls.txt") as url_file:
    pool.map(download, url_file.readlines())

# Send shutdown jobs to all threads, then block until every queued job
# has finished.
pool.shutdown()
pool.wait()