import concurrent.futures
import blog_spider

# Stage 1: download every page concurrently — worker threads overlap the
# blocking network I/O, so this is far faster than a sequential loop.
with concurrent.futures.ThreadPoolExecutor() as pool:
    # map() preserves input order, so results line up with blog_spider.urls.
    pages = pool.map(blog_spider.craw, blog_spider.urls)
    htmls = [(url, page) for url, page in zip(blog_spider.urls, pages)]
    for url, page in htmls:
        print(url, len(page))
print("craw over")

# Stage 2: parse each downloaded page, again fanning the work out across
# a thread pool — this time using submit() instead of map().
with concurrent.futures.ThreadPoolExecutor() as pool:
    # submit() returns one Future per task; key the Future back to its URL
    # so each result can be labelled when it is collected.
    url_by_future = {
        pool.submit(blog_spider.parse, page): url for url, page in htmls
    }
    # Collect in submission order; future.result() blocks until that task is done.
    for fut, url in url_by_future.items():
        print(url, fut.result())
    # Alternative: as_completed yields each future as soon as it finishes,
    # regardless of submission order:
    # for fut in concurrent.futures.as_completed(url_by_future):
    #     print(url_by_future[fut], fut.result())
print("parse over")