# 引入线程池包
from concurrent.futures import ThreadPoolExecutor, as_completed
import time

def crawl(url: str, delay: float = 3.0) -> str:
    """Simulate fetching *url* and return a status message.

    Args:
        url: The address to "request".
        delay: Seconds to sleep, simulating network latency
            (defaults to 3.0, matching the original behavior).

    Returns:
        A message string containing the requested URL.
    """
    msg = f'请求地址为：{url}'
    # Simulated I/O wait; real crawlers would perform the HTTP request here.
    time.sleep(delay)
    return msg


if __name__ == '__main__':
    base_url = 'http://www.baidu.com?page={}'
    # Build the list of page URLs with a list comprehension.
    urls = [base_url.format(i) for i in range(1, 6)]
    print('使用生成表达式生成列表：', urls)
    # Use the executor as a context manager so worker threads are
    # joined and pool resources are released when the block exits
    # (the original never called shutdown()).
    with ThreadPoolExecutor(max_workers=3) as pool:
        # Submit all tasks at once; at most 3 run concurrently.
        all_task = [pool.submit(crawl, url) for url in urls]

        # as_completed yields each future as soon as it finishes,
        # regardless of submission order.
        for future in as_completed(all_task):
            data = future.result()
            print("in main:get page {}s success".format(data))