# 引入线程池包
from concurrent.futures import ThreadPoolExecutor, wait, ALL_COMPLETED
import time

def crawl(url, delay=3.0):
    """Simulate fetching *url* and return a status message.

    The network request is faked with a sleep so the thread-pool demo
    has observable latency.

    Args:
        url: The address to "request"; embedded verbatim in the message.
        delay: Seconds to sleep, simulating request latency. Defaults to
            3.0 to preserve the original behavior; pass 0 in tests.

    Returns:
        A human-readable message containing the requested URL.
    """
    msg = f'请求地址为：{url}'
    time.sleep(delay)
    return msg


if __name__ == '__main__':
    base_url = 'http://www.baidu.com?page={}'
    # Build the five page URLs with a list comprehension.
    list1 = [base_url.format(i) for i in range(1, 6)]
    print('使用生成表达式生成列表：', list1)
    # Cap the pool at 2 worker threads; remaining tasks queue until a
    # worker frees up. The context manager guarantees shutdown (threads
    # joined) even if an exception is raised while submitting/collecting.
    with ThreadPoolExecutor(2) as pool:
        all_task = [pool.submit(crawl, url) for url in list1]
        # Block until every submitted future has finished.
        wait(all_task, return_when=ALL_COMPLETED)

        # Futures are iterated in submission order, so output is ordered
        # by page number regardless of completion order.
        for future in all_task:
            print(future.result())

    print("main执行结束。。。。")
