# coding=utf-8

import time
from concurrent.futures import (
    ALL_COMPLETED,
    FIRST_COMPLETED,
    ProcessPoolExecutor,
    ThreadPoolExecutor,
    as_completed,
    wait,
)


def spider(page):
    """Simulate a crawl task: block for *page* seconds, announce completion, and return *page*."""
    time.sleep(page)
    message = f"crawl task{page} finished"
    print(message)
    return page


### wait
"""
wait(fs, timeout=None, return_when=ALL_COMPLETED) blocks until the condition in
return_when is met (or the timeout expires) and returns a named 2-tuple of sets
(done, not_done).
"""
with ThreadPoolExecutor(max_workers=5) as t:
    futures = []
    for page in range(1, 5):
        futures.append(t.submit(spider, page))
    # Unblock as soon as any single future finishes.
    wait(futures, return_when=FIRST_COMPLETED)
    print('finished')
    # Second wait with a timeout: show which futures are done after 2.5 s.
    print(wait(futures, timeout=2.5))

### wait and done
# wait() hands back a named 2-tuple (done, not_done); inspect each set separately.
# The original used undefined names (process_data, data), referenced
# concurrent.futures without importing it, and had comment-only loop bodies
# (a SyntaxError); fixed to a runnable demo built on spider().
with ThreadPoolExecutor() as executor:
    futures = [executor.submit(spider, page) for page in range(1, 5)]

    # Block for at most 10 seconds; anything still running lands in not_done.
    done, not_done = wait(futures, timeout=10)

    for future in done:
        # Handle the result of each completed task.
        result = future.result()
        print(f"done: {result}")

    for future in not_done:
        # Handle tasks that did not finish within the timeout.
        print(f"not done: {future}")


### as_completed
"""
as_completed() is a generator: it blocks while no task has finished (unless a
timeout is set), yields each future as soon as it completes so the loop body can
run, then blocks again until every task is done.  Futures that finish first are
handed back to the main thread first.
"""

with ThreadPoolExecutor(max_workers=5) as t:
    obj_list = []
    for page in range(1, 5):
        obj = t.submit(spider, page)
        obj_list.append(obj)

    # Futures are yielded in completion order, not submission order.
    for future in as_completed(obj_list):
        data = future.result()
        print(f"main: {data}")


### map
"""
executor.map behaves like the built-in map(): it applies the same function to
every element of the iterable, and yields results in submission order (not
completion order).
"""
start = time.time()
# Context manager guarantees the pool is shut down even if iteration raises
# (the original leaked the executor and hand-rolled the counter i).
with ThreadPoolExecutor(max_workers=4) as executor:
    for i, result in enumerate(executor.map(spider, [2, 3, 1, 4]), start=1):
        print("task{}:{}".format(i, result))
# Report the elapsed time (start was previously computed but never used).
print("all tasks finished in {:.2f}s".format(time.time() - start))


## executor

def main2(count_n=8, start=1000, stop=16000):
    """Fan CPU-bound summations out across a process pool.

    Submits ``sum(range(num))`` for every ``num`` in ``range(start, stop)``,
    prints the first task's result, and returns it.

    Args:
        count_n: Number of worker processes.
        start: Lower bound (inclusive) of the task sizes.
        stop: Upper bound (exclusive) of the task sizes.

    Returns:
        The first submitted task's result, i.e. ``sum(range(start))``.
    """
    # Context manager guarantees shutdown even if submit()/result() raises.
    with ProcessPoolExecutor(max_workers=count_n) as process_pool:
        # The original passed the bare int to sum(), which raises TypeError
        # (sum() needs an iterable) — wrap it in range().
        future = [process_pool.submit(sum, range(num)) for num in range(start, stop)]
        first = future[0].result()
        print('result is %s' % str(first))
    return first


### asyncio
"""
asyncio 比threading 方法更快，因为threading 使用了 OS（操作系统）线程，所以线程由操作系统调度，其中线程切换被操作系统抢占。asyncio 使用由 Python 解释器定义的协程。程序决定何时以最佳方式切换任务。这是由asyncio 中的 event_loop 来处理的。
"""
import time
import asyncio
import httpx
 
async def fetch(client, n):
    """Issue one GET to httpbin with *n* as the query parameter; the response is discarded."""
    params = {'number': n}
    await client.get("https://httpbin.org/get", params=params)
 
async def main():
    """Open one shared async HTTP client and run all 20 fetches concurrently."""
    async with httpx.AsyncClient() as client:
        tasks = [fetch(client, num) for num in range(20)]
        await asyncio.gather(*tasks)
 
 
start_time = time.perf_counter()
# asyncio.run() creates, runs, and closes a fresh event loop; calling
# get_event_loop() + run_until_complete() from sync code is deprecated
# since Python 3.10 and fails on newer interpreters without a running loop.
asyncio.run(main())
end_time = time.perf_counter()
print(f"Elapsed run time: {end_time - start_time} seconds.")
