import asyncio
import time
import aiohttp


# In Python, coroutines can be implemented with generators
def dog(name):
    """Generator-based coroutine.

    Prime with next(), then feed foods via send(); each send() appends
    the food and yields back the running list of everything eaten.
    """
    eaten = []
    while True:
        received = yield eaten
        eaten.append(received)
        print(f'狗哥{name} 吃了 {received}')


async def task(i, delay=2):
    """Sleep asynchronously, then report the task index.

    Args:
        i: index identifying this task in the printed output.
        delay: seconds to sleep before printing (default 2, preserving
            the original hard-coded behavior for existing callers).
    """
    await asyncio.sleep(delay)
    print(f'index : {i}')


# 包装一下，相当于异步运行了 100个 task()
async def main():
    t_list = []
    for i in range(100):
        t = asyncio.create_task(task(i))
        t_list.append(t)
    [await t for t in t_list]


async def request_tplink(i):
    """Fetch the router page at http://192.168.0.1/ and return its body
    prefixed with the request index and a newline."""
    async with aiohttp.ClientSession() as http:
        async with http.get('http://192.168.0.1/') as resp:
            body = await resp.text()
    return f'{i}\n{body}'

async def _run_all(coros):
    """Run the given coroutines concurrently and return their results.

    Small wrapper so each demo below can use asyncio.run(), which
    creates, runs, and closes a fresh event loop per call — replacing
    the deprecated get_event_loop()/run_until_complete() pattern that
    also leaked an unclosed loop.
    """
    return await asyncio.gather(*coros)


if __name__ == '__main__':
    # Demo 1: generator-based coroutine. Prime with next(), then each
    # send() returns the accumulated list of foods eaten so far.
    g = dog('旺财')
    next(g)
    g.send('肉包子')
    g.send('骨头')
    all_food = g.send('狗粮')
    g.close()
    print('狗哥今天的食物：', all_food)

    # Demo 2: an asyncio program — run 100 tasks concurrently.
    # asyncio.run(main())
    print('啦啦啦啦')
    asyncio.run(_run_all(task(i) for i in range(100)))

    # Demo 3: concurrent HTTP requests against the router with aiohttp.
    # The result is a list of each coroutine's return value, in order.
    res = asyncio.run(_run_all(request_tplink(i) for i in range(10)))
    print(type(res))
    with open('1.html', 'w', encoding='utf-8') as f:
        f.write(res[2])