import asyncio
import time
import requests
import aiohttp

# Record the script start time so total elapsed time can be printed at the end.
start = time.time()

# News homepages to fetch concurrently.
urls = [
    'http://news.baidu.com/',
    'https://news.163.com/',
    'https://www.qq.com/',
]


async def get_pageText(url):
    """Fetch *url* asynchronously and return the response body as text.

    Args:
        url: The URL to request.

    Returns:
        The decoded response body (str).
    """
    # One session per request is wasteful when fetching many URLs, but it
    # keeps this helper self-contained; the context managers guarantee that
    # both the session and the response are released.
    async with aiohttp.ClientSession() as session:
        # session.get() returns an async context manager directly; the
        # original `async with await s.get(...)` had a redundant `await`.
        async with session.get(url=url) as response:
            return await response.text()

from lxml import etree


def parse(task):
    """Done-callback for a fetch task: print the text of every <a> tag.

    Args:
        task: A finished asyncio.Task whose result is an HTML page (str).
    """
    # Retrieve the coroutine's return value (re-raises if the task failed).
    page_text = task.result()
    tree = etree.HTML(page_text)
    # Collect the text content of every anchor element on the page.
    # (Original variable was misspelled `page_date`.)
    page_data = tree.xpath('//a/text()')
    print(page_data)


# Drive all downloads concurrently on a single event loop.
# asyncio.get_event_loop() is deprecated for creating a loop outside a
# running coroutine (Python 3.10+), so create one explicitly.
loop = asyncio.new_event_loop()
tasks = []
for url in urls:
    coro = get_pageText(url)
    task = loop.create_task(coro)
    # Parse each page as soon as its download completes.
    task.add_done_callback(parse)
    tasks.append(task)
try:
    loop.run_until_complete(asyncio.wait(tasks))
finally:
    # Always release the loop's resources, even if a fetch raised.
    loop.close()
print(time.time() - start)

