import asyncio

import aiohttp
import requests
from util.headers import get_header


# One search-API URL per result page; range(1, 2) currently yields page 1 only.
urls = [
    "https://sousuo.www.gov.cn/search-gov/data"
    "?t=zhengcelibrary&sort=score&sortType=1&searchfield=title"
    f"&p={page}&n=5&type=gwyzcwjk"
    for page in range(1, 2)
]
# NOTE: a synchronous `requests` prototype of this scraper used to live here;
# the async `fetch` below performs the same extraction.


async def fetch(session, url):
    """Fetch one search-result page and extract its policy-document entries.

    Args:
        session: an open ``aiohttp.ClientSession``.
        url: full search-API URL for a single result page.

    Returns:
        A list of dicts with keys ``"title"``, ``"summary"`` and ``"url"``,
        one per entry in the page's ``listVO`` (empty list if none).
        The original returned only the last entry — and raised NameError on
        an empty page — which discarded data; returning all entries matches
        the per-item dict construction the loop clearly intended.
    """
    async with session.get(url, headers=get_header()) as resp:
        # aiohttp's ClientResponse.json() is a coroutine and must be awaited;
        # the original indexed the un-awaited coroutine object (TypeError).
        payload = await resp.json()
        items = payload["searchVO"]["catMap"]["gongwen"]["listVO"]
        return [
            {
                "title": item["title"],
                "summary": item["summary"],
                "url": item["url"],
            }
            for item in items
        ]


async def main():
    """Fetch every page in ``urls`` concurrently and print the results."""
    async with aiohttp.ClientSession() as session:
        # Fan out one request per page; gather preserves input order.
        # (The original shadowed the session variable with the results and
        # `break`-ed after the first result, silently dropping the rest.)
        results = await asyncio.gather(*[fetch(session, url) for url in urls])
        for result in results:
            print(result)


if __name__ == "__main__":
    # Only run the crawler when executed as a script, not on import —
    # the unguarded call fired network requests at import time.
    asyncio.run(main())
