import json, requests, asyncio, aiohttp
from lxml import etree
from util.logger import get_looger

# Project logger for this scraper ("古诗文" = classical Chinese poetry).
# NOTE(review): the helper is spelled "get_looger" — presumably a typo in
# util.logger carried through here; confirm before renaming.
logger = get_looger("古诗文", __file__)
# Shared accumulator: every fetch() coroutine appends its parsed entries here.
datas = []

def get_astrs():
    """Fetch the list of quote-category names from the mingjus index page.

    Returns:
        list[str]: text of every category link under div#type2 > div.sright.

    Raises:
        requests.HTTPError: if the index page responds with an error status.
        requests.Timeout: if the server does not respond within the timeout.
    """
    url = 'https://www.gushiwen.cn/mingjus/'
    # Timeout prevents an unresponsive server from hanging the whole script;
    # raise_for_status avoids silently parsing a 4xx/5xx error page.
    response = requests.get(url, timeout=10)
    response.raise_for_status()
    tree = etree.HTML(response.text)
    astrs = tree.xpath('//div[@id="type2"]/div[@class="sright"]/a/text()')
    return astrs

astrs = get_astrs()

async def fetch(session, url, datas):
    """Scrape title/source pairs from *url* and every follow-up page.

    Pagination is followed iteratively: the original recursive descent
    awaited the *next* page before parsing the current one, which both
    reversed the append order and risked hitting Python's recursion limit
    on long pagination chains. Entries are now appended in natural page
    order.

    Args:
        session: an open aiohttp.ClientSession.
        url: the first page to scrape.
        datas: shared list; one {'title', 'source'} dict is appended per item.

    Returns:
        str: the raw HTML of the *first* page (matches the original, whose
        recursive return values were discarded).
    """
    first_html = None
    while url:
        async with session.get(url) as response:
            html = await response.text()
        if first_html is None:
            first_html = html
        tree = etree.HTML(html)

        items = tree.xpath("//div[@class='left']/div[@class='sons']/div[@class='cont']")
        for item in items:
            title = item.xpath('.//a[1]/text()')
            if not title:
                # Items without a title link are skipped entirely.
                continue
            source = item.xpath('.//a[2]/text()')
            # Build the entry dict once and reuse it for both the shared
            # accumulator and the log line (the original built it twice).
            entry = {
                'title': title[0],
                'source': source[0] if source else '无法寻源',
            }
            datas.append(entry)
            logger.info(json.dumps(entry, ensure_ascii=False))

        # "amore" link points at the next page of results, when present.
        next_urls = tree.xpath('//a[@class="amore"]/@href')
        url = f'https://www.gushiwen.cn{next_urls[0]}' if next_urls else None

    return first_html

async def main():
    """Launch one fetch() task per category in *astrs*, concurrently.

    All tasks share a single aiohttp session and append into the
    module-level *datas* list, which is returned once every task finishes.
    """
    from urllib.parse import quote

    async with aiohttp.ClientSession() as session:
        coros = [
            fetch(
                session,
                f'https://www.gushiwen.cn/mingjus/default.aspx?astr={quote(astr)}',
                datas,
            )
            for astr in astrs
        ]
        await asyncio.gather(*coros)
    return datas

mingju = asyncio.run(main())