import asyncio
import aiohttp
from lxml import etree


async def fetch(session, url, headers, params=None):
    """Fetch *url* using *session*, returning the decoded body or None.

    When *params* is given (even an empty dict) the request is a POST and
    the JSON-decoded body is returned; otherwise a GET is issued and the
    raw text body is returned.

    :param session: an ``aiohttp.ClientSession`` (or compatible) object.
    :param url: target URL.
    :param headers: dict of HTTP headers to send with the request.
    :param params: optional dict of POST parameters; ``None`` selects GET.
    :return: parsed JSON (POST), response text (GET), or ``None`` on any
        failure — callers are expected to check for ``None``.
    """
    try:
        # `is not None` rather than truthiness: an explicitly-passed empty
        # dict must still take the POST branch instead of degrading to GET.
        if params is not None:
            async with session.post(url, headers=headers, params=params) as response:
                return await response.json()
        else:
            async with session.get(url, headers=headers) as response:
                return await response.text()
    except Exception as e:  # broad by design: any failure degrades to None
        print(f"Error fetching {url}: {e}")
        return None


async def producer(session, queue, headers, num_consumers=3):
    """Page through the zongheng store-search API and enqueue book ids.

    Fetches result pages one at a time and puts each book's ``bookId``
    onto *queue*.  When a page comes back empty or malformed (no more
    data), one ``None`` sentinel per consumer is enqueued so every
    consumer task can shut down cleanly.

    :param session: ``aiohttp.ClientSession`` shared with the consumers.
    :param queue: ``asyncio.Queue`` the book ids are pushed onto.
    :param headers: HTTP headers (User-Agent etc.) for every request.
    :param num_consumers: number of shutdown sentinels to emit at the
        end; must match the number of consumer tasks reading the queue.
    """
    # Loop-invariant endpoint: hoisted out of the paging loop.
    url = 'https://www.zongheng.com/api2/catefine/storeSearch'
    num = 1
    while True:
        payload = {
            "worksTypes": 0,
            "bookType": 1,
            "subWorksTypes": 0,
            "totalWord": 0,
            "serialStatus": 1,
            "vip": 9,
            "pageNum": num,
            "pageSize": 20,
            "categoryId": 0,
            "categoryPid": 0,
            "naodongFilter": 0
        }
        result = await fetch(session, url, headers, payload)
        if result and 'result' in result and 'bookList' in result['result']:
            for book in result['result']['bookList']:
                await queue.put(book['bookId'])
            num += 1
        else:
            # No more data: send one shutdown sentinel per consumer.
            for _ in range(num_consumers):
                await queue.put(None)
            break


async def consumer(session, queue, headers):
    """Consume book ids from *queue* and print their chapter contents.

    For each book id, fetches the chapter-listing page, extracts chapter
    links/titles, then fetches all chapter pages concurrently and prints
    their paragraph text.  A ``None`` item is the shutdown sentinel.

    :param session: ``aiohttp.ClientSession`` shared with the producer.
    :param queue: ``asyncio.Queue`` of book ids (``None`` terminates).
    :param headers: HTTP headers for every request.
    """
    while True:
        book_id = await queue.get()
        if book_id is None:
            # BUGFIX: the sentinel must be marked done as well — every
            # put() (sentinels included) needs a matching task_done(),
            # otherwise queue.join() in main() waits forever.
            queue.task_done()
            break
        chapter_list_url = f"https://huayu.zongheng.com/showchapter/{book_id}.html"
        html = await fetch(session, chapter_list_url, headers)
        if html:
            tree = etree.HTML(html)
            # NOTE(review): class selector has a leading space (" col-4");
            # assumed to match the site's markup exactly — confirm if the
            # site changes.
            links = tree.xpath('//li[@class=" col-4"]/a/@href')
            titles = tree.xpath('//li[@class=" col-4"]/a/text()')
            h1 = tree.xpath('//h1/text()')
            print(h1)
            # Fetch all chapter pages concurrently for this book.
            tasks = []
            for link, title in zip(links, titles):
                print(f"章节: {title}")
                task = asyncio.create_task(fetch(session, link, headers))
                tasks.append(task)
            contents = await asyncio.gather(*tasks)
            for content in contents:
                if content:
                    tree = etree.HTML(content)
                    yue = tree.xpath('//div[@class="content"]/p/text()')
                    print(yue)
        queue.task_done()


async def main():
    """Run one producer and three consumers over a shared work queue.

    The producer pushes book ids onto the queue and finishes by enqueuing
    one ``None`` sentinel per consumer; the consumers exit when they see
    the sentinel, so awaiting them directly is the natural join point.
    """
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/135.0.0.0 Safari/537.36 Edg/135.0.0.0'
    }
    queue = asyncio.Queue()
    async with aiohttp.ClientSession() as session:
        producer_task = asyncio.create_task(producer(session, queue, headers))
        consumer_tasks = [asyncio.create_task(consumer(session, queue, headers)) for _ in range(3)]
        await producer_task
        # BUGFIX: the previous queue.join()/cancel() pattern deadlocked,
        # because the None sentinels were never task_done()'d.  Consumers
        # terminate on their sentinel, so simply await their tasks.
        await asyncio.gather(*consumer_tasks)


# Script entry point: start the asyncio event loop and run the pipeline.
if __name__ == "__main__":
    asyncio.run(main())
