import json
from queue import Queue
from threading import Thread

import MySQLdb
import requests
from lxml import etree
import time
import redis
import re


def task():
    """Worker thread body: drain listing-page numbers from ``task_queue``,
    scrape book metadata from zongheng.com, push one Redis task per chapter,
    and batch-insert one MySQL row per book.

    Reads/writes module globals: ``counter``, ``task_queue``, ``redis_conn``,
    ``cursor``, ``conn``.
    """
    # Function-scope import keeps the shared top-of-file import block untouched.
    from queue import Empty

    global counter, task_queue, redis_conn
    while True:
        # get_nowait() closes the check-then-get race: with 8 workers the
        # queue can drain between qsize() and a blocking get(), which would
        # leave this thread waiting forever.
        try:
            page = task_queue.get_nowait()
        except Empty:
            break
        url = f'https://book.zongheng.com/store/c0/c0/b0/u4/p{page}/v0/s9/t0/u0/i1/ALL.html'
        # timeout prevents a hung connection from stalling the worker forever
        response = requests.get(url, timeout=30)
        root = etree.HTML(response.content.decode())
        story_list = root.xpath("//div[@class='bookinfo']")
        # Rows buffered so the page is inserted with a single executemany().
        cache_data_list = []
        for story in story_list:
            author = " "
            book_name = "".join(story.xpath(".//div[@class='bookname']/a/text()"))
            target = story.xpath(".//div[@class='bookilnk']/a/text()")
            if len(target) == 2:
                author = target[0]
            story_link = "".join(story.xpath("../div[@class='bookimg']/a/@href"))
            id_match = re.search(r'\d+', story_link)
            if id_match is None:
                # No numeric id in the link -> cannot query the chapter API;
                # skip instead of crashing the worker on .group().
                continue
            book_id = id_match.group()
            # Fetch every chapter link for this book.  The POST is inside the
            # try so a network error is logged instead of killing the thread.
            api_url = "https://bookapi.zongheng.com/api/chapter/getChapterList"
            try:
                response = requests.post(api_url, data={"bookId": book_id}, timeout=30)
                for vol in response.json()["result"]["chapterList"]:
                    for chapter in vol["chapterViewList"]:
                        chapter_name = chapter["chapterName"]
                        chapter_url = (
                            f"https://read.zongheng.com/chapter/"
                            f"{chapter['bookId']}/{chapter['chapterId']}.html"
                        )
                        # Renamed from `task` (shadowed the function name);
                        # `book_id` is no longer clobbered by the JSON int id,
                        # so the row appended below always carries the str id.
                        chapter_task = {
                            "story_name": book_name,
                            "chapter_name": chapter_name,
                            "chapter_url": chapter_url
                        }

                        # NOTE(review): `counter += 1` is racy across threads;
                        # tolerated because it only feeds the progress print.
                        counter += 1
                        print(counter, book_name, chapter_name, chapter_url)
                        redis_conn.rpush("zongheng_task_queue",
                                         json.dumps(chapter_task, ensure_ascii=False))
            except Exception as e:
                # Best-effort: record the failure and continue with the next book.
                with open("error.txt", "a", encoding="utf-8") as f:
                    f.write(f"{book_name}, {author}, {story_link} {type(e)} {e}\n")
            # The book row is inserted even when its chapter fetch failed,
            # matching the original behavior.
            cache_data_list.append((book_id, book_name, author))
        if cache_data_list:
            sql = """
                   INSERT INTO tb_novel (id, book_name, author)
                   VALUES (%s, %s, %s)
                   """
            # NOTE(review): `cursor`/`conn` are shared by 8 threads, but
            # MySQLdb connections are not thread-safe — confirm, or guard
            # these two calls with a lock / use per-thread connections.
            cursor.executemany(sql, cache_data_list)
            conn.commit()
            print(f"成功插入 {len(cache_data_list)} 条数据")
        else:
            print("没有可插入的数据")


if __name__ == '__main__':

    # Database configuration.
    db_config = {
        'host': 'localhost',
        'user': 'root',  # replace with your username
        'password': '',  # replace with your password
        'database': 'bilibili',  # replace with your database name
        'port': 3307,
        'autocommit': True  # auto-commit transactions
    }
    # Open the database connection before anything else; if this raises
    # there is nothing to clean up yet.
    conn = MySQLdb.connect(**db_config)
    cursor = conn.cursor()
    print("数据库连接成功")

    # try/finally guarantees the DB handles are released even if Redis
    # setup or the scraping run raises (the original leaked them on error).
    try:
        # Redis connection; decode_responses returns str instead of bytes.
        redis_conn = redis.Redis(decode_responses=True)
        # Progress counter shared (read-modify-write) by the workers.
        counter = 0
        # Listing pages 1..144 to scrape.
        task_queue = Queue()
        for page in range(1, 145):
            task_queue.put(page)

        # Wall-clock start for the timing print below.
        start_time = time.time()
        # Spawn 8 worker threads (local renamed from misspelled "thead_list").
        thread_list = [Thread(target=task) for _ in range(8)]
        for t in thread_list:
            t.start()
        for t in thread_list:
            t.join()
        print("耗时:", time.time() - start_time)
    finally:
        cursor.close()
        conn.close()
        print("数据库连接已关闭")