import pandas as pd
import requests
from fake_useragent import UserAgent
from loguru import logger
from bs4 import BeautifulSoup
from tqdm import tqdm
import json


def get_novel_info(url):
    """Fetch a Qidian book detail page and extract its intro and labels.

    Parameters
    ----------
    url : str
        Full URL of the book page, e.g. 'https://www.qidian.com/book/1036504904/'.

    Returns
    -------
    dict
        {'intro': <text of the .intro node or ''>,
         'labels': <text of the .all-label node or ''>}.
        Both values stay '' when the request fails or the nodes are absent.
    """
    # NOTE(review): the original code built a first headers dict that was
    # immediately shadowed by this one — the dead dict has been removed.
    # The Cookie value is session-bound and will eventually expire.
    headers = {
        "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.7",
        "Accept-Encoding": "gzip, deflate, br, zstd",
        "Accept-Language": "zh-CN,zh;q=0.9",
        "Connection": "keep-alive",
        "Cookie": "e1=%7B%22l6%22%3A%221%22%2C%22l7%22%3A%22%22%2C%22l1%22%3A%22%22%2C%22l3%22%3A%22%22%2C%22pid%22%3A%22qd_P_xiangqing%22%2C%22eid%22%3A%22%22%7D; e2=%7B%22l6%22%3A%221%22%2C%22l7%22%3A%22%22%2C%22l1%22%3A40%2C%22l3%22%3A%22%22%2C%22pid%22%3A%22qd_P_xiangqing%22%2C%22eid%22%3A%22qd_A64%22%7D; newstatisticUUID=1747271872_1531538002; _csrfToken=DSq13PwFcgnyr57PYLT3q4S2hpONLEKP7kPNCbxg; fu=1231178626; supportWebp=true; supportwebp=true; _gid=GA1.2.1885231255.1748934625; e1=%7B%22l6%22%3A%22%22%2C%22l7%22%3A%22%22%2C%22l1%22%3A2%2C%22l3%22%3A%22%22%2C%22pid%22%3A%22qd_p_qidian%22%2C%22eid%22%3A%22%22%7D; e2=%7B%22l6%22%3A%22%22%2C%22l7%22%3A%22%22%2C%22l1%22%3A2%2C%22l3%22%3A%22%22%2C%22pid%22%3A%22qd_p_qidian%22%2C%22eid%22%3A%22%22%7D; _ga_FZMMH98S83=deleted; traffic_search_engine=; Hm_lvt_f00f67093ce2f38f215010b699629083=1748428889,1748934624,1749001382,1749086676; HMACCOUNT=50C50DDAFB29127A; traffic_utm_referer=; Hm_lpvt_f00f67093ce2f38f215010b699629083=1749088338; _gat_gtag_UA_199934072_2=1; _ga=GA1.1.807643693.1747271874; _ga_FZMMH98S83=GS2.1.s1749086677$o6$g1$t1749088338$j60$l0$h0; _ga_PFYW0QLV3P=GS2.1.s1749086677$o6$g1$t1749088338$j60$l0$h0; w_tsfp=ltvuV0MF2utBvS0Q7aPpnEOtEzwncjA4h0wpEaR0f5thQLErU5mB2IZytsjxNnLd4cxnvd7DsZoyJTLYCJI3dwNCQJnEIY4ZilyRx9dwiogWBhU2Q8iIXVMcIrJwuDZHeHhCNxS00jA8eIUd379yilkMsyN1zap3TO14fstJ019E6KDQmI5uDW3HlFWQRzaLbjcMcuqPr6g18L5a5W2J7FP7KF92AbsQ2RCX031JX3gh6RGzdOxbNxmoJ8uvSqA=",
        "Host": "www.qidian.com",
        "Sec-Fetch-Dest": "document",
        "Sec-Fetch-Mode": "navigate",
        "Sec-Fetch-Site": "none",
        "Upgrade-Insecure-Requests": "1",
        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/133.0.0.0 Safari/537.36",
        "sec-ch-ua-platform": "Windows"
    }
    result = {'intro': '', 'labels': ''}
    try:
        # timeout added: without it a stalled connection blocks the whole
        # crawl indefinitely (requests has no default timeout)
        response = requests.get(url=url, headers=headers, timeout=15).text
    except Exception as e:
        logger.error(e)
        return result
    soup = BeautifulSoup(response, 'html.parser')
    intro_node = soup.find(class_="intro")
    if intro_node is not None:
        result['intro'] = intro_node.text
    label_node = soup.find(class_="all-label")
    if label_node is not None:
        result['labels'] = label_node.text
    return result


if __name__ == '__main__':
    # Smoke-test the scraper on one known book page before the full run.
    print(get_novel_info('https://www.qidian.com/book/1036504904/'))

    df = pd.read_csv('./novel_infos_unique.csv', encoding='GB18030')
    all_items = []
    # total= gives tqdm a real progress bar instead of a bare item counter
    for idx, row in tqdm(df.iterrows(), total=len(df)):
        item = row.to_dict()
        url = "https://" + str(row['地址'])
        try:
            res = get_novel_info(url)
            intro, labels = res['intro'], res['labels']
        except Exception as e:
            # get_novel_info already swallows request errors and returns
            # defaults; this guard keeps one unexpected failure (e.g. a
            # missing column) from aborting the whole crawl.
            logger.info(e)
            intro, labels = '', ''
        item['简介'] = intro
        item['标签'] = labels
        all_items.append(item)
        # Write straight into the DataFrame. The original mutated the
        # iterrows() copy and assigned the full row back with
        # df.loc[idx] = row — redundant double bookkeeping.
        df.loc[idx, '简介'] = intro
        df.loc[idx, '标签'] = labels
        if int(idx) % 100 == 0:
            print(item)
    df.to_csv('./novel_infos_unique_labelsx.csv', encoding='GB18030', index=False)
    # ensure_ascii=False keeps Chinese text readable in the JSON file
    # (the default would emit \uXXXX escapes despite the GB18030 encoding)
    with open('./novel_infos_unique_labels.json', 'w', encoding='GB18030', newline='') as f:
        json.dump(all_items, f, ensure_ascii=False)
