import traceback

from selenium import webdriver
import os
import time
import re
from bs4 import BeautifulSoup
import random

from selenium.webdriver.common.by import By
import mysql_connect

def get_label_ones():
    """Fetch all existing labels from the shane_label table.

    Returns:
        list[dict]: one dict per row with keys "content" and
        "zhihu_data_id".
    """
    conn, cursor = mysql_connect.get_conn()
    try:
        sql = """
            select content,zhihu_data_id from shane_label;
        """
        cursor.execute(sql)
        result = cursor.fetchall()
        # Rows come back as positional tuples; attach explicit keys.
        label_one_dict_list = [
            {"content": item[0], "zhihu_data_id": item[1]} for item in result
        ]
        print(label_one_dict_list)
        return label_one_dict_list
    finally:
        # Release DB resources even when the query raises.
        cursor.close()
        conn.close()
def get_data():
    """Crawl each first-level label's topic page on zhihu and save the HTML.

    For every label from get_label_ones(): open the topic page, expand the
    sub-topic list three times (scroll + click "load more"), then dump the
    final page source to
    other_files/label_htmls/<zhihu_data_id>_<content>.html.
    """
    label_one_dict_list = get_label_ones()
    option = webdriver.ChromeOptions()
    # option.add_argument('headless')
    # Strip the webdriver fingerprint so the site does not detect automation.
    option.add_argument("disable-blink-features=AutomationControlled")
    browser = webdriver.Chrome(r'D:\Driver\chromedriver.exe', options=option)

    base_url: str = "https://www.zhihu.com/topics#"
    for label_index, label_one_dict in enumerate(label_one_dict_list):
        print(f"开始爬取第{label_index}个网页,{label_one_dict['content']},总共{len(label_one_dict_list)}个")

        the_url = base_url + label_one_dict["content"]
        browser.get(the_url)
        time.sleep(3)
        # The original repeated this scroll/click sequence three times
        # verbatim; one loop is equivalent and easier to tune.
        for _ in range(3):
            browser.execute_script("var q=document.documentElement.scrollTop=10000")
            time.sleep(0.2)
            more_button = browser.find_element(By.CSS_SELECTOR, "a.zg-btn-white.zu-button-more")
            more_button.click()
            time.sleep(3)

        with open(
            f"other_files/label_htmls/{label_one_dict['zhihu_data_id']}_{label_one_dict['content']}.html",
            "w",
            encoding="utf-8",
        ) as w_file:
            w_file.write(browser.page_source)

def extract_one_data(file_name: str):
    """Parse one saved topic HTML file and extract its second-level labels.

    Args:
        file_name: name of a file inside other_files/label_htmls, expected
            to start with "<parent_zhihu_id>_".

    Returns:
        list[dict]: dicts with keys "content", "zhihu_data_id" and
        "parent_zhihu_id" for every sub-topic block found in the page.
    """
    folder_path = 'other_files/label_htmls'
    file_dir = f"{folder_path}/{file_name}"
    # The parent topic's zhihu id is encoded in the file-name prefix.
    parent_zhihu_id = file_name.split("_")[0]
    with open(file_dir, "r", encoding="utf-8") as r_file:
        html_text = r_file.read()
    soup = BeautifulSoup(html_text, features="html.parser")
    # (Removed a stray `soup.find_all()` whose result was discarded — it
    # only forced a pointless full-tree traversal.)
    soup_div = soup.find("div", class_="zm-topic-cat-sub")

    # Bug fix: the original passed the SET {"class", "blk"} as attrs; the
    # intended argument is the dict {"class": "blk"}.
    item_list = soup_div.find_all("div", {"class": "blk"})
    label_list = []

    for item in item_list:
        a_tag = item.find("a")
        href_value = a_tag.get("href")
        # hrefs look like ".../<id>"; the last path segment is the topic id.
        zhihu_id = href_value.split("/")[-1]
        strong_tag = item.find("strong")
        content = strong_tag.string
        label_list.append({
            "content": content,
            "zhihu_data_id": zhihu_id,
            "parent_zhihu_id": parent_zhihu_id,
        })
    return label_list



def extract_datas():
    """Walk other_files/label_htmls and merge the labels parsed from each file."""
    folder_path = 'other_files/label_htmls'
    all_labels = []
    for entry in os.listdir(folder_path):
        print(entry)
        for label in extract_one_data(entry):
            all_labels.append(label)
    print(all_labels)
    return all_labels

def get_label_parent_dict():
    """Build a mapping from a label's zhihu_data_id to its database id.

    Returns:
        dict: {zhihu_data_id: shane_label.id}; empty when the query fails.
    """
    conn, cursor = mysql_connect.get_conn()
    parent_label_dict = dict()
    try:
        sql = "select id,content,zhihu_data_id from shane_label"
        cursor.execute(sql)
        result = cursor.fetchall()
        # Rows are positional tuples: (id, content, zhihu_data_id).
        for label in result:
            parent_label_dict[label[2]] = label[0]
        print(parent_label_dict)
    except Exception:
        # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
        # still propagate; the failure is logged and an empty dict returned.
        traceback.print_exc()
    finally:
        cursor.close()
        conn.close()
    return parent_label_dict

def save_in_mysql():
    """Bulk-insert all extracted second-level labels into shane_label.

    Resolves each label's parent database id through its parent zhihu id,
    then inserts (content, parentId, zhihu_data_id) rows with level=2 and
    a random BIGINT primary key generated by MySQL.

    Raises:
        KeyError: while building insert_list, if a label references a
            parent zhihu id that is not present in shane_label.
    """
    label_list = extract_datas()
    parent_label_dict = get_label_parent_dict()
    print(parent_label_dict)
    # Translate each label's parent zhihu id into the parent's DB primary key.
    insert_list = [
        (
            label_dict["content"],
            parent_label_dict[label_dict["parent_zhihu_id"]],
            label_dict["zhihu_data_id"],
        )
        for label_dict in label_list
    ]

    print(insert_list)
    conn, cursor = mysql_connect.get_conn()
    try:
        print("开始插入")
        # Parameterized statement (%s placeholders) — values are escaped by
        # the driver, never interpolated into the SQL text.
        sql = """
            INSERT INTO shane_label ( id, content, `level`, parentId, zhihu_data_id )
            VALUES
	        ( FLOOR( RAND() * ( 9223372036854775807 - 1 ) + 1 ), %s, 2, %s, %s );
        """
        cursor.executemany(sql, insert_list)
        affected_rows = cursor.rowcount
        print("受影响的行数：", affected_rows)
        conn.commit()
    except Exception:
        # Narrowed from a bare `except:`; undo the partial batch instead of
        # leaving the transaction dangling.
        conn.rollback()
        traceback.print_exc()
    finally:
        cursor.close()
        conn.close()


if __name__ == "__main__":
    # Pipeline stages, run one at a time as needed:
    #   get_data()       -> crawl topic pages and save raw HTML
    #   get_label_ones() -> inspect first-level labels in the DB
    #   extract_datas()  -> parse saved pages into label dicts
    save_in_mysql()