import json
import time
from concurrent.futures.thread import ThreadPoolExecutor
import pymysql
import requests
from lxml import etree
import DBConfig
from DBConfig import *


class ZhihuAnt:
    """Crawler that stores Zhihu hot-list entries and their answers in MySQL.

    Workflow: `getHotList(url)` scrapes one hot-list page, inserts unseen
    entries into `basic_zhihu_hot`, then calls `GetAnswers` to pull up to
    50 answers per question into `basic_zhihu_answer`.

    NOTE(review): the hard-coded cookie below is a captured login session
    and will expire; it should eventually come from configuration.
    """

    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.146 Safari/537.36',
        # BUGFIX: this value was split across two physical source lines,
        # which is a SyntaxError for a single-quoted string.  It is rejoined
        # here via implicit concatenation of two adjacent literals; the byte
        # content of the cookie itself is unchanged.
        'cookie': '_zap=1c849c0c-39c4-4dcc-a05c-2334b3ebd5ab; d_c0="AGDWx4VKlxKPTq0PJ7no2Km9dxhQjA8drPw=|1612170619"; _xsrf=kBGPMrIZsloVZdf9HqSkfZwao2mkiVuT; tst=h; __snaker__id=SHnFdEXuXPeIQzhv; _9755xjdesxxd_=32; q_c1=40a02b16f4b2480b832eaa8b79cdbd32|1623310097000|1615212396000; YD00517437729195%3AWM_NI=XuhQpYUYAtARkrp6O%2BlH%2BCtKnh8EjEipz81gNVzdrH5QGMhjh162yU%2FkGlzM2apI4hHWsgt4fri%2Fkh0a96Dhn5gLetdztJ8YKWI4AMNgCVMSnYaq4t84Y8r%2BFeFtqYOkcVc%3D; YD00517437729195%3AWM_NIKE=9ca17ae2e6ffcda170e2e6ee92ec5ba69ff889b77d98968fa6c45b939f9babf17292eea1b9c140b8b699d7bc2af0fea7c3b92a9caba8b2f35397878f95c45aa9aec0d7ce21b098e5a4f741aeb8f8a6e94fabb0b6d9b46d8e8ab685f144f78f89aeeb62aceffad3fc5fedb5898df947f2bfc0aab7648c89bd88f740a6aea7d0ea64f4bff7d7c764aeeba9ade880a8a6bcabbb53898987d5c4478eb98db7d059ae87af92b780fcec97d9f12186bb8a95f4538db5adb7e237e2a3; YD00517437729195%3AWM_TID=jOwJzD%2F4trJABEUVRFc%2FnQi4WE94yG%2F5; gdxidpyhxdE=J7nGqyfGJUOMsBI8t76t%2FZW1sEJx10Cxi8OC62%5CCyONITaOvV82YwKxwGwfiLp5%2FIeN30yvjEUb7g9yoR0akZDKyYaw3YakCHxD%2F%2F8ujGCVPT4o7E%5CUgZUQXP4LMB9zyzbcbEB%5CCATsEyHU2LJNjGfJu%5CAuKABTeZSUsRCyLNWTvoDad%3A1624273450606; Hm_lvt_98beee57fd2ef70ccdd5ca52b9740c49=1624173572,1624271707,1624272255,1624272670; SESSIONID=hQxm5KVNb73KscqxiSN6hL4AeJI3CUxyfQd4VhWgrJj; JOID=VFkcC0JG5ZBfjz25CkjtAQuAEfsWKbzpM7Vx50Ur1asV_VHPelaDQDGHMrAK3-FtYEw8shfW-oxW2mO1nUTL-ow=; osd=VlEdA0lE7ZFXhD-xC0DmAwOBGfAUIb3hOLd55k0g16MU9VrNcleLSzOPM7gB3elsaEc-uhbe8Y5e22u-n0zK8oc=; captcha_session_v2="2|1:0|10:1624272683|18:captcha_session_v2|88:cngyY3VQWktSVkdZd2Q0WUZyaUpMU2IyeVdyV0o0Ykt0ZHdDZ3AvVWl2YStXazdxekpLQ3FKWWo4c1l6cEVGUQ==|fda3386f16ee3d24bd8496d7877f4b5640ba085e63373635a16db2ac4abc937c"; '
                  'captcha_ticket_v2="2|1:0|10:1624272687|17:captcha_ticket_v2|704:eyJ2YWxpZGF0ZSI6IkNOMzFfTzk2Z2ZpckpTQ3lwenZqaS5obkxMQnphSVMyWHVzZk5wS3hYWVJSOEwtSmp2dzlZdEtNRW9MR3BlRm5SN1FDSUg1WlEydUVIeV9sWGJaUzJud0FZSUNBMFlJSkdEdklmNm80MHdfSmpGUlNfaDYuX29nb0NnaHRqdV8wbzlyRDFWUUY5b0tPWFZuNmVqSEVfcW9aaE5jbjh3Z2JhWDBlUXJfLXIyR1FqU2lFNVJfSDZXczFnRVF0T3FhVk1CUWZkUWQyN3poNHdXTUdvWGpnVHFTcVRyZjdoeGEyX0NmQ0VzZFRMZFV3cnNzOWlBckFOeHdYOWRjRWFncUxRQ3hwZ19wdnByMFNqN1N4RzhWc0NiN0toSjFOajBId1haZURsXzRzYmthelk5azhaQ29SU3JsclBMQmV0clEuMnpRcm9FZ0VRZS1pdHhPWFJKQmlxWXc2QXJYcWVCUGswX2dfYm5ScktxQ0o5YWJfdmJ4NTgtQTlMZDdVR2Z0VzBQYnRaeHpzY24wc0VUU3lHLjAtZFRkVC0xZ1pYMS5wUU9rX09YOWk0aWFhZkZrMVR2d2JIeDJzSEF5V0FvVEtPMFRyNVVPV3B0MTFMVzlWcnM1MWlIWUtfVDFIN01iMTI5NDFUYVctQlpRenl0Z2RWNWtJUk9TdHU1NlE2ejh6MyJ9|7fcef623cda4744cc8f4c05abc72f3931259cd9b48cbe481fd6488fb2b952890"; z_c0="2|1:0|10:1624272705|4:z_c0|92:Mi4xc2xiSURnQUFBQUFBWU5iSGhVcVhFaVlBQUFCZ0FsVk5RYjI5WVFBczV3M3ViS0lkZURPMkJhc0tGdXhHcFRNbHlB|fe8f5a7360cb539284381f934d84c9577992d747c64a326a561268a0ad926502"; unlock_ticket="AFCkh2u2Ig8mAAAAYAJVTUl20GDKFHjuo8Y50BKjzaBnRm3aaM6D-w=="; tshl=; KLBRSID=57358d62405ef24305120316801fd92a|1624273197|1624271706; Hm_lpvt_98beee57fd2ef70ccdd5ca52b9740c49=1624273198'
    }

    # Per hot-list URL path: (number of entries to take, category label).
    # Replaces the former if/elif chain over `hot_map`; unknown paths fall
    # back to (0, None), matching the old default behaviour.
    _HOT_SECTIONS = {
        "hot": (50, '全站'),
        "hot?list=science": (15, '科学'),
        "hot?list=digital": (15, '数码'),
        "hot?list=sport": (15, '体育'),
        "hot?list=fashion": (15, '时尚'),
        "hot?list=film": (15, '影视'),
        "hot?list=car": (15, '汽车'),
    }

    def __init__(self):
        """Open the MySQL connection, retrying until it succeeds."""
        while True:
            try:
                print('数据库连接中')
                self.db = SQLHandler("127.0.0.1", 3306, "root", "123456", "zhihu")
                print('数据库连接成功')
                break
            except Exception as e:
                print(e)
                # Append instead of "w+" so earlier failures are not overwritten.
                with open("../logs/db_init.log", "a") as f:
                    f.write(str(e) + "\n")
                print('等待重新连接中')
                time.sleep(5)  # avoid a busy retry loop hammering the DB

    def getQuestionId(self):
        """Return every question_id already stored in `basic_zhihu_answer`."""
        sql_search = 'select question_id from basic_zhihu_answer'
        try:
            results = self.db.fetch_all(sql_search)
        except Exception as e:
            print(e)
            results = []
        return [row['question_id'] for row in results]

    def getAnswerNum(self, questionId):
        """Return how many answers of *questionId* are already stored."""
        sql_find = "SELECT COUNT(*) FROM `basic_zhihu_answer` where question_id = %s "
        res = self.db.fetch_one(sql_find, questionId)
        return int(res['COUNT(*)'])

    def isAnswerExist(self, commentId):
        """Return True when an answer with this comment_id is already stored.

        BUGFIX: the original returned True when the row was *missing*, while
        its caller inserted on False — together they re-inserted answers that
        were already in the table and skipped new ones.  Both this method and
        its caller in GetAnswers now use the straightforward semantics.
        """
        sql_exist = "SELECT answer_id FROM `basic_zhihu_answer` where comment_id = %s "
        res = self.db.fetch_one(sql_exist, str(commentId))
        return res is not None

    def getAllLink(self):
        """Return ({question_id: "true"}, [question_id, ...]) from `basic_zhihu_hot`.

        The question id is segment 4 of the stored link
        (https://www.zhihu.com/question/<id>).
        """
        sql_search = 'select hot_link from basic_zhihu_hot'
        results = self.db.fetch_all(sql_search, args=None)
        question_list = [row["hot_link"].split('/')[4] for row in results]
        link_dict = {question_id: "true" for question_id in question_list}
        return link_dict, question_list

    def GetAnswers(self, question_id):
        """Fetch up to 50 answers (10 pages of 5) for *question_id* and insert the new ones."""
        offset = 0
        while True:
            url = ('https://www.zhihu.com/api/v4/questions/{0}/answers'
                   '?include=data%5B%2A%5D.is_normal%2Cadmin_closed_comment%2Creward_info%2Cis_collapsed%2Cannotation_action%'
                   '2Cannotation_detail%2Ccollapse_reason%2Cis_sticky%2Ccollapsed_by%2Csuggest_edit%2Ccomment_count%'
                   '2Ccan_comment%2Ccontent%2Ceditable_content%2Cattachment%2Cvoteup_count%2Creshipment_settings%'
                   '2Ccomment_permission%2Ccreated_time%2Cupdated_time%2Creview_info%2Crelevant_info%2Cquestion%2Cexcerpt%'
                   '2Cis_labeled%2Cpaid_info%2Cpaid_info_content%2Crelationship.is_authorized%2Cis_author%2Cvoting%2Cis_thanked%'
                   '2Cis_nothelp%2Cis_recognized%3Bdata%5B%2A%5D.mark_infos%5B%2A%5D.url%3Bdata%5B%2A%5D.author.follower_count%'
                   '2Cbadge%5B%2A%5D.topics%3Bdata%5B%2A%5D.settings.table_of_content.enabled&limit=5&offset={1}&platform=desktop&'
                   'sort_by=default'.format(question_id, offset))

            # BUGFIX: the original retry loop did `break` inside `except`,
            # which could leave `res` unbound and crash at `res.encoding`.
            # Retry a few times, then give up on this question cleanly.
            res = None
            for _ in range(3):
                try:
                    res = requests.get(url, headers=self.headers, timeout=(3, 7))
                    break
                except Exception as e:
                    print(e)
                    time.sleep(1)
            if res is None:
                break

            res.encoding = 'utf-8'
            try:
                jsonAnswer = json.loads(res.text)
                datas = jsonAnswer['data']
            except Exception:
                # Non-JSON body or missing 'data' — treat as end of pages.
                print("404")
                break

            for data in datas:
                try:
                    answer_id = str(data['id'])
                    answer_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(data['created_time']))
                    author_name = data['author']['name']
                    # Plain text of the answer body; cap very long answers.
                    answer_content = ''.join(etree.HTML(data['content']).xpath('//p//text()'))
                    if len(answer_content) > 20000:
                        answer_content = answer_content[0:20000] + '...'
                except Exception as e:
                    with open("wrong.txt", "a") as f:
                        f.write(str(e))
                    # BUGFIX: previously fell through and inserted stale (or
                    # unbound) fields from the prior iteration.
                    continue
                if not self.isAnswerExist(commentId=answer_id):
                    sql = ("INSERT INTO basic_zhihu_answer(question_id, comment_id, author_name, answer_content, answer_time) "
                           "VALUES (%s, %s, %s, %s, %s)")
                    self.db.insert_one(sql, args=[question_id, answer_id, author_name, answer_content, answer_time])
                    print(answer_id + '回答已入库')
                else:
                    print(answer_id + '此回答已存在')

            offset += 5
            print('获取到到第{0}页'.format(int(offset / 5)))
            if offset >= 50:  # hard cap: 50 answers per question
                break
            time.sleep(1)  # be polite to the API

    def getHotAnswer(self):
        """Crawl answers for every question already stored in `basic_zhihu_hot`."""
        _, question_ids = self.getAllLink()
        print(question_ids)
        for question_id in question_ids:
            self.GetAnswers(question_id=question_id)

    def getHotList(self, url):
        """Scrape one hot-list page, store new entries, then crawl their answers."""
        while True:
            try:
                page_text = requests.get(url=url, headers=self.headers).text
                print('网络连接成功')
                break
            except Exception as e:
                print(e)
                # Append instead of "w+" so earlier failures are kept.
                with open("../logs/network.log", "a") as f:
                    f.write(str(e) + "\n")
                time.sleep(5)
                print('网络连接失败，请检查网络设置')

        tree = etree.HTML(page_text)
        key = url.split('/')[3]
        print(key)
        all_index, hot_type = self._HOT_SECTIONS.get(key, (0, None))

        hot_list = tree.xpath('//section[@class="HotItem"]')
        exist_link_dict, _ = self.getAllLink()
        for hot_msg in hot_list[0:all_index]:
            hot_detail = None
            img_url = None
            hot_link = hot_msg.xpath('./div[@class="HotItem-content"]/a/@href')[0]
            hot_title = hot_msg.xpath('./div[@class="HotItem-content"]/a/@title')[0]
            try:
                hot_detail = hot_msg.xpath('./div[@class="HotItem-content"]/a/p/text()')[0]
            except IndexError:
                pass  # entry has no summary paragraph
            try:
                img_url = hot_msg.xpath('./a[@class="HotItem-img"]/img/@src')[0]
            except IndexError:
                pass  # entry has no thumbnail image
            tmp_link_number = str(hot_link).split('/')[4]
            print(tmp_link_number)
            res = exist_link_dict.get(tmp_link_number, "false")
            print(res)
            if res == "false":
                sql = ("INSERT INTO basic_zhihu_hot(hot_title, question_id, hot_detail, hot_link, hot_img_url, hot_type, gmt_create) "
                       "VALUES (%s, %s, %s, %s, %s, %s,%s)")
                try:
                    self.db.insert_one(sql, args=[hot_title, tmp_link_number, hot_detail, hot_link, img_url, hot_type,
                                                  time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())])
                    print(hot_title + '已入库')
                except Exception as e:
                    print(e)
            else:
                print(hot_title + '已存在')
            print(tmp_link_number)
            answer_num = self.getAnswerNum(questionId=tmp_link_number)
            print('an_num = ' + str(answer_num))
            if answer_num == 50:
                print("此问题的回答已经入库了")
            else:
                self.GetAnswers(question_id=tmp_link_number)


if __name__ == '__main__':
    zhihuAnt = ZhihuAnt()
    # One hot-list page per channel: overall plus six topic channels.
    url_map = ["https://www.zhihu.com/hot", "https://www.zhihu.com/hot?list=science",
               "https://www.zhihu.com/hot?list=digital", "https://www.zhihu.com/hot?list=sport",
               "https://www.zhihu.com/hot?list=fashion", "https://www.zhihu.com/hot?list=film",
               "https://www.zhihu.com/hot?list=car"]
    # BUGFIX: the original did pool.submit(zhihuAnt.getHotList(url=url)),
    # which CALLS getHotList synchronously and submits its return value
    # (None) — no work ever ran on the pool.  Submit the callable with its
    # argument instead.  The `with` block also shuts the pool down and
    # waits for all pages to finish.
    with ThreadPoolExecutor(max_workers=8) as pool:
        for url in url_map:
            pool.submit(zhihuAnt.getHotList, url)