import json
import time
from concurrent.futures.thread import ThreadPoolExecutor
import pymysql
import requests
from lxml import etree
import DBConfig
from DBConfig import *
class ZhihuAnt:
    headers = {
        'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/88.0.4324.146 Safari/537.36',
        'cookie': '_zap=1c849c0c-39c4-4dcc-a05c-2334b3ebd5ab; d_c0="AGDWx4VKlxKPTq0PJ7no2Km9dxhQjA8drPw=|1612170619"; _xsrf=kBGPMrIZsloVZdf9HqSkfZwao2mkiVuT; q_c1=40a02b16f4b2480b832eaa8b79cdbd32|1615212396000|1615212396000; tst=h; __snaker__id=SHnFdEXuXPeIQzhv; gdxidpyhxdE=Yb4fbsac8xjSmhCcyrfAOEpn%2FrxuKfOX7d2IT5l%5CAivYU1wbXXKznKlgXWXp1KxrfbdB%5CKV%2FIJfAzcfariAPG4vr0AuM%2BRisfgUJnM%2BTbZgK7TzJRk77xdEdrWsL0oku4NQp5a7DWBWaS1RQ%5CK9KZOhC793sd47EXmr7l%5CR3AcafdDJl%3A1621522401807; _9755xjdesxxd_=32; YD00517437729195%3AWM_NI=bZsp6ySwtddk8%2Fe3FuR9a%2BWySQ%2FGfkNCi%2B77T7glRXGPtCdMLPJeeEgX99gaA1N%2F6AhAouQsSad5fZQT4%2BpqTlyEhCdlBdidfXBH0J5XAJt8EhHfzCdlaVFw1SQJvvjORmE%3D; YD00517437729195%3AWM_NIKE=9ca17ae2e6ffcda170e2e6ee89dc4ff28788a5f76f89968ba2c85f978b8aaeb661ad89adb9b23981bcfc98d02af0fea7c3b92aa286bd91b57391a9bc85e761fcf1abd4c57f92e7afccd673918fe1a8e5618e9da7ccb63d98e9a98dd1548590bca4e74790908686e84fa588f7dae66b83ef82d0b54bac9fa196d47991bfacd6e552938cfcccf37b98b4b7a6eb49a8b48e85ec70a6888ea9b342f7888fabf73487b88887f949a8b4afb7c57df38d9fabf57eaa919e8ec837e2a3; YD00517437729195%3AWM_TID=hErpeQ6%2BqtRBREVAFQNrgtWcTymTJ90Z; captcha_session_v2="2|1:0|10:1621521765|18:captcha_session_v2|88:VUhsQVN5L1YzMzJoQTRTY0ZxVU04czVJeUJvTTkyUU5YV0luRHlTUzg5a0dWWHRSZkV5ZlVMVGh3eGJxT2M0bw==|2237ce8c146e6a2e988f6c6c264432ae865f767032be3ac398904d64d0c4400f"; captcha_ticket_v2="2|1:0|10:1621521783|17:captcha_ticket_v2|704:eyJ2YWxpZGF0ZSI6IkNOMzFfTU5NS3dJLkhHVDVQekJPYy1uTVFKMmlHcXR1WGZ6RnVtN0g0STJLSmdUbkFPSDl6aS1IMEMudENSaFNUSEYyYnZOWUVBSWJwX0FzSEpmWjltY0RtanUtYi5TNkhkcnViSmZFT3BHX1BiVXhIWHZYRWFMZXhwMF9nanloS3VZVFlRdU9kV05SWDY0UDhuSi1WQldhbGI3SHBYR0hqSGEyMDFObnhtSldsUzhBTmIxNmZvdUl4NkJCOGpXWHgwVkVZbzltcHNQQXVaLkpSdDBOZVVnNllMZnAtNEFiaEhUUk11ZHBkVmUwQUdxaU1LeGs4Zlp4eklKRTZOMDh0OXR1dVZrYUZxeUFEdGF1TFpXcVlmWWFKNjdsZUNYQjRXU1VPUmlwU2NueDVWVGFXWnNaa1phbmNNbkcuYnYyeUVUTnBSeGQ0VDVhUnlGSTZpRlNlRjFiRy5ZbVZPVXBVdHc0ZHhDYy1yZWFoLkdyUlNWTjZUeVFjMFhqLlROaGlIcmllU2lxS3R1NnJaalNOTnBrLi1tT0hpMDdPRm1GX0VoT0ZlSTkxalFMNTdvQW9hLm9kUHlKNkgtZ3otSW1CaHA2U3JUR2pWclpwZjQxaFQxSng2NG95MXNxT2lmWDQ4ZVhhWEN1TDdvQlNReTluc3Z1dzVnN2FHNDBpUGRBMyJ9|1826ba7bc11b1271cace7e231a793be58ac00378617d057d1aa6d808fd40ddc3"; z_c0="2|1:0|10:1621521793|4:z_c0|92:Mi4xc2xiSURnQUFBQUFBWU5iSGhVcVhFaVlBQUFCZ0FsVk5nY09UWVFEclM3VzlNWXNNZ3ozNEs3OEQycmUyQmNBUk53|6134bc7b208f364d49091013a5c95f1f2d54b5fdaef9b665603edab2d6d13f3a"; unlock_ticket="AFCkh2u2Ig8mAAAAYAJVTYl8pmBI-_O1CJkESc7fcZ2MLz1Mem7uyw=="; Hm_lvt_98beee57fd2ef70ccdd5ca52b9740c49=1621522047,1621522144,1621522150,1621522160; Hm_lpvt_98beee57fd2ef70ccdd5ca52b9740c49=1621522181; SESSIONID=u95JKOQL3nXmMihR2RJk5fMnVeAr5yql4yLbUl7BOKV; JOID=V10TC0xlTNdbOiXRNWgOzeq8YkQkEHbsHnNqg3dUIow4eE65ZGE2cDY4Jtw0ZS7Z5tRoSi4xCi9_q7lEnOje1Uw=; osd=WlgdA0NoSdlTNSjUO2ABwO-yakspFXjkEX5vjX9bL4k2cEG0YW8-fzs9KNQ7aCvX7ttlTyA5BSJ6pbFLke3Q3UM=; tshl=; KLBRSID=4843ceb2c0de43091e0ff7c22eadca8c|1621522193|1621521413'
    }
    def __init__(self):
        self.db = SQLHandler("127.0.0.1", 3306, "root", "123456", "zhihu")
        #建立数据库连接
        # self.db = pymysql.connect("127.0.0.1",
        #                           "root",
        #                           "123456",
        #                           "zhihu",
        #                           charset="utf8")
        # self.cursor = self.db.create_conn_cursor
    def getQuestionId(self):
        sql_search = 'select question_id from basic_zhihu_answer'
        try:
            results = self.db.fetch_all(sql_search)
        except Exception as e:
            print(e)
        question_list = list()
        for index in results:
            question_id = index['question_id']
            # print(link_number)
            question_list.append(question_id)
        return question_list

    def getAllLink(self):
        sql_search = 'select hot_link from basic_zhihu_hot'
        try:
            self.cursor.execute(sql_search,args = None)
        except Exception as e:
            print(e)
        results = self.cursor.fetchall()
        link_dict = {}
        question_list = list()
        for index in results:
            total_link = index[0]
            link_number = total_link.split('/')[4]
            # print(link_number)
            question_list.append(link_number)
            link_dict[link_number] = "true"
        return link_dict,question_list


    def GetAnswers(self,question_id):
        offset = 0
        while True:
            url = 'https://www.zhihu.com/api/v4/questions/{0}/answers' \
                  '?include=data%5B%2A%5D.is_normal%2Cadmin_closed_comment%2Creward_info%2Cis_collapsed%2Cannotation_action%' \
                  '2Cannotation_detail%2Ccollapse_reason%2Cis_sticky%2Ccollapsed_by%2Csuggest_edit%2Ccomment_count%' \
                  '2Ccan_comment%2Ccontent%2Ceditable_content%2Cattachment%2Cvoteup_count%2Creshipment_settings%' \
                  '2Ccomment_permission%2Ccreated_time%2Cupdated_time%2Creview_info%2Crelevant_info%2Cquestion%2Cexcerpt%' \
                  '2Cis_labeled%2Cpaid_info%2Cpaid_info_content%2Crelationship.is_authorized%2Cis_author%2Cvoting%2Cis_thanked%' \
                  '2Cis_nothelp%2Cis_recognized%3Bdata%5B%2A%5D.mark_infos%5B%2A%5D.url%3Bdata%5B%2A%5D.author.follower_count%' \
                  '2Cbadge%5B%2A%5D.topics%3Bdata%5B%2A%5D.settings.table_of_content.enabled&limit=5&offset={1}&platform=desktop&' \
                  'sort_by=default'.format(question_id, offset)

            state = 1
            while state:
                try:
                    res = requests.get(url, headers=self.headers, timeout=(3, 7))
                    state = 0
                except Exception as e:
                    print(e)
                    break

            res.encoding = 'utf-8'
            try:
                jsonAnswer = json.loads(res.text)
                datas = jsonAnswer['data']
            except Exception as e:
                print("404")
                break
            # print(jsonAnswer)
            # is_end = jsonAnswer['paging']['is_end']

            for data in datas:
                # l = list()
                # l.append(answer_id)
                # l.append(answer_time)
                # l.append(author_name)
                # l.append(answer_content)
                # print(l)
                try:
                    answer_id = str(data['id'])
                    answer_time = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(data['created_time']))
                    author_name = data['author']['name']
                    answer_content = ''.join(etree.HTML(data['content']).xpath('//p//text()'))
                    if len(answer_content) > 20000:
                        answer_content = answer_content[0:20000] + '...'
                except Exception as e:
                    with open("wrong.txt", "a") as f:
                        f.write(str(e))
                sql = "INSERT INTO basic_zhihu_answer(question_id, comment_id, author_name, answer_content, answer_time) VALUES " + \
                      "(%s, %s, %s, %s, %s)"
                try:
                    self.cursor.execute(sql, [question_id, answer_id, author_name, answer_content, answer_time])
                    print(answer_id + '回答已入库')
                    self.db.commit()
                except Exception as e:
                    print(e)
                    self.db.rollback()  # 发生错误时回滚
                    continue
                # if data['admin_closed_comment'] == False and data['can_comment']['status'] and data['comment_count'] > 0:
                #     GetComments(answer_id)

            offset += 5
            print('获取到到第{0}页'.format(int(offset / 5)))

            # if is_end:
            #     break
            if offset >= 50:
                break

            time.sleep(1)
    def getHotAnswer(self):
        _,question_ids = self.getAllLink()
        print(question_ids)
        for question_id in question_ids:
            self.GetAnswers(question_id=question_id)
    def getHotList(self,url):
        # hot_link, hot_detail, hot_title, img_url
        page_text = requests.get(url=url,headers=self.headers).text
        # if not os.path.exists('./ZhiHuPic'):
        #     os.mkdir('./ZhiHuPic')

        tree = etree.HTML(page_text)
        hot_type = None
        hot_map = ['全站', '科学', '数码', '体育', '时尚', '影视', '汽车']
        key = url.split('/')[3]
        all_index = 0
        print(key)
        if key == "hot":
            all_index = 50
            hot_type = hot_map[0]
        elif key == "hot?list=science":
            all_index = 15
            hot_type = hot_map[1]
        elif key == "hot?list=digital":
            all_index = 15
            hot_type = hot_map[2]
        elif key == "hot?list=sport":
            all_index = 15
            hot_type = hot_map[3]
        elif key == "hot?list=fashion":
            all_index = 15
            hot_type = hot_map[4]
        elif key == "hot?list=film":
            all_index = 15
            hot_type = hot_map[5]
        elif key == "hot?list=car":
            all_index = 15
            hot_type = hot_map[6]

        hot_list = tree.xpath('//section[@class="HotItem"]')
        exist_link_dict,_ = self.getAllLink()
        for hot_msg in hot_list[0:all_index]:
            #数据初始化
            #热搜连接
            hot_link = None
            hot_detail = None
            img_url = None
            hot_link = hot_msg.xpath('./div[@class="HotItem-content"]/a/@href')[0]
            #热搜标题
            hot_title = hot_msg.xpath('./div[@class="HotItem-content"]/a/@title')[0]
            # hot_titles.append(hot_title)
            # print(hot_title)
            #热搜详情
            # print(hot_title,hot_type)
            try:
                hot_detail = hot_msg.xpath('./div[@class="HotItem-content"]/a/p/text()')[0]
            except Exception as e:
                """TODO"""
            #热搜图片
            """
            try:
                img_url = hot_msg.xpath('./a[@class="HotItem-img"]/img/@src')[0]
                请求图片进行持久化存储
                print(hot_title)
                img_data = requests.get(url=img_url,headers=headers).content
                img_name = img_url
                img_path = 'ZhiHuPic/' + hot_title[0:2] + '.jpg'
                with open(img_path,'wb') as fp:
                    fp.write(img_data)
                    print(img_name,'下载成功')
            except:
                TODO
            """
            # print(hot_link)
            tmp_link_number = str(hot_link).split('/')[4]
            print(tmp_link_number)
            res = exist_link_dict.get(tmp_link_number,"false")
            print(res)
            if res == "false":
                sql = "INSERT INTO basic_zhihu_hot(hot_title, question_id, hot_detail, hot_link, hot_img_url, hot_type, gmt_create) VALUES " + \
                      "(%s, %s, %s, %s, %s, %s,%s)"
                try:
                    self.cursor.execute(sql, [hot_title, tmp_link_number, hot_detail, hot_link, img_url, hot_type,
                                              time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())])
                    print(hot_title + '已入库')
                    self.db.commit()
                except Exception as e:
                    print(e)
                    self.db.rollback()  # 发生错误时回滚
            else:
                print(hot_title + '已存在')
            question_list = self.getQuestionId()
            if tmp_link_number in question_list:
                print("此问题的回答已经入库了")
            elif res == "false":
                self.GetAnswers(question_id=tmp_link_number)

    """
    更新数据的question_id 仅供测试使用
    def UpdateHot(self):
        _, question_ids = self.getAllLink()
        print(question_ids)
        for question_id in question_ids:
            sql = "update basic_zhihu_hot set question_id = %s where hot_link = %s "
            val = (question_id,"https://www.zhihu.com/question/" + question_id)
            self.cursor.execute(sql,val)
            self.db.commit()
            print("更新成功")
    """
if __name__ == '__main__':
    s = time.time()
    zhihuAnt = ZhihuAnt()
    # zhihquestion_listuAnt.getAllLink()
    url_map = ["https://www.zhihu.com/hot", "https://www.zhihu.com/hot?list=science",
               "https://www.zhihu.com/hot?list=digital", "https://www.zhihu.com/hot?list=sport",
               "https://www.zhihu.com/hot?list=fashion", "https://www.zhihu.com/hot?list=film",
               "https://www.zhihu.com/hot?list=car"]
    pool = ThreadPoolExecutor(max_workers=8)
    for url in url_map:
        pool.submit(zhihuAnt.getHotList(url=url))
    # pool.submit(zhihuAnt.UpdateHot())
    # 关闭指针对象
    zhihuAnt.cursor.close()
    # 关闭连接对象
    zhihuAnt.db.close()