import random
import time
import math
import requests
import pymongo
from bson.objectid import ObjectId
from bs4 import BeautifulSoup
from fake_useragent import UserAgent

# Request headers sent with every Zhihu API call.
# NOTE(review): the cookie below is a hard-coded, logged-in session captured
# from a browser — it will expire and should be rotated/injected from config
# rather than committed to source. The user-agent value here is a placeholder;
# get_data() overwrites it with a random one on every request.
headers = {
    "accept": "*/*",
    "user-agent": "Mozilla/5.0 (Linux; Android 6.0; Nexus 5 Build/MRA58N) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/102.0.0.0 Mobile Safari/537.36",
    "referer": "https://www.zhihu.com/question/50327690/answers/updated",
    'Connection': 'close',
    "cookie": "_xsrf=Sr9Ok9Tysg4KdhXQPVg1ZlHrJrrX3gr8; _zap=a4d291dd-f27c-49d5-9b01-685d3241f4e1; d_c0=AHCWwcLv8BaPTly2I7eLvG6PIkrZzXIZb38=|1686906101; YD00517437729195%3AWM_TID=1%2FbPQk68OSVAVRQABFLBhPAb0XhmRUJJ; __snaker__id=GS7levFKgU619Qtu; l_cap_id=\"NTk1MTNmNDExNmRlNDFjNDkyMTJiYzVjYTA1MDgxNWE=|1688715217|d0fec3773bba1380a4455fe619c7c36ee0909104\"; r_cap_id=\"ZDNkNjRlOGIzNzM4NDA4Yzg3MGFiNWRjODZiMmRjNmY=|1688715217|a2f647f85768cc1b72fef83b831f621f1ab1f5c8\"; cap_id=\"MzJmOWRjZDc1ZTZiNGYwMjgwMjI3MjIxZjkxYTY5YjE=|1688715217|8dce32b4cafb49fe0aa93778ba91a6f9298d4cdf\"; Hm_lvt_98beee57fd2ef70ccdd5ca52b9740c49=1688345903,1688950760,1689060802,1689580605; YD00517437729195%3AWM_NI=tlUooZndzRm5CFt8foBUSacFxF0EUo5w9OOsgQA5W%2FFTmxWTS%2BJXyM4LtOwDr3jeRYjX5XXIRbJqA3WxomGBnTKWZhy1r3kSrpxqafjxK8%2FPkRH8%2B3%2Ftxe34gJQI1%2BdAWWI%3D; YD00517437729195%3AWM_NIKE=9ca17ae2e6ffcda170e2e6eed7c874e9b29bdad74792928fb2d85f868e8facc46dbaa8ad94ee7d9cac97b9ae2af0fea7c3b92ab8b49b90ee52aebcb6b3f03f819abcd9e565828b9d82e24ebc9a00a6d865f7a9f7b0ce79a58f8fd6d625868ca399d66091f586b9fc47f894bcb0f650b2eda686d153aa88fbb9db4392be8692c43995a8a18ec64aae8c85b8d964bbefbed4ea59b2b0b892e46eaaa885d9ae5ca98e88d9aa39edacb886b240a5969faab421a89f99b8cc37e2a3; gdxidpyhxdE=KwZEL29jShDCf6LGi%5CakRtbVqkxRbBXuC%5C%2FNKns%2BrzPYvIrQsV3lAjg2oJwI4Z%2FWkLKbM62higkNkZ0i4YC%5CzhRSqfxOjYr6erALiz%2BEKZ6GbeZ2mgRs1K1ZXm49O5mdm%5CN4nt7QUqfB9E1R7yp5cm%2B7yZdCaomGXndCK%5CUPr9ZnSRxB%3A1689642573062; Hm_lpvt_98beee57fd2ef70ccdd5ca52b9740c49=1689642458; captcha_session_v2=2|1:0|10:1689642460|18:captcha_session_v2|88:THpmc2d1VnRuM0Z6L1lCNmNKNk5JZ051UTY3dHNBb1I2ZGthZjNPTGZTZjByR1Y2MVBvd2tzc0hMZ280cUN3MQ==|f9b97c39f44f847f07f922f0e8c7ad5192171eaa04987b801b68ecbe08d2aca5; KLBRSID=af132c66e9ed2b57686ff5c489976b91|1689642470|1689641669"
}

# --- MongoDB wiring (runs at import time; requires a local mongod) ---
mongo_client = pymongo.MongoClient("mongodb://localhost:27017/")
db = mongo_client['surge']
# Destination collection for the scraped answer documents.
# NOTE(review): the collection is named 'cookie' although it stores answers —
# presumably historical; confirm before renaming.
article = db['cookie']

# Metadata documents denormalized into every scraped record below.
# NOTE(review): these find_one() calls can return None if the documents are
# missing, which would make get_data() fail on subscripting — verify the
# fixtures exist before running.
directory_db = db['directory']
directory_obj = directory_db.find_one({'_id': ObjectId('64b5045e40a6a21d002765ca'), 'type': 'question'})

source_db = db['source']
source_obj = source_db.find_one({'text': '知乎'})

type_db = db['type']
type_obj = type_db.find_one({'type': 'answer'})

# Optional Redis-based dedup filter, currently disabled.
# r_filter = RedisFilter(redis_key='question/341298685')


def _build_record(target, content_text):
    """Assemble one MongoDB document for a single answer.

    ``target`` is the raw answer payload from the Zhihu feeds API;
    ``content_text`` is its HTML content already stripped to plain text.
    Raises KeyError/AttributeError if the payload is missing expected fields.
    """
    question_link = "https://www.zhihu.com/question/{}".format(target['question']['id'])
    answers_link = "https://www.zhihu.com/question/{}/answer/{}".format(target['question']['id'], target['id'])
    return {
        # Scrape timestamp in milliseconds, matching Zhihu's ms-based times.
        "reprint_time": math.floor(time.time() * 1000),
        "type_id": type_obj['_id'],
        "type": type_obj['type'],
        "source": source_obj['text'],
        "source_id": source_obj['_id'],
        "question": {
            "title_id": directory_obj['_id'],
            "title": target['question']['title'],
            # API returns epoch seconds; store milliseconds for consistency.
            "created_time": target['question']['created'] * 1000,
            "updated_time": target['question']['updated_time'] * 1000,
            "type": target['question']['type'],
            "url": question_link
        },
        "answer": {
            "type": "answer",
            "url": answers_link,
            "eye": None,  # view count is not exposed by this endpoint
            "comment": target['comment_count'],
            "like": target['voteup_count'],
            "created_time": target['created_time'] * 1000,
            "updated_time": target['updated_time'] * 1000,
            "content_html": target['content'],
            "content_text": content_text,
        },
        "author": {
            "name": target['author']['name'],
            "headline": target['author']['headline'],
            "avatar": target['author']['avatar_url_template'],
            "gender": target['author']['gender'],
            "user_type": target['author']['type'],
            "url_token": target['author']['url_token'],
            "url": "https://www.zhihu.com/people/{}".format(target['author']['url_token'])
        }
    }


def get_data(url):
    """Fetch one page of Zhihu answers, store them in MongoDB, and return
    the next-page URL (or None when there is nothing more to fetch).

    A fresh random user-agent is used per request. Network errors, non-JSON
    responses, and malformed answer payloads are logged and skipped rather
    than aborting the whole crawl.
    """
    headers['user-agent'] = UserAgent().random
    try:
        # timeout prevents the crawler from hanging forever on a stalled
        # connection; ValueError covers JSON decode failures (e.g. an HTML
        # rate-limit/captcha page).
        res = requests.get(url, headers=headers, timeout=15).json()
    except (requests.RequestException, ValueError) as e:
        print(url, e)
        return None

    # Error-shaped responses may lack 'data' entirely; .get avoids a KeyError.
    if not res.get('data'):
        print(res)
        return None

    answers_list = []
    for answers in res['data']:
        target = answers['target']

        try:
            content_text = BeautifulSoup(target['content'], 'html.parser').text
        except Exception as e:
            print("https://www.zhihu.com/question/{}/answer/{}".format(target['question']['id'], target['id']), e)
            continue

        try:
            # 去重 (dedup) hook, currently disabled:
            # if not r_filter.is_exists(link):
            answers_list.append(_build_record(target, content_text))
            # r_filter.save(link)
        except (AttributeError, KeyError) as e:
            # A single malformed answer payload (missing key, None sub-object)
            # must not kill the whole page; log and move on.
            print(e)
            continue

    if answers_list:
        article.insert_many(answers_list)

    # Pagination cursor; tolerate responses with no 'paging' section.
    return res.get('paging', {}).get('next')


if __name__ == '__main__':
    # Seed URL: first page of answers for question 50327690, ordered by
    # update time; get_data() returns each subsequent page's URL until the
    # API reports no more pages.
    next_page = 'https://www.zhihu.com/api/v4/questions/50327690/feeds?include=data%5B*%5D.is_normal%2Cadmin_closed_comment%2Creward_info%2Cis_collapsed%2Cannotation_action%2Cannotation_detail%2Ccollapse_reason%2Cis_sticky%2Ccollapsed_by%2Csuggest_edit%2Ccomment_count%2Ccan_comment%2Ccontent%2Ceditable_content%2Cattachment%2Cvoteup_count%2Creshipment_settings%2Ccomment_permission%2Ccreated_time%2Cupdated_time%2Creview_info%2Crelevant_info%2Cquestion%2Cexcerpt%2Cis_labeled%2Cpaid_info%2Cpaid_info_content%2Creaction_instruction%2Crelationship.is_authorized%2Cis_author%2Cvoting%2Cis_thanked%2Cis_nothelp%3Bdata%5B*%5D.mark_infos%5B*%5D.url%3Bdata%5B*%5D.author.follower_count%2Cvip_info%2Cbadge%5B*%5D.topics%3Bdata%5B*%5D.settings.table_of_content.enabled&offset=0&limit=5&order=updated'

    while next_page:
        next_page = get_data(next_page)
        # Random 1-2 s pause between pages to stay polite to the API.
        time.sleep(random.randint(1, 2))
