# coding=utf-8
import json
import random
import time
import requests
from lxml import etree

from zhihu_mongo import *
from zhihu_tables import *


class zhihuSpiderUser(object):
    """Crawl Zhihu user profile pages and persist the extracted user data.

    Workflow (see :meth:`start`): poll the pending-URL queue (helpers from
    ``zhihu_mongo`` / ``zhihu_tables``), fetch each user's activities page,
    parse the embedded ``js-initialData`` JSON blob, then store the user
    record and follower/following counts.
    """

    def __init__(self):
        # Template for a user's activities page; filled with the url token.
        self.base_url = 'https://www.zhihu.com/people/{}/activities'
        self.req_dic_url = ''
        self.req_follow_url = ''
        # NOTE(review): the hard-coded cookie carries a session token and will
        # expire; requests will start failing once it does — refresh it.
        self.headers = {
            'User-Agent':'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36',
            'cookie':'_xsrf=h9ThOxRUo4Iv8jMpx4V1xj3ByJO8TPM7; d_c0="AGDtW2fLrg-PTmDrqKlU_XiwMzyOZc0bfGU=|1562208301"; __utmv=51854390.000--|3=entry_date=20190704=1; _zap=afe7cc4a-2866-4b7d-9223-152f84420b86; __utma=51854390.1816627932.1562208348.1562208348.1562208388.2; __utmz=51854390.1562208388.2.2.utmcsr=baidu|utmccn=(organic)|utmcmd=organic; Hm_lvt_98beee57fd2ef70ccdd5ca52b9740c49=1571126897; l_n_c=1; r_cap_id="Y2M2ODMwMWIyZGZkNDQwMzg2ZTI2YmRhYmQ2MzE4NTA=|1571126731|a1c9fc2c0a2a3dc3bf4201585051feb562c22f52"; cap_id="MTBhODRiYjYxODA0NGIyNTk4NTk2MDhhM2E2NGZlYTQ=|1571126731|f9b676b6a072f8bf32af97d87293ddcf9149e348"; l_cap_id="OGNlMjkxMjEwYzM1NDQ2OGEzMmZjNWYzZTI4NGJlZmM=|1571126731|ea0f3ac5a5cb39a411b5c1263b213ed7806ccc7c"; n_c=1; tshl=; capsion_ticket="2|1:0|10:1571654837|14:capsion_ticket|44:YzY1MmI5NTRhM2RiNGY5ZTgzNGIyYzI3NjNlYzVhYWM=|cecc82454d77f9d5e9ff5e81151e3e5e186a028e79191365db05e509afd841d7"; z_c0="2|1:0|10:1571654883|4:z_c0|92:Mi4xMWswdERRQUFBQUFBWU8xYlo4dXVEeWNBQUFDRUFsVk40eG5WWFFEMTc2OGxreDlVTDFzNldrWi1BVmFyVHlCTUZn|9f736496f8e9fa04b4ae8ff5e71280510bb5b37d3e8a0021aa9a4baac8c73401"; tst=r; q_c1=97c2825ad87a45b1b3e57549374c9833|1571655639000|1562208296000; tgw_l7_route=18884ea8e9aef06cacc0556da5cb4bf1; Hm_lpvt_98beee57fd2ef70ccdd5ca52b9740c49=1572318778'
        }

    # Fetch the activities page for one pending-URL record.
    def getHtml(self, dic_url):
        """Request the activities page identified by ``dic_url['urltoken']``.

        Args:
            dic_url: queue record (dict) holding at least the key
                ``'urltoken'``.

        Returns:
            ``(True, response)`` on HTTP 200; ``(False, 'error')`` on any
            non-200 status or exception.  Failed records are logged via
            ``ERROR_URL``.
        """
        try:
            url = self.base_url.format(dic_url['urltoken'])
            conn = requests.get(url=url, headers=self.headers)

            conn.encoding = 'utf-8'
            if conn.status_code == 200:
                return True, conn
            else:
                ERROR_URL(dic_url)
                return False, 'error'
        except Exception as e:
            print('error requests.get ', str(e))
            # BUG FIX: the original passed the builtin ``dict`` type here
            # instead of the failed record, so the error queue was corrupted.
            ERROR_URL(dic_url)
            return False, 'error'

    def getUserData(self, html):
        """Extract the embedded ``js-initialData`` JSON blob from a page.

        Args:
            html: page source text of a Zhihu profile page.

        Returns:
            ``(True, data_dict)`` when the blob is present and parseable,
            otherwise ``(False, 'error_get_data')``.
        """
        html = etree.HTML(html)

        initialData = html.xpath('//script[@id="js-initialData"]/text()')
        if len(initialData) > 0:
            initialData = initialData[0].strip()
            # BUG FIX: dropped ``encoding='utf-8'`` — the kwarg was ignored
            # in Python 3 and removed entirely in Python 3.9 (TypeError).
            return True, json.loads(initialData)
        else:
            return False, 'error_get_data'


    def start(self):
        """Main crawl loop: poll the queue, fetch, parse, and persist forever."""
        while True:
            # Block until at least one pending URL is queued.
            while True:
                num = GET_STAY_URL_COUNT()
                if num > 0:
                    break
                else:
                    time.sleep(3)

            stay_url = GET_STAY_URL()
            status, conn = self.getHtml(stay_url)

            # Skip this record when the HTTP request failed.
            if not status:
                continue

            try:
                key, initialData = self.getUserData(conn.text)
                if not key:
                    print('ERROR_URL get_data is [] 没有数据')
                    ERROR_URL(stay_url)
                    continue

                initialState = initialData['initialState']
                users = initialState['entities']['users']

                # Persist the user record.
                us = allUser()
                us.name, data = users.popitem()
                us.uid = data['id']
                us.url = data['url']
                us.urltoken = data['urlToken']
                us.data = data
                INSERT_USER_DATA(us.__dict__)
                print(us.__dict__)

                # Persist the follow counts / follow-crawl seed.
                fu = followUrl()
                fu.uid = data['id']
                fu.urltoken = data['urlToken']
                fu.name = data['name']
                fu.follower = data['followerCount']
                fu.following = data['followingCount']
                INSERT_FOLLOW_URL(fu.__dict__)
                print(fu.__dict__)

            except Exception as e:
                print('error_url ', str(e))
                ERROR_URL(stay_url)

            # Throttle between records to avoid hammering the site.
            time.sleep(3)



if __name__ == '__main__':
    # Script entry point: build the spider and run the crawl loop.
    zhihuSpiderUser().start()