# coding=utf-8
import json
import random
import time
import requests
from lxml import etree

from zhihu_mongo import *
from zhihu_tables import *


class zhihuSpiderUser(object):
    """Crawl a Zhihu user's follower/following list pages and persist every
    discovered user.

    Work items come from a queue exposed by the star-imported project helpers
    (``GET_FOLLOW_URL_COUNT`` / ``GET_FOLLOW_URL``); failures are recorded via
    ``ERROR_FOLLOW_URL`` and discovered users are stored with ``INSERT_ALL_URL``.
    """

    def __init__(self):
        # URL templates filled with (urltoken, page number).
        self.follower_url = 'https://www.zhihu.com/people/{}/followers?page={}'
        self.following_url = 'https://www.zhihu.com/people/{}/following?page={}'
        # Page counts for the user currently being crawled.
        self.follower_count = 1
        self.following_count = 1

        # NOTE(review): hard-coded session cookie — it will expire; consider
        # loading it from config/env instead of the source file.
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.132 Safari/537.36',
            'cookie': '_xsrf=h9ThOxRUo4Iv8jMpx4V1xj3ByJO8TPM7; d_c0="AGDtW2fLrg-PTmDrqKlU_XiwMzyOZc0bfGU=|1562208301"; __utmv=51854390.000--|3=entry_date=20190704=1; _zap=afe7cc4a-2866-4b7d-9223-152f84420b86; __utma=51854390.1816627932.1562208348.1562208348.1562208388.2; __utmz=51854390.1562208388.2.2.utmcsr=baidu|utmccn=(organic)|utmcmd=organic; Hm_lvt_98beee57fd2ef70ccdd5ca52b9740c49=1571126897; l_n_c=1; r_cap_id="Y2M2ODMwMWIyZGZkNDQwMzg2ZTI2YmRhYmQ2MzE4NTA=|1571126731|a1c9fc2c0a2a3dc3bf4201585051feb562c22f52"; cap_id="MTBhODRiYjYxODA0NGIyNTk4NTk2MDhhM2E2NGZlYTQ=|1571126731|f9b676b6a072f8bf32af97d87293ddcf9149e348"; l_cap_id="OGNlMjkxMjEwYzM1NDQ2OGEzMmZjNWYzZTI4NGJlZmM=|1571126731|ea0f3ac5a5cb39a411b5c1263b213ed7806ccc7c"; n_c=1; tshl=; capsion_ticket="2|1:0|10:1571654837|14:capsion_ticket|44:YzY1MmI5NTRhM2RiNGY5ZTgzNGIyYzI3NjNlYzVhYWM=|cecc82454d77f9d5e9ff5e81151e3e5e186a028e79191365db05e509afd841d7"; z_c0="2|1:0|10:1571654883|4:z_c0|92:Mi4xMWswdERRQUFBQUFBWU8xYlo4dXVEeWNBQUFDRUFsVk40eG5WWFFEMTc2OGxreDlVTDFzNldrWi1BVmFyVHlCTUZn|9f736496f8e9fa04b4ae8ff5e71280510bb5b37d3e8a0021aa9a4baac8c73401"; tst=r; q_c1=97c2825ad87a45b1b3e57549374c9833|1571655639000|1562208296000; tgw_l7_route=18884ea8e9aef06cacc0556da5cb4bf1; Hm_lpvt_98beee57fd2ef70ccdd5ca52b9740c49=1572318778'
        }

    def _record_error(self, dic_url, pageNum, follower):
        """Annotate ``dic_url`` with the page index that failed and persist it
        so the page can be retried later."""
        if follower:
            dic_url['follower_index'] = pageNum
            dic_url['following_index'] = 0
        else:
            dic_url['follower_index'] = 0
            dic_url['following_index'] = pageNum
        ERROR_FOLLOW_URL(dic_url)

    # 请求url数据
    def getHtml(self, dic_url, pageNum=1, follower=True):
        """Fetch one follower/following list page.

        :param dic_url: work-item dict with at least key ``'urltoken'``;
            mutated with failing page indices on error (see ``_record_error``).
        :param pageNum: 1-based page number to fetch.
        :param follower: True -> followers list, False -> following list.
        :return: ``(True, response)`` on HTTP 200, else ``(False, 'error')``.
        """
        print('get_html dict ', dic_url)

        try:
            template = self.follower_url if follower else self.following_url
            url = template.format(dic_url['urltoken'], pageNum)
            print(url)
            # Timeout added so a stalled connection cannot hang the crawler;
            # a timeout is handled like any other request failure below.
            conn = requests.get(url=url, headers=self.headers, timeout=15)
            conn.encoding = 'utf-8'
            if conn.status_code == 200:
                return True, conn
        except Exception as e:
            # Best-effort: log and fall through to the shared failure path.
            print('getHtml request failed: ', e)
        self._record_error(dic_url, pageNum, follower)
        return False, 'error'

    def getUserData(self, html):
        """Extract the embedded ``js-initialData`` JSON blob from a page.

        :param html: raw page HTML text.
        :return: ``(True, parsed dict)`` or ``(False, 'error_get_data')`` when
            the script tag is missing (e.g. an anti-crawler page was served).
        """
        tree = etree.HTML(html)
        initialData = tree.xpath('//script[@id="js-initialData"]/text()')
        if not initialData:
            return False, 'error_get_data'
        # BUG FIX: json.loads(..., encoding=...) was removed in Python 3.9;
        # the xpath result is already str, so no encoding argument is needed.
        return True, json.loads(initialData[0].strip())

    def getPage(self, num):
        """Return how many 20-item list pages are needed for ``num`` entries.

        BUG FIX: the old ``num / 20`` is true division under Python 3 and
        returned a float (e.g. 2.05 for 21); use ceiling integer division.
        """
        num = int(num)
        return -(-num // 20)

    def _save_users(self, users):
        """Persist every user record from an initialData ``users`` mapping."""
        for k, v in users.items():
            alu = allUrl()
            alu.uid = v['id']
            alu.name = v.get('name', 'error')
            if alu.name == 'error':
                # Entry without a name (deleted/anonymous account) — skip it.
                continue
            alu.urltoken = v['urlToken']
            alu.url = v['url']
            INSERT_ALL_URL(alu.__dict__)
            print(alu.__dict__)
            time.sleep(0.5)  # throttle insert rate

    def _crawl_pages(self, follow_url, page_count, follower):
        """Fetch and persist pages 1..page_count of one list kind for one user.

        Shared body for the follower and following loops that were previously
        duplicated in ``start``.
        """
        for page in range(1, page_count + 1):
            status, conn = self.getHtml(follow_url, pageNum=page, follower=follower)
            if not status:
                # Request failed; already recorded for retry by getHtml.
                continue

            key, initialData = self.getUserData(conn.text)
            if not key:
                print('ERROR_FOLLOW_URL get_data is [] 没有数据')
                ERROR_FOLLOW_URL(follow_url)
                continue

            users = initialData['initialState']['entities']['users']
            if not users:
                # Empty page -> we walked past the last real page.
                break

            self._save_users(users)
            time.sleep(3)  # polite delay between list pages

    def start(self):
        """Main loop: pull pending users from the queue and crawl both their
        follower and following lists. Runs forever."""
        while True:
            # 等待爬取的url的数据 — block until at least one pending url exists.
            while GET_FOLLOW_URL_COUNT() <= 0:
                time.sleep(3)
                print('get_follow_url_count 0 time sleep 3')

            follow_url = GET_FOLLOW_URL()

            # uid  urltoken  name  follower  following
            self.follower_count = self.getPage(follow_url['follower'])
            self.following_count = self.getPage(follow_url['following'])

            # 被关注了 (followers)
            self._crawl_pages(follow_url, self.follower_count, follower=True)

            # 关注了 (following)
            self._crawl_pages(follow_url, self.following_count, follower=False)



if __name__ == '__main__':
    # Script entry point: build the spider and run its endless crawl loop.
    spider = zhihuSpiderUser()
    spider.start()