from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.chrome.options import Options
from selenium.webdriver.common.by import By
from multiprocessing import Process
from multiprocessing import Pool
from pymongo import MongoClient
from selenium import webdriver
from bs4 import UnicodeDammit
from bs4 import BeautifulSoup
from urllib import request
from urllib import parse

import numpy as np
import requests
import platform
import datetime
import pymongo
import random
import hashlib
import math
import base64
import codecs
import json
import pprint
import time
import ssl
import sys
import os
import re

# CLI: an optional single argument selects the MongoDB host (default: localhost).
net = sys.argv[1] if len(sys.argv) == 2 else 'localhost'

# NOTE(review): the crawler historically recursed once per visited user/page,
# hence the huge recursion limit; kept for safety.
sys.setrecursionlimit(1000000)

# connect=False defers the actual connection until first use, so each forked
# worker process opens its own connection (required when using multiprocessing).
client = MongoClient(net, 27017, connect = False)

zhihu_user_db = client['zhihu_user_db']
zhihu_user_coll = zhihu_user_db['zhihu_user_coll']

# The chromedriver binary is expected in the PARENT of the current working
# directory.  BUG FIX: the previous string slicing on rfind('/') yielded a bare
# relative filename on Windows (no '/' in the path); os.path handles the
# platform separator portably and produces the same result on POSIX.
_driver_binary = 'chromedriver_linux' if platform.system() == 'Linux' else 'chromedriver'
chrome_driver_path = os.path.join(os.path.dirname(os.getcwd()), _driver_binary)


def chrome_driver():
    """Spin up and return a Chrome WebDriver session for the crawler.

    Set the ``proxy`` local to a host:port string to route traffic through a
    proxy; it is currently disabled (empty string).  Page loads time out
    after 10 seconds.
    """
    proxy = ''

    options = webdriver.ChromeOptions()
    # Tuning flags kept around but intentionally disabled:
    # options.add_argument('--headless')
    # options.add_argument('--disable-gpu')
    # options.add_argument('--disable-images')
    # options.add_argument('start-maximized')
    # options.add_argument('disable-infobars')
    # options.add_argument('--disable-extensions')
    # options.add_argument('--disable-dev-shm-usage')
    options.add_argument('--no-sandbox')

    # On Linux the snap-packaged Chromium binary is used.
    if platform.system() == 'Linux':
        options.binary_location = '/snap/bin/chromium'

    caps = options.to_capabilities()

    if proxy:
        caps['proxy'] = {
            "proxyType": "MANUAL",
            "httpProxy": proxy,
            "sslProxy": proxy,
            "ftpProxy": proxy,
            "noProxy": None,
            "autodetect": False,
            "class": "org.openqa.selenium.Proxy",
        }

    driver = webdriver.Chrome(desired_capabilities = caps, executable_path = chrome_driver_path)
    driver.set_page_load_timeout(10)

    return driver


class ZHIHU():
    """Zhihu user-graph crawler.

    Owns one Chrome session.  Starting from a random user already stored in
    MongoDB, it visits that user's followers/following pages, stores every
    newly-seen user, then hops to another random stored user — forever.
    Captcha interstitials ('安全验证') are solved via the fateadm OCR service.
    """

    def __init__(self):
        # One driver per instance; start() loops until the DB runs dry
        # (or the process is killed).
        self.driver = chrome_driver()
        self.start()

    def url2base64(self, picUrl):
        """Download *picUrl* and return its content base64-encoded (bytes)."""
        with request.urlopen(picUrl) as web:
            return base64.b64encode(web.read())

    def CalcSign(self, s):
        """Return the hex MD5 digest of *s* (UTF-8 encoded)."""
        return hashlib.md5(s.encode(encoding = 'utf-8')).hexdigest()

    def get_code(self, picUrl):
        """Submit the captcha image at *picUrl* to the fateadm OCR API.

        Returns the recognized text on success, ``False`` on failure.
        NOTE(review): account id and API key are hard-coded below.
        """
        tm = str(int(time.time()))
        img_data = self.url2base64(picUrl)
        rsp = requests.post('http://pred.fateadm.com/api/capreg', data = {
            'user_id': '103595',
            'timestamp': tm,
            'sign': self.CalcSign('103595' + tm + self.CalcSign(tm + 'qn4iUtbRoSU95do6zEglMUmOLIjkNIiQ')),
            'predict_type': '30400',
            'img_data': img_data
        }).json()

        if rsp['RetCode'] == '0':
            return json.loads(rsp['RspData'])['result']
        else:
            return False

    def _solve_captcha(self, soup):
        """If *soup* is the '安全验证' captcha page, solve it and return a fresh
        soup of the resulting page; otherwise return *soup* unchanged."""
        title = soup.find('title')
        if not (title and title.text.find('安全验证') != -1):
            return soup

        code = self.get_code(soup.find(class_ = 'Unhuman-captcha').get('src'))

        self.driver.find_element_by_css_selector('.Unhuman-input input').send_keys(code)
        self.driver.find_element_by_css_selector('button').click()

        time.sleep(2)

        return BeautifulSoup(self.driver.page_source, 'lxml')

    def _crawl_profile(self, url):
        """Visit *url*'s followers and following list pages and harvest users.

        Skips privacy-protected profiles; on page-load failure the broken
        driver is replaced and the next list is tried.
        """
        for rel in ['followers', 'following']:
            try:
                self.driver.get(url + '/' + rel)
                time.sleep(2)
                soup = BeautifulSoup(self.driver.page_source, 'lxml')
            except Exception:
                # Timeout / dead session: restart Chrome and move on.
                self.driver.close()
                self.driver = chrome_driver()
                continue

            soup = self._solve_captcha(soup)

            # BUG FIX: the original `soup.find('该用户设置了隐私保护')` searched for
            # a TAG with that name and could never match; search page text instead.
            if soup.find(string = re.compile('该用户设置了隐私保护')):
                continue

            self.get_users()

    def start(self, url = ''):
        """Crawl *url* (if given), then hop to a random stored user and repeat.

        Rewritten iteratively — the original recursed once per visited user,
        which is why sys.setrecursionlimit had to be raised.  Returns only if
        the collection has no users to sample.
        """
        while True:
            if url:
                self._crawl_profile(url)

            # Pick one random stored user as the next hop; stop when empty.
            url = ''
            for u in zhihu_user_coll.aggregate([{'$sample': {'size': 1}}]):
                url = 'https:' + u['link']
            if not url:
                return

    def get_users(self):
        """Store every user row on the current list page, following the
        'next page' button until pagination ends (iterative, not recursive)."""
        while True:
            soup = self._solve_captcha(BeautifulSoup(self.driver.page_source, 'lxml'))

            for row in soup.select('.List .List-item'):
                try:
                    head = row.find(class_ = 'ContentItem-head').find('a')
                    link = head.get('href')
                    nick = head.text

                    if not zhihu_user_coll.find_one({'link': link}):
                        doc = {
                            'nick': nick,
                            'link': link
                        }
                        zhihu_user_coll.insert_one(doc)
                        print(doc)
                except Exception:
                    # Malformed row (or transient DB error): skip it.
                    continue

            # Advance to the next page, or stop once the button is gone.
            try:
                self.driver.find_element_by_class_name('PaginationButton-next').click()
            except Exception:
                return
            time.sleep(1)

def start():
    """Worker-process entry point: build one ZHIHU crawler (runs until killed)."""
    ZHIHU()


if __name__ == '__main__':
    # Fan out 10 independent crawler processes, then wait for all of them.
    workers = [Process(target = start) for _ in range(10)]

    for worker in workers:
        worker.start()

    for worker in workers:
        worker.join()
