import time
from urllib.parse import quote

import pymysql
from selenium import webdriver
from selenium.webdriver import Keys
from selenium.webdriver.chrome.service import Service
from selenium.webdriver.common.by import By
import random

import NoxConfig
import json

from NoxUser import NoxUser

chrome_driver_instance = None

noxUsers = []


def getDatabaseConnection():
    """Open and return a new pymysql connection using the nox db config."""
    db_conf = NoxConfig.noxConfig['nox']['db']

    return pymysql.connect(
        host=db_conf['host'],
        port=db_conf['port'],
        user=db_conf['user'],
        password=db_conf['pass'],
        database=db_conf['dbName'])


def insert(_user, _tags):
    """Persist one scraped user and their URL-encoded tags into nox_user_tags.

    Rolls back on failure and always releases the cursor and connection.

    :param _user: user name scraped from the page
    :param _tags: tag string; URL-quoted before storage (as before)
    """
    # Pre-initialize so except/finally never hit a NameError when
    # getDatabaseConnection() itself raises (the original left both unbound).
    _connection = None
    _cursor = None
    try:
        _connection = getDatabaseConnection()

        _cursor = _connection.cursor()

        channel = NoxConfig.noxConfig['nox']['channel']

        # Parameterized query: the previous string concatenation was open to
        # SQL injection via scraped user names / tags.
        _cursor.execute(
            "insert into nox_user_tags(user, tags, channel) values (%s, %s, %s)",
            (_user, quote(_tags), channel))

        _connection.commit()

    except Exception as e:
        print(e.__str__())
        if _connection:
            _connection.rollback()
    finally:
        if _cursor:
            _cursor.close()
        if _connection:
            _connection.close()


def getCookie(_userName):
    """Read and return the serialized cookie string for *_userName*.

    Raises OSError if the cookie file is missing or unreadable.
    """
    cookie_path = (NoxConfig.noxConfig['nox']['cookieDirectory'] +
                   str(_userName) + ".cookie")

    # "with" guarantees the handle is closed even if read() raises;
    # the original leaked the file handle on a read error.
    with open(cookie_path, 'r') as cookie_file:
        return cookie_file.read()


def loadingCookie(cookieName):
    """Replace the browser's cookies with the saved set for *cookieName*.

    Clears current cookies, loads the JSON cookie file, installs each
    cookie, then refreshes so the page picks up the restored session.
    """
    chrome_driver_instance.delete_all_cookies()

    # getCookie() raises on a missing file and never returns None, so the
    # original "is None" check was dead code and a missing cookie file
    # crashed the run. Catch the I/O error and bail out gracefully instead.
    try:
        cookie_str = getCookie(cookieName)
    except OSError as e:
        print("加载cookie失败," + cookieName)
        print(e.__str__())
        return

    cookies = json.loads(cookie_str)

    for c in cookies:
        chrome_driver_instance.add_cookie(c)

    chrome_driver_instance.refresh()


def createChromeDriver():
    """Lazily create the module-wide Chrome driver; when configured,
    pre-load the saved login cookie for the configured user."""
    global chrome_driver_instance

    if chrome_driver_instance:
        return

    try:
        service = Service(executable_path='/Users/gilbert/chromedriver-mac-x64/chromedriver')
        chrome_driver_instance = webdriver.Chrome(service=service)

        if NoxConfig.noxConfig['nox']['loadingCookie'] == 1:
            # Navigate first so the cookie domain matches, then install cookies.
            chrome_driver_instance.get(NoxConfig.noxConfig['nox']['loadingCookieUrl'])
            time.sleep(3)

            loadingCookie(NoxConfig.noxConfig['nox']['userName'])

            chrome_driver_instance.maximize_window()
    except Exception as e:
        print(e.__str__())


def closeChromeDriver():
    """Shut down the shared Chrome driver.

    Uses quit() rather than close(): close() only closes the current window
    and leaves the chromedriver process running, leaking a process per run.
    The None guard makes the call safe when the driver was never created.
    """
    if chrome_driver_instance:
        chrome_driver_instance.quit()


def sampling():
    """Paginate through the influencer list collecting detail-page URLs,
    then visit each detail page and persist the user's tags via insert().

    Page range and page size come from NoxConfig; random sleeps between
    requests keep the crawl rate irregular.
    """
    urls = []
    start = NoxConfig.noxConfig['nox']['startPage']
    end = NoxConfig.noxConfig['nox']['endPage']
    size = NoxConfig.noxConfig['nox']['pageSize']

    print("爬虫路由到指定分页")

    time.sleep(3)

    # Jump directly to the configured start page via the pagination input.
    chrome_driver_instance.find_element(By.XPATH, "//div[@class='pagination-jump']//input"). \
        send_keys(Keys.BACK_SPACE)

    chrome_driver_instance.find_element(By.XPATH, "//div[@class='pagination-jump']//input").\
        send_keys("'" + str(start) + "'", Keys.ENTER)

    print("爬虫预热开始")

    time.sleep(5)

    # Collect detail links page by page. (The original also had a
    # "count < start" branch, unreachable because count starts at start
    # and only ever increases; it has been removed.)
    count = start
    while count <= end:
        random_number = random.randint(3, 5)

        details = chrome_driver_instance.find_elements(By.XPATH, "//div[contains(@class, 'basic-info-container')]"
                                                                 "/a")
        for detail in details:
            urls.append(detail.get_attribute('href'))

        # NOTE(review): this compares the CUMULATIVE url count with one
        # page's size, so it only stops early on an under-filled first
        # page — preserved as-is to keep behavior identical.
        if len(urls) < size:
            break

        chrome_driver_instance.find_element(By.XPATH, "//div[@class='right']").click()

        time.sleep(random_number)

        count = count + 1

    print("爬虫预热结束，开始爬取")

    for url in urls:
        random_number = random.randint(1, 3)

        chrome_driver_instance.get(url)

        time.sleep(random_number)

        try:
            user = chrome_driver_instance.find_element(By.XPATH, "//div[@class='influencer-alias']").text

            chrome_driver_instance.find_element(By.XPATH, "//span[text()='内容数据']").click()
        except Exception as e:
            # The original swallowed this silently and then crashed below
            # with a NameError on the unbound "user"; log and skip instead.
            print(e.__str__())
            continue

        time.sleep(random_number)

        try:
            tags = chrome_driver_instance.find_elements(By.XPATH, "//div[contains(@class, 'common-tags')]"
                                                                  "//div[@class='text']")
        except Exception as e:
            # Same unbound-variable hazard for "tags"; fall back to no tags.
            print(e.__str__())
            tags = []

        noxUser = NoxUser()
        noxUser.setUserName(user.strip())
        for tag in tags:
            noxUser.addUserTag(tag.text)
        noxUsers.append(noxUser)

        insert(noxUser.getUserName(), noxUser.getUserTags())
        print("scrapy(" + noxUser.getUserName() + "," + noxUser.getUserTags() + ")")

        progress = len(noxUsers) / len(urls)
        print("爬虫进度：" + str(round(progress * 100, 2)) + "%,当前在处理第" + str(start + int(len(noxUsers) / 10)) + "页的数据.")


def initialize_scrapy():
    """Run the full scrape: start the browser, sample, then shut down.

    The driver is closed in a finally block so the browser/driver process
    is not leaked when sampling() raises.
    """
    createChromeDriver()
    try:
        sampling()
    finally:
        closeChromeDriver()


# Script entry point: run the scrape end to end when executed directly.
if __name__ == '__main__':
    initialize_scrapy()
