import os
import random
import time

import facade
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.mrequest.baserequest import MProxyRequest
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.our_file_dir.base_dir import BaseDir

# Directory layout: this script's dir, the repo root two levels up, and the
# target folder download/jstor/download/issue where issue HTML pages are saved.
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "issue")

# Shared MySQL helper, configured from db.ini (section "db") next to this file.
configfile = BaseDir.get_new_path(curPath, "db.ini")
mysqlutils = facade.MysqlUtiles(configfile, "db")

# Pool of desktop browser user-agent strings; one is picked at random for the
# request headers. Fix: the original list was missing a comma after the Edge
# entry, so Python's implicit string concatenation fused it with the following
# Chrome/14 entry into one invalid user agent.
USER_AGENT_LIST = [
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 Edge/16.16299',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.163 Safari/535.1',
    'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0',
    'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
    'Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.9.168 Version/11.50',
    'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 2.0.50727; SLCC2; .NET CLR 3.5.30729',
]

# Default request headers for every page fetch.
# NOTE(review): random.choice runs once at import time, so every request in a
# given process run sends the SAME user agent; move the choice into the
# request path if per-request rotation is intended — confirm with the owner.
HEADERS = {
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
    "accept-encoding": "gzip, deflate",
    "accept-language": "zh-CN,zh;q=0.9",
    "user-agent": random.choice(USER_AGENT_LIST),
    "upgrade-insecure-requests": "1"
}

# Site root; issue URLs from the DB are paths appended to this.
BaseUrl = "https://www.jstor.org"


def requestVolIssue(url, proxysList):
    """Fetch one volume/issue page through the proxy-ring request helper.

    Args:
        url: absolute URL to fetch.
        proxysList: proxy ring handed through to MProxyRequest.

    Returns:
        The response object on success, or "" when the request failed
        (the failure reason is printed).
    """
    ok, err_msg, response = MProxyRequest(url,
                                          Feature="no-bullet",
                                          HEADERS=HEADERS,
                                          proxyRingList=proxysList,
                                          timeout=(30, 60))
    if ok:
        return response
    print("请检查失败原因:" + err_msg)
    return ""


def savefile(r, filePath):
    """Write the response body (r.text) to filePath via the file helper."""
    content = r.text
    BaseDir.single_write_file(content, filePath)


def SelectFromDB():
    """Pull up to 5000 pending issue URLs (stat=0, fewer than 10 failures),
    in random order so concurrent runs don't all grab the same rows."""
    query = ("select `url` from `issue` "
             "where `stat`=0 and failed <10 order by rand() limit 5000")
    return mysqlutils.SelectFromDB(query)


def SelectProxy():
    """Return every row from the proxy_pool table (one proxy per row)."""
    # DB connection is handled inside the shared mysqlutils helper.
    return mysqlutils.SelectFromDB("SELECT `proxy` FROM `proxy_pool`")


# def setProxy():
#     global proxysList
#     rows = SelectProxy()
#     for proxy in rows:
#         if not proxysList.exist(proxy):
#             proxysList.append(proxy[0])


# Batch-execute accumulated SQL statements against the database.
def InsertIntoDbFromList(ListSqls):
    """Run every statement in ListSqls against MySQL in one batch.

    Fix: the original ended with ``ListSqls = list()``, which only rebound
    the local name — it never cleared the caller's list — so the dead
    statement is removed. Callers that need the list emptied must clear it
    themselves.
    """
    mysqlutils.ExeSqlListToDB(ListSqls)


def get_list_url_run(threadval, url, proxy):
    """Download one issue page and queue the matching status-update SQL.

    Args:
        threadval: worker context; its result_queue collects SQL strings
            that dealresult() later flushes to MySQL in one batch.
        url: issue path from the `issue` table (e.g. "/stable/...").
        proxy: proxy ring passed through to requestVolIssue.

    Side effects: creates sPath if missing, writes <sPath>/<name>.html on
    success, prints progress messages.
    """
    result_queue = threadval.result_queue
    filename = url.replace("/stable", "").replace("/", "_")
    fullurl = BaseUrl + url
    filePath = os.path.join(sPath, filename + ".html")
    # exist_ok avoids the check-then-create race the old exists()+makedirs had
    # when many worker threads hit this at once.
    os.makedirs(sPath, exist_ok=True)
    # NOTE(review): url is interpolated into SQL text; double any single quote
    # so a quoted url cannot break the statement. Parameterized queries would
    # be safer still if ExeSqlListToDB supports them — confirm.
    safe_url = url.replace("'", "''")
    done_sql = "update `issue` set `stat`=1 where url='{}'".format(safe_url)
    if os.path.exists(filePath):
        print("文件存在" + filePath)
        result_queue.put(done_sql)
        return
    r = requestVolIssue(fullurl, proxy)
    if r == "":
        # Failed fetch: bump the failure counter, leave stat pending.
        result_queue.put(
            "update `issue` set `stat`=0,failed = failed + 1 where url='{}'".format(safe_url))
        return
    savefile(r, filePath)
    print("保存文件成功")
    result_queue.put(done_sql)


class IssueThreadRun(MThreadingRun):
    """Thread-pool crawler driver: overrides MThreadingRun hooks to fetch
    pending issue URLs from MySQL, download each page through the proxy
    ring, and batch status updates back to the database."""

    def __init__(self, num):
        # num: worker-thread count, forwarded to the MThreadingRun base.
        super(IssueThreadRun, self).__init__(num)
        self.init()

    def init(self):
        # Reset every row's failure counter before a fresh crawl run.
        sql = "update issue set failed=0"
        mysqlutils.ExeSqlToDB(sql)

    def getTask(self, *args, **kwargs):
        # Task source hook: next batch of pending URL rows (up to 5000).
        return SelectFromDB()

    def setTask(self, results=None, *args, **kwargs):
        # Queue one job per URL row; row[0] is the url column.
        # NOTE(review): this class defines `fun`, not `func` — presumably
        # MThreadingRun binds self.func to the worker; confirm in the base.
        for row in results:
            self.add_job(self.func, row[0])

    def dealresult(self, *args, **kwargs):
        # Flush the SQL strings accumulated by workers to MySQL in one batch.
        InsertIntoDbFromList(self.results)

    def setProxy(self, proxysList=None):
        # Refresh the proxy ring from Redis (proxysList arg is ignored),
        # then sleep so refreshes happen at most once a minute.
        list_proxy = getDataFromRedis(curPath, 'db.ini')
        MThreadingRun.setProxy(self, list_proxy)
        time.sleep(60)

    def thread_pool_hook(self, thread_pool_dicts, thread, args, kwargs):
        # No per-thread bookkeeping needed; hand back an empty context.
        return {}

    def fun(self, threadval, *args, **kwargs):
        # Worker entry point: args[0] is the issue URL queued by setTask.
        get_list_url_run(threadval, args[0], self.list_proxy)

    def is_break(self):
        # NOTE(review): always True — exact loop-exit semantics depend on
        # how MThreadingRun consults this hook; confirm in the base class.
        return True


def main():
    """Launch the issue crawler with a 50-thread worker pool."""
    IssueThreadRun(50).run()


if __name__ == "__main__":
    main()
