import os
import sys
import time

import facade
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.mrequest.baserequest import MProxyRequest
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.our_file_dir.base_dir import BaseDir

# Absolute directory containing this script; all derived paths start here.
curPath = BaseDir.get_file_dir_absolute(__file__)
# Two directory levels above the script directory (presumably the project root
# — TODO confirm against BaseDir.get_upper_dir semantics).
TopPath = BaseDir.get_upper_dir(curPath, -2)
# Destination directory for downloaded volume pages.
sPath = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "volume")  # volume

# Browser-like request headers sent with every download request.
HEADERS = {
    "accept": "*/*",
    "accept-encoding": "gzip, deflate, br",
    "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
    "cache-control": "no-cache",
    "pragma": "no-cache",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 "
                  "Safari/537.36 ",
    "x-requested-with": "XMLHttpRequest"
}

# Site root; relative URLs from the database are joined onto this.
BaseUrl = "https://www.jstor.org"

# Shared MySQL helper, configured from db.ini next to this script.
configfile = BaseDir.get_new_path(curPath, "db.ini")
mysqlutils = facade.MysqlUtiles(configfile, "db")


def requestVolIssue(url, proxysList):
    """Fetch *url* through the proxy ring and return the response object.

    On failure prints the error reason and terminates the whole process
    with exit code -1 (the script treats any request failure as fatal).
    """
    ok, errMsg, response = MProxyRequest(url, endstring="", Feature="langMatch", HEADERS=HEADERS,
                                         proxyRingList=proxysList,
                                         timeout=(30, 60))
    if ok:
        return response
    print("请检查失败原因:" + errMsg)
    sys.exit(-1)


def savefile(r, filePath):
    """Write the response body text of *r* to *filePath* via BaseDir helper."""
    BaseDir.single_write_file(r.text, filePath)


# # Database connection
# def MajorDbConnect():
#     return MySqlDbConnect(curPath, "db.ini")


def SelectFromDB():
    """Return up to 500 randomly-ordered, not-yet-downloaded volume URLs."""
    return mysqlutils.SelectFromDB(
        "select `url` from `volume` where `stat`=0 ORDER BY RAND() LIMIT 500"
    )


def SelectProxy():
    """Return every row from the proxy pool table."""
    return mysqlutils.SelectFromDB("SELECT `proxy` FROM `proxy_pool`")


# def setProxy():
#     global proxysList
#     rows = SelectProxy()
#     for proxy in rows:
#         if not proxysList.exist(proxy):
#             proxysList.append(proxy[0])


# Insert into database
def InsertIntoDbFromList(ListSqls):
    """Execute every SQL statement in *ListSqls* against the database.

    Fix: the original ended with ``ListSqls = list()``, which only rebinds
    the local name and never clears the caller's list — a no-op removed here.
    (If the caller needs the list emptied, it must clear it itself.)
    """
    mysqlutils.ExeSqlListToDB(ListSqls)


def get_list_url_run(threadval, url, proxy):
    """Download one JSTOR volume page and queue its completion SQL.

    Skips the download when the target file already exists; in both the
    skip and the download case the same ``stat=1`` update is queued.

    Fix: the completion SQL string was built twice in the original; it is
    now built once.

    :param threadval: worker context exposing ``result_queue`` (framework type).
    :param url: relative volume URL taken from the ``volume`` table.
    :param proxy: proxy ring list forwarded to the request helper.
    """
    result_queue = threadval.result_queue
    # Flatten the URL into a filesystem-safe name.
    filename = url.replace("/", "_").replace("journal", "").replace("decade", "")
    filePath = os.path.join(sPath, filename + ".html")
    # NOTE(review): url is interpolated directly into SQL; safe only while
    # `volume.url` values are trusted — prefer parameterized queries.
    sql = "update `volume` set `stat`=1 where url='{}'".format(url)
    if os.path.exists(filePath):
        print("文件存在" + filePath)
        result_queue.put(sql)
        return
    r = requestVolIssue(BaseUrl + url, proxy)
    savefile(r, filePath)
    result_queue.put(sql)

def readDBProxy():
    """Load proxy data from Redis using the db.ini config next to this script."""
    return getDataFromRedis(curPath, 'db.ini')


class VolumeThreadRun(MThreadingRun):
    """Thread-pool driver that downloads JSTOR volume pages.

    All methods below are overrides of the MThreadingRun framework hooks;
    their calling order and contract are defined by that base class (not
    visible here — NOTE(review): verify against MThreadingRun).
    """

    def __init__(self, num):
        """Create the pool with *num* worker threads and ensure the
        download directory exists."""
        super(VolumeThreadRun, self).__init__(num)
        if not os.path.exists(sPath):
            os.makedirs(sPath)

    def getTask(self, *args, **kwargs):
        """Fetch the next batch of pending volume URLs from the database."""
        return SelectFromDB()

    def setTask(self, results=None, *args, **kwargs):
        """Queue one job per URL row, then pause 30s before the next batch."""
        for row in results:
            self.add_job(self.func, row[0])
        time.sleep(30)

    def dealresult(self, *args, **kwargs):
        """Flush accumulated SQL statements (self.results) to the database."""
        InsertIntoDbFromList(self.results)

    def setProxy(self, proxysList=None):
        """Refresh the proxy list from Redis every 60s.

        The *proxysList* argument is ignored; proxies always come from Redis.
        """
        rows = readDBProxy()
        MThreadingRun.setProxy(self, rows)
        time.sleep(60)

    def thread_pool_hook(self, thread_pool_dicts, thread, args, kwargs):
        # No extra per-thread state is needed.
        return {}

    def fun(self, threadval, *args, **kwargs):
        """Worker body: download the URL passed as the first job argument."""
        get_list_url_run(threadval, args[0], self.list_proxy)

    def is_break(self):
        # Always allow the framework's break condition to trigger
        # (semantics defined by MThreadingRun — TODO confirm).
        return True


def main():
    """Entry point: start the volume downloader with 50 worker threads."""
    runner = VolumeThreadRun(50)
    runner.run()


# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
