import os
import sys
import time

import facade
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.our_file_dir.base_dir import BaseDir

# Resolve the download target directory:
# <project root>/download/jstor/download/journals (created if missing).
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "journals")
BaseDir.create_dir(sPath)

# Shared MySQL helper configured from the db.ini file next to this script.
configfile = BaseDir.get_new_path(curPath, "db.ini")
mysqlutils = facade.MysqlUtiles(configfile, "db")

# Browser-like request headers sent with every JSTOR fetch.
HEADERS = {
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
    "accept-encoding": "gzip, deflate",
    "accept-language": "zh-CN,zh;q=0.9",
    # "referer": 'https://www.jstor.org/action/showJournals?contentType=journals&letter=0-9',
    "upgrade-insecure-requests": "1",
    "cache-control": "no-cache",
    "pragma": "no-cache",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36"
}

# Site root; site-relative journal paths read from the DB are appended to it.
BaseUrl = "https://www.jstor.org"


def requestVolIssue(url, proxysList):
    """Fetch *url* through the rotating proxy ring and return the response.

    The page is only accepted when it contains the "facets-container"
    marker (checked inside facade.MProxyRequest).

    :param url: absolute URL to request.
    :param proxysList: proxy ring forwarded to facade.MProxyRequest.
    :return: the successful response object.
    """
    BoolResult, errString, r = facade.MProxyRequest(url,
                                                    Feature="facets-container",
                                                    HEADERS=HEADERS,
                                                    proxyRingList=proxysList,
                                                    timeout=(30, 60))
    if not BoolResult:
        # NOTE(review): sys.exit inside a worker thread only raises
        # SystemExit in that thread, not the whole process — confirm this
        # is the intended failure policy.
        print("请检查失败原因:" + errString)
        sys.exit(-1)
    return r


def savefile(r, filePath):
    """Persist the text body of response *r* to *filePath*."""
    content = r.text
    BaseDir.single_write_file(content, filePath)


# # Database connection (no longer used; replaced by the shared mysqlutils)
# def MajorDbConnect():
#     return MySqlDbConnect(curPath, "db.ini")


def SelectFromDB():
    """Return rows of `url` values for journals not yet downloaded.

    :return: rows from the `journal` table where `stat` = 0.
    """
    sql = "select `url` from `journal` where `stat`=0"
    return mysqlutils.SelectFromDB(sql)


def SelectProxy():
    """Fetch proxies from the local `proxy_pool` table.

    Deprecated: the local proxy pool is no longer used; proxies are now
    read from Redis via selectProxyRedis().

    :return: rows of `proxy` values from the `proxy_pool` table.
    """
    sSql = "SELECT `proxy` FROM `proxy_pool`"
    return mysqlutils.SelectFromDB(sSql)


def readDBProxy():
    """Read the proxy collection from Redis, configured by the local db.ini."""
    config_dir = curPath
    return getDataFromRedis(config_dir, 'db.ini')


def selectProxyRedis():
    """Return the Redis-held proxies materialized as a list."""
    return list(readDBProxy())


# def setProxy():
#     global proxysList
#     rows = SelectProxy()
#     for proxy in rows:
#         if not proxysList.exist(proxy):
#             proxysList.append(proxy[0])


# Execute a batch of SQL statements against the database.
def InsertIntoDbFromList(ListSqls):
    """Run every statement in *ListSqls* via the shared MySQL helper.

    The original version rebound ListSqls to a fresh list afterwards;
    that only rebound the local name and never cleared the caller's
    list, so the no-op assignment has been removed.

    :param ListSqls: iterable of SQL statement strings to execute.
    """
    mysqlutils.ExeSqlListToDB(ListSqls)


def get_list_url_run(threadval, url, proxy):
    """Download one journal page and queue the SQL that marks it done.

    If the target HTML file already exists the download is skipped but
    the `stat`=1 UPDATE is still queued.

    :param threadval: per-thread context; its result_queue collects SQL.
    :param url: site-relative journal path (appended to BaseUrl).
    :param proxy: proxy ring handed to requestVolIssue.
    """
    result_queue = threadval.result_queue
    filename = url.replace("/", "_")
    filePath = os.path.join(sPath, filename + ".html")
    # exist_ok avoids the check-then-create race between worker threads.
    os.makedirs(sPath, exist_ok=True)
    # HACK: url is interpolated directly into SQL. It originates from our
    # own `journal` table, but a parameterized query would still be safer.
    sql = "update `journal` set `stat`=1 where url='{}'".format(url)
    if os.path.exists(filePath):
        print("文件存在" + filePath)
        result_queue.put(sql)
        return
    r = requestVolIssue(BaseUrl + url, proxy)
    savefile(r, filePath)
    result_queue.put(sql)


class VolIssueThreadRun(MThreadingRun):
    """Thread-pool runner that downloads JSTOR journal volume/issue pages.

    Overrides the MThreadingRun hooks: tasks come from the `journal`
    table, proxies come from Redis, and results (UPDATE statements) are
    flushed back to MySQL.
    """

    def __init__(self, num):
        # num: number of worker threads in the pool.
        super(VolIssueThreadRun, self).__init__(num)

    def is_break(self):
        # Always True — presumably tells the base class to stop after one
        # pass over the task list. NOTE(review): confirm against
        # MThreadingRun's loop semantics.
        return True

    def getTask(self, *args, **kwargs):
        # Pending journal URLs (stat=0) fetched from the database.
        return SelectFromDB()

    def setTask(self, results=None, *args, **kwargs):
        # Queue one download job per URL row.
        # NOTE(review): self.func is presumably bound to self.fun by the
        # MThreadingRun base class — verify, as only fun() is defined here.
        for row in results:
            self.add_job(self.func, row[0], self.list_proxy)

    def dealresult(self, *args, **kwargs):
        # Flush the accumulated UPDATE statements to MySQL.
        InsertIntoDbFromList(self.results)

    def setProxy(self, proxysList=None):
        # Refresh the proxy list from Redis (the proxysList argument is
        # ignored), hand it to the base class, then pause before the next
        # refresh cycle.
        list_proxy = selectProxyRedis()
        MThreadingRun.setProxy(self, list_proxy)
        time.sleep(60)

    def thread_pool_hook(self, thread_pool_dicts, thread, args, kwargs):
        # No extra per-thread state is needed.
        return {}

    def fun(self, threadval, *args, **kwargs):
        # Worker body: download a single URL using the current proxy list.
        url = args[0]
        get_list_url_run(threadval, url, self.list_proxy)


def main():
    """Entry point: spin up a 30-thread runner and start the work loop."""
    VolIssueThreadRun(30).run()


if __name__ == "__main__":
    main()
