import json
import os
import threading
import time

import facade
import requests
from xjlibrary.mdatetime.mtime import getTodayDate
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.network.MyIP import GetLocalIPByPrefix
from xjlibrary.our_file_dir import BaseDir

# Resolve filesystem layout relative to this script:
#   - db.ini (MySQL/redis config) lives next to this file;
#   - downloaded pages are written under
#     <project root>/download/siamjournal/download/article.
curPath = BaseDir.get_file_dir_absolute(__file__)
configfile = BaseDir.get_new_path(curPath, "db.ini")
# presumably walks two directory levels up from curPath — TODO confirm
# against BaseDir.get_upper_dir's sign convention
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "siamjournal", "download", "article")
BaseDir.create_dir(sPath)  # ensure the output directory exists at import time


class DownIssuePage():
    """Downloads SIAM (epubs.siam.org) article abstract pages through a
    rotating proxy list and records per-URL success/failure back into the
    `article` MySQL table via SQL statements pushed onto a result queue."""

    def __init__(self):
        # Absolute target URL; filled in per task by set_url().
        self.url = ""
        # Browser-like headers to reduce the chance of being blocked.
        self.header = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "accept-encoding": "gzip, deflate, br",
            "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
            "cache-control": "no-cache",
            "pragma": "no-cache",
            "referer": "https://epubs.siam.org",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36"
        }
        self.logger = facade.get_streamlogger()
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger)

    def set_url(self, url):
        """Build the absolute article URL from a site-relative path."""
        BaseUrl = "https://epubs.siam.org"
        self.url = BaseUrl + url

    def _queue_failure(self, result_queue, url):
        """Queue an UPDATE bumping the retry counter for *url* and log it.

        Rows reaching failcount>=3 are skipped by select_journal().
        NOTE(review): SQL built by string interpolation; urls originate from
        our own DB, but parameterized queries would be safer.
        """
        result_queue.put(
            "UPDATE article SET failcount=failcount+1 WHERE `url`='%s'" % url)
        self.logger.error("下载失败")

    def down_issuepage(self, threadval, url, jsonmsg, proxys):
        """Download one article page through the proxy ring and persist it.

        Parameters:
            threadval: per-thread context; only its result_queue is used to
                hand SQL status updates back to the coordinating thread.
            url: site-relative article path, e.g. '/doi/abs/...'.
            jsonmsg: JSON string of article metadata stored with the page.
            proxys: proxy ring (project type exposing .length()).

        Returns:
            True on success, False on any failure (the original returned
            None on success / no-proxy; no caller inspects the value).
        """
        result_queue = threadval.result_queue
        self.set_url(url)
        print("url is :" + self.url)
        if proxys.length() == 0:
            # No proxies available yet — back off instead of hammering.
            print("没有代理，睡眠10秒钟等待")
            time.sleep(10)
            return False

        BoolResult, errString, r = facade.MProxyRequest(self.url,
                                                        Feature="hlFld-Abstract",
                                                        HEADERS=self.header,
                                                        timeout=(30, 60),
                                                        verify=False,
                                                        proxyRingList=proxys)
        if not BoolResult:
            self._queue_failure(result_queue, url)
            return False

        # Sanity check: a genuine article page must echo its own DOI path.
        mark = url.replace("/doi/abs/", "")
        if mark not in r.text:
            self._queue_failure(result_queue, url)
            return False

        sumDict = {
            'url': url,
            'html': r.text,
            # Escape lone backslashes so jsonmsg parses; assumes the stored
            # metadata was not JSON-escaped when written — TODO confirm
            # against whatever populated the `article` table.
            'jsonmsg': json.loads(jsonmsg.replace("\\", "\\\\")),
            'date': getTodayDate(),
        }

        # exist_ok avoids the check-then-create race of exists()+makedirs().
        os.makedirs(sPath, exist_ok=True)

        # One output file per (date, host IP, pid, thread id) so concurrent
        # writers never collide on the same file.
        outPathFile = os.path.join(sPath,
                                   '%s_%s_%d_%d.big_json' % (
                                       getTodayDate(), GetLocalIPByPrefix('192.168.'), os.getpid(),
                                       threading.get_ident())
                                   )

        line = json.dumps(sumDict, ensure_ascii=False).strip() + '\n'
        BaseDir.single_add_file(outPathFile, line)

        # NOTE(review): interpolated SQL — see _queue_failure.
        result_queue.put("UPDATE article SET stat=1 WHERE `url`='%s'" % url)
        return True

    def select_journal(self):
        """Fetch up to 1000 pending (stat=0, failcount<3) article rows."""
        sql = "select url,jsonmsg from article where stat=0 and failcount<3 limit 1000"
        rows = self.mysqlutils.SelectFromDB(sql)
        return rows

    def update_stat(self, results):
        """Execute the queued UPDATE statements in one batch."""
        self.mysqlutils.ExeSqlListToDB(results)

    def get_proxy(self):
        """Load the current proxy list from redis (config in db.ini)."""
        return getDataFromRedis(curPath, 'db.ini')


class ArchiveThreadRun(MThreadingRun):
    """Thread-pool driver: pulls pending article rows from MySQL, fans them
    out to worker threads, and flushes the resulting SQL updates in batches."""

    def __init__(self, num):
        super().__init__(num)
        self.down = DownIssuePage()

    def getTask(self, *args, **kwargs):
        # Next batch of pending (url, jsonmsg) rows.
        return self.down.select_journal()

    def setTask(self, results=None, *args, **kwargs):
        # Schedule one download job per row.
        for row in results:
            url, jsonmsg = row[0], row[1]
            self.add_job(self.func, url, jsonmsg)

    def dealresult(self, *args, **kwargs):
        print("self.results length is {}".format(len(self.results)))
        # note: self.results is intentionally not cleared here
        self.down.update_stat(self.results)

    def setProxy(self, proxysList=None):
        fresh = self.down.get_proxy()
        MThreadingRun.setProxy(self, list(fresh))

    def is_break(self):
        # Single pass per task batch.
        return True

    def thread_pool_hook(self, thread_pool_dicts, thread, *args, **kwargs) -> dict:
        return {}

    def fun(self, threadval, *args, **kwargs):
        url, jsonmsg = args
        self.down.down_issuepage(threadval, url, jsonmsg, self.list_proxy)


def main():
    """Run the downloader with 40 worker threads, flushing queued SQL
    updates every 5 result batches."""
    runner = ArchiveThreadRun(40)
    runner.dealresultsnum = 5
    runner.run()


"""
被封一次 原因是5分钟内创建超过25个会话
尝试使用一个sn下载是否会被封
"""
if __name__ == "__main__":
    main()
