"""
成都信息工程大学图书馆地址
http://www.lib.cuit.edu.cn/
AMS美国气象学会会刊
https://journals.ametsoc.org/
需要走代理
xujiang
"""
import os
import time

import facade
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.mrequest.baserequest import MProxyRequest
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.our_file_dir import BaseDir

# Path layout: this script sits two directory levels below the project root;
# downloaded issue pages land under <root>/download/cdxg_ams/download/issue.
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "cdxg_ams", "download", "issue")

nCount = 0        # counter — declared but never incremented anywhere in this file
ListSqls = []     # batch of SQL statements — unused at module level in this file
list_failed = []  # failed-item collector — unused in this file
starttime = 0     # last proxy-refresh timestamp, throttles readDBProxy()
configfile = BaseDir.get_new_path(curPath, "db.ini")  # DB + Redis connection config
logger = facade.get_streamlogger()
mysqlutils = facade.MysqlUtiles(configfile, "db", logger)  # shared MySQL helper ("db" ini section)

# Target site and browser-like request headers (site requires a proxy to reach).
BaseUrl = "https://journals.ametsoc.org"
HEADERS = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
           'Accept-Encoding': 'gzip, deflate, br', 'Connection': 'keep-alive',
           'Accept-Language': 'zh-CN,zh;q=0.9', 'Cache-Control': 'max-age=0',
           'Host': 'journals.ametsoc.org',
           'Upgrade-Insecure-Requests': '1',
           'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
                         'Chrome/66.0.3359.139 Safari/537.36', }

# Static fallback proxy map (requests-style); the live proxy ring comes from Redis.
Proxies = {
    'http': '192.168.30.176:8184',
    # 'http':'162.105.138.192:8092',
    'https': '192.168.30.176:8184'  # the key refers to the TARGET site's scheme
}


# Insert into the database
def InsertIntoDbFromList(ListSqls):
    """Execute a batch of SQL statements against the shared MySQL connection.

    :param ListSqls: list of SQL statement strings (status updates queued by
        the worker threads) to run in one batch.
    """
    # The original declared `global nCount` but never modified it — removed.
    mysqlutils.ExeSqlListToDB(ListSqls)


def get_url(url, proxys):
    """Fetch ``url`` through the proxy ring and return the response object.

    :param url: absolute URL of the issue page to download.
    :param proxys: proxy ring list handed to ``MProxyRequest``.
    :returns: the response object produced by ``MProxyRequest``.
    :raises Exception: when the proxied request reports failure.
    """
    ok, err_text, response = MProxyRequest(
        url,
        Feature="frmAbs",          # marker string expected in a valid page
        proxyRingList=proxys,
        timeout=60,
        HEADERS=HEADERS,
        verify=False,
    )
    if ok:
        return response
    raise Exception("出现错误：" + err_text)


def save_file(name, r):
    """Write the response body to ``<sPath>/<name>.html``, skipping existing files.

    Tries UTF-8 first; falls back to GB18030 for pages that are not valid UTF-8.

    :param name: base file name (journal_volume_issue), without extension.
    :param r: response object whose ``content`` bytes are persisted.
    """
    outfile = "{}/{}.html".format(sPath, name)
    if os.path.exists(outfile):
        print("{} 文件存在".format(outfile))
        return
    print(outfile)
    try:
        BaseDir.single_write_file(r.content.decode("utf-8"), outfile)
    except UnicodeDecodeError:
        # Bare `except:` replaced: only a decode failure should trigger the
        # GB18030 fallback — anything else (KeyboardInterrupt, I/O errors)
        # must propagate to the caller's error handling.
        BaseDir.single_write_file(r.content.decode("GB18030"), outfile, encoding="GB18030")


def get_list_url_run(threadval, name, url, id, proxy):
    """Download one issue page and queue a status-update SQL for the row.

    Success (or an already-downloaded file) queues ``stat=1``; any download
    error queues ``stat=0`` with an explanation so the row can be retried.

    :param threadval: per-thread context providing ``result_queue`` for SQL.
    :param name: output file base name (journal_volume_issue).
    :param url: page URL to fetch.
    :param id: primary key of the ``ams_volume`` row being processed.
    :param proxy: proxy ring passed through to :func:`get_url`.
    """
    result_queue = threadval.result_queue
    outfile = "{}/{}.html".format(sPath, name)
    # exist_ok avoids the check-then-act race of the original
    # `if not exists: makedirs` — this runs on 50 concurrent threads, and two
    # workers could both pass the check, making one crash with FileExistsError.
    os.makedirs(sPath, exist_ok=True)
    if os.path.exists(outfile):
        sql = "Update ams_volume set `stat`=1 where `id`={}".format(id)
        result_queue.put(sql)
        print("{}文件存在".format(outfile))
        return
    try:
        r = get_url(url, proxy)
        save_file(name, r)
        print("save 文件成功")
    except Exception as e:
        # Boundary handler: record the failure in the DB instead of killing
        # the worker thread.
        print(str(e))
        sql = "Update ams_volume set `stat`=0,`explain`='{}' where `id`={}".format("get网络请求失败", id)
        result_queue.put(sql)
        return
    sql = "Update ams_volume set `stat`=1 where `id`={}".format(id)
    result_queue.put(sql)
    time.sleep(3)  # throttle between successful downloads to be polite to the site


def SelectListFromDB():
    """Fetch up to 1000 pending (stat=0) issue rows from ``ams_volume``.

    :returns: rows of ``(id, name, volume, issue, url)`` tuples as produced
        by ``mysqlutils.SelectFromDB``.
    """
    # The original declared `global nCount, ListSqls` but assigned neither — removed.
    sSql = "SELECT `id`,`name`,`volume`,`issue`,`url` FROM `ams_volume` WHERE `stat`=0 limit 1000"
    rows = mysqlutils.SelectFromDB(sSql)
    return rows


def SelectProxy():
    """Return the proxy list stored in Redis (``proxy`` section of db.ini)."""
    return getDataFromRedis(curPath, 'db.ini', sesc="proxy")


def readDBProxy():
    """Fetch a fresh proxy list from Redis, at most once every 10 seconds.

    Uses the module-level ``starttime`` as a throttle timestamp.  Inside the
    10-second window this falls through and implicitly returns ``None`` —
    assumes the caller (``IssueThreadRun.setProxy``) tolerates a ``None``
    proxy list; TODO confirm in the MThreadingRun base class.
    """
    global starttime
    if int(time.time()) - starttime > 10:
        starttime = int(time.time())
        return getDataFromRedis(curPath, 'db.ini')


class IssueThreadRun(MThreadingRun):
    """Thread-pool driver: pulls pending ``ams_volume`` rows from MySQL,
    downloads each issue page through the proxy ring, and writes status
    updates back to the database via the result queue."""

    def __init__(self, num):
        # Fixed-size pool of `num` worker threads.
        super(IssueThreadRun, self).__init__(num)
        self.thread_pool.set_is_static_max(True)

    def getTask(self, *args, **kwargs):
        # Framework hook: supply the next batch of work (up to 1000 rows).
        return SelectListFromDB()

    def setTask(self, results=None, *args, **kwargs):
        # Framework hook: enqueue one download job per row.
        # NOTE(review): this passes self.func, while the worker hook defined
        # below is named `fun` — presumably the MThreadingRun base class binds
        # self.func to `fun`; confirm in the base class.
        for id_, name, volume, issue, url in results:
            name = "{}_{}_{}".format(name, volume, issue)
            self.add_job(self.func, name, url, id_)

    def dealresult(self, *args, **kwargs):
        # Framework hook: flush the queued status-update SQL to MySQL.
        InsertIntoDbFromList(self.results)

    def setProxy(self, proxysList=None):
        # Refresh the proxy ring from Redis, then pause before the next refresh.
        # rows may be None when readDBProxy() is throttled — assumes the base
        # class tolerates a None proxy list; TODO confirm.
        rows = readDBProxy()
        MThreadingRun.setProxy(self, rows)
        time.sleep(10)

    def fun(self, threadval, *args, **kwargs):
        # Per-thread worker entry point: unpack the job args and download.
        name, url, id_ = args
        get_list_url_run(threadval, name, url, id_, self.list_proxy)

    def is_break(self):
        # Framework hook: always allow the run loop to terminate when idle.
        return True

    def thread_pool_hook(self, thread_pool_dicts, thread, args, kwargs) -> dict:
        # No-op hook required by the base-class interface.
        return {}


def main():
    """Run the issue-page downloader with a 50-thread pool.

    Operational notes (translated from the original):
      1. Before this step, rows with stat=0 in the DB are flipped to 1 so every
         row gets refreshed (already handled inside the program).
      2. Afterwards, check ``ams_volume`` for stat=0 rows — those downloads
         failed and need investigation; the count of stat=1 rows should match
         the number of files on disk.
    """
    IssueThreadRun(50).run()


if __name__ == "__main__":
    main()
