"""
成都信息工程大学图书馆地址
http://www.lib.cuit.edu.cn/
AMS美国气象学会会刊
https://journals.ametsoc.org/
需要走代理
xujiang
"""
import logging
import os
import sys
import time

import facade
from facade.loggerfacade import get_filelogger
from xjlibrary.database_moudel.simple.mysqlclient import MySqlDbConnect, SelctSqlFromDB, ExeSqlList
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.mrequest.baserequest import MProxyRequest
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.our_file_dir import BaseDir

# --- Mutable module-level state shared by the worker threads ---
nCount = 0        # count of pages successfully downloaded this run (see get_list_url_run)
ListSqls = []     # legacy accumulator for SQL statements; not written anywhere in this file
list_failed = []  # legacy list of failed items; not written anywhere in this file
starttime = 0     # unix timestamp of the last proxy refresh (throttle in readDBProxy/SelectProxy)

# --- Output layout: files land under <two dirs up>/download/cdxg_ams/download/archive ---
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "cdxg_ams", "download", "archive")

# Target site: AMS (American Meteorological Society) journals; pages are
# fetched with browser-like headers through the proxy below.
BaseUrl = "https://journals.ametsoc.org"
HEADERS = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
           'Accept-Encoding': 'gzip, deflate, br', 'Connection': 'keep-alive',
           'Accept-Language': 'zh-CN,zh;q=0.9', 'Cache-Control': 'max-age=0',
           'Host': 'journals.ametsoc.org',
           'Upgrade-Insecure-Requests': '1',
           'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
                         'Chrome/66.0.3359.139 Safari/537.36', }

# Fallback static proxy; in practice the proxy ring comes from Redis (readDBProxy).
Proxies = {
    'http': '192.168.30.176:8184',
    # 'http':'162.105.138.192:8092',
    'https': '192.168.30.176:8184'  # the key refers to the protocol of the target site
}
# DB connection settings live next to this script in db.ini.
configfile = BaseDir.get_new_path(curPath, "db.ini")
logger = facade.get_streamlogger()
mysqlutils = facade.MysqlUtiles(configfile, "db", logger)

# Flush a batch of SQL statements to the database.
def InsertIntoDbFromList(ListSqls):
    """Execute every SQL statement in *ListSqls* against the configured MySQL DB.

    NOTE(review): the parameter shadows the module-level ``ListSqls`` list;
    only the argument passed by the caller is used here.
    """
    mysqlutils.ExeSqlListToDB(ListSqls)


def get_url(url, proxys):
    """Fetch one page from the AMS journal site through the proxy ring.

    Parameters
    ----------
    url : str
        Site-relative path; joined onto ``BaseUrl``.
    proxys
        Proxy ring list passed straight through to ``MProxyRequest``.

    Returns
    -------
    The response object produced by ``MProxyRequest`` on success.

    Raises
    ------
    Exception
        On any request failure; callers catch plain ``Exception``
        (see ``get_list_url_run``), so the broad type is intentional.
    """
    full_url = BaseUrl + url  # keep the original relative url intact for the caller
    BoolResult, errString, r = MProxyRequest(full_url, HEADERS=HEADERS, Feature="articleMeta ja",
                                             proxyRingList=proxys, timeout=30, verify=False)
    if not BoolResult:
        raise Exception("错误码是 %s" % errString)
    return r


def save_file(name, r):
    """Write the response body to ``<sPath>/<name>.html``.

    Decodes the body as UTF-8, falling back to GB18030 when the bytes are
    not valid UTF-8. If the output file already exists, nothing is written.

    Parameters
    ----------
    name : str
        Basename (without extension) of the output file.
    r
        Response object exposing a ``content`` bytes attribute.
    """
    outfile = "{}/{}.html".format(sPath, name)
    if os.path.exists(outfile):
        print("{} 文件存在".format(outfile))
        return
    print(outfile)
    # Decode BEFORE opening the file: the original wrote inside the try with
    # a bare except, so a decode error mid-write left a truncated UTF-8 file
    # that was then reopened in GB18030 mode. Decoding first also narrows the
    # handler to the only expected failure (UnicodeDecodeError).
    try:
        text = r.content.decode("utf-8")
        encoding = "utf-8"
    except UnicodeDecodeError:
        text = r.content.decode("GB18030")
        encoding = "GB18030"
    with open(outfile, mode='w', encoding=encoding) as f:
        f.write(text)


def get_list_url(rows):
    """Download the page for each DB row of (id, name, volume, issue, url).

    NOTE(review): this looks like dead legacy code. ``get_list_url_run``
    takes five parameters (threadval, name, url, id_, proxy) but is called
    here with only three, which would raise TypeError at runtime; the
    threaded path via ``ArchiveThreadRun.fun`` is what actually runs.
    Kept for reference; do not call without fixing the arity.
    """
    # removed unused `count` local and renamed `id` to avoid shadowing the builtin
    for id_, name, volume, issue, url in rows:
        name = "{}_{}_{}".format(name, volume, issue)
        get_list_url_run(name, url, id_)


def get_list_url_run(threadval, name, url, id_, proxy):
    """Download one issue page and queue the matching status-update SQL.

    Parameters
    ----------
    threadval
        Worker-thread context; its ``result_queue`` collects SQL strings
        that ``ArchiveThreadRun.dealresult`` later flushes to MySQL.
    name : str
        Basename of the output html file.
    url : str
        Site-relative page url handed to ``get_url``.
    id_ : int
        Primary key of the ``ams_issue`` row being processed.
    proxy
        Proxy ring list handed to ``get_url``.
    """
    global nCount
    result_queue = threadval.result_queue
    outfile = "{}/{}.html".format(sPath, name)
    # exist_ok=True fixes a check-then-create race: with 30 worker threads,
    # two could both see the directory missing and the loser of the
    # os.makedirs race would crash with FileExistsError.
    os.makedirs(sPath, exist_ok=True)
    if os.path.exists(outfile):
        # Already downloaded earlier — just mark the row done.
        sql = "Update `ams_issue` set `stat`=1 where `id`={}".format(id_)
        result_queue.put(sql)
        print(sql)
        print("{}文件存在".format(outfile))
        return
    try:
        r = get_url(url, proxy)
        save_file(name, r)
        print("save 文件成功")
    except Exception as e:
        # Best effort: record the failure on the row and move on.
        print(str(e))
        sql = "Update `ams_issue` set `stat`=0,`explain`='{}' where `id`={}".format("get网络请求失败", id_)
        result_queue.put(sql)
        return
    sql = "Update `ams_issue` set `stat`=1 where `id`={}".format(id_)
    result_queue.put(sql)
    # Unsynchronized increment across threads — acceptable here because the
    # value is only a rough progress counter, never used for control flow.
    nCount = nCount + 1
    time.sleep(3)  # throttle: be polite to the target site


def SelectListFromDB():
    """Return the (id, url) rows of ``ams_issue`` not yet downloaded (stat=0)."""
    # Dropped the `global nCount, ListSqls` declaration: neither name was
    # assigned in this function, so the statement had no effect. Also removed
    # the commented-out legacy MajorDbConnect/SelctSqlFromDB path.
    sSql = "SELECT `id`,`url` FROM `ams_issue` WHERE `stat`=0"
    return mysqlutils.SelectFromDB(sSql)


def SelectProxy():
    """Legacy alias of :func:`readDBProxy`.

    Refreshes the proxy list from Redis at most once every 10 seconds;
    returns None while throttled. The body was a byte-for-byte duplicate
    of ``readDBProxy``, so it now delegates to keep the throttle logic in
    one place.
    """
    return readDBProxy()


def readDBProxy():
    """Pull the current proxy list from Redis, throttled to once per 10 s.

    Returns the value of ``getDataFromRedis`` when the throttle window has
    elapsed; returns None (implicitly doing nothing) otherwise. The last
    refresh time is kept in the module-level ``starttime``.
    """
    global starttime
    now = int(time.time())
    if now - starttime <= 10:
        return None  # still inside the throttle window
    starttime = now
    return getDataFromRedis(curPath, 'db.ini')


class ArchiveThreadRun(MThreadingRun):
    """Thread-pool driver that downloads the pending AMS issue pages.

    Plugs this script's DB/download helpers into the ``MThreadingRun``
    framework: the framework calls ``getTask``/``setTask`` to queue work,
    runs the jobs on worker threads, and ``dealresult`` flushes the SQL
    produced by the workers back to MySQL.
    """

    def __init__(self, num):
        # num: size of the worker-thread pool (30 in main()).
        super(ArchiveThreadRun, self).__init__(num)

    def getTask(self, *args, **kwargs):
        # Fetch the pending (id, url) rows (stat=0) from ams_issue.
        rows = SelectListFromDB()
        return rows

    def setTask(self, results=None, *args, **kwargs):
        # Queue one download job per pending row.
        # NOTE(review): self.func is presumably bound to `fun` below by the
        # MThreadingRun framework — confirm against its implementation.
        for id_, url in results:
            self.add_job(self.func, id_, url, id_)

    def dealresult(self, *args, **kwargs):
        # Flush the SQL statements accumulated in self.results by the workers.
        InsertIntoDbFromList(self.results)

    def setProxy(self, proxysList=None):
        # Refresh the proxy ring from Redis (throttled to every 10 s);
        # readDBProxy returns None while throttled — the framework's
        # setProxy presumably tolerates that.
        rows = readDBProxy()
        MThreadingRun.setProxy(self, rows)

    def fun(self, threadval, *args, **kwargs):
        # Worker entry point. args is (id_, url, id_) exactly as queued by
        # setTask; the row id doubles as the output file name.
        id_, url, _ = args
        get_list_url_run(threadval, id_, url, id_, self.list_proxy)

    def thread_pool_hook(self, thread_pool_dicts, thread, args, kwargs) -> dict:
        # No per-thread bookkeeping needed for this job.
        return {}

    def is_break(self):
        # NOTE(review): returning True appears to make the framework stop
        # after a single getTask/setTask cycle — verify in MThreadingRun.
        return True


def main():
    """Entry point: download every pending issue page with 30 worker threads."""
    ArchiveThreadRun(30).run()


if __name__ == "__main__":
    main()
