import logging
import os
import time

import facade
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.mrequest import baserequest
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.our_file_dir import BaseDir
from xjlibrary.tools.BaseUrl import BaseUrl

# Resolve the output directory: <two levels up>/download/cell/download/issue
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "cell", "download", "issue")
BaseDir.create_dir(sPath)

# Module-level shared state for the worker threads.
absurl = []
ListSqls = []
StartTime = time.time()
BaseUrls = "https://www.cell.com"
# Browser-like request headers so the site serves regular HTML pages.
HEADERS = {'accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
           'accept-encoding': 'gzip, deflate, br',
           'accept-language': 'zh-CN,zh;q=0.9,en;q=0.8',
           'upgrade-insecure-requests': '1',
           'cache-control': 'no-cache',
           'pragma': 'no-cache',
           'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36', }
# Number of pages found already on disk (incremented in get_list_url_run).
nCount = 0
# Shared MySQL helper, configured from db.ini located next to this script.
mysqlutils = facade.MysqlUtiles(BaseDir.get_new_path(curPath, "db.ini"), "db", facade.get_streamlogger())


# # Database connection (superseded by the module-level `mysqlutils` helper)
# def MajorDbConnect():
#     return MySqlDbConnect(curPath, "db.ini")


def SelectListFromDB():
    """Fetch up to 1000 pending issue URLs (stat=0) in random order.

    Returns:
        Rows from the `issue` table, each containing a single `url` column.
    """
    query = "SELECT `url` FROM issue where stat=0 ORDER BY RAND() LIMIT 1000"
    return mysqlutils.SelectFromDB(query)


# Flush accumulated SQL statements to the database.
def InsertIntoDbFromList(ListSqls):
    """Execute every SQL statement in *ListSqls* via the shared MySQL helper."""
    mysqlutils.ExeSqlListToDB(ListSqls)


# Debug helper: dump text to a file.
def output(value, files='log.txt'):
    """Write *value* to a UTF-8 text file, overwriting any existing content.

    Args:
        value: String content to write.
        files: Target file path (defaults to 'log.txt' in the working dir).
    """
    # `with` guarantees the handle is closed even if write() raises
    # (the original leaked the handle on a write failure).
    with open(files, 'w', encoding='utf-8') as f:
        f.write(value)


def save_file(outfile, r):
    """Persist a response body to *outfile*; do nothing if the file exists.

    Args:
        outfile: Destination path for the HTML page.
        r: Response-like object exposing a `.text` attribute.
    """
    if not os.path.exists(outfile):
        print("开始保存文件")
        with open(outfile, mode='w', encoding='utf-8') as fp:
            fp.write(r.text)


def get_list_url_run(threadval, ringproxy, url):
    """Download one issue page and queue a stat-update SQL statement.

    Args:
        threadval: Per-thread context; provides `result_queue` where the
            follow-up SQL is enqueued for batch execution.
        ringproxy: Ring list of public proxies; when empty, the call backs
            off and returns so the task can be retried.
        url: Site-relative issue URL carrying a `pii` query parameter.
    """
    # Only nCount is actually mutated here; the original also declared
    # `absurl` and `ListSqls` global but never used them.
    global nCount
    result_queue = threadval.result_queue
    if ringproxy.length() == 0:
        # No proxies available yet -- wait briefly and give the slot back.
        print("等待代理")
        time.sleep(3)
        return
    pii = BaseUrl.urlQuery2Dict(url)["pii"]
    journal = BaseUrl.urlPath2List(url)[-2]
    if not os.path.exists(sPath):
        os.makedirs(sPath)
    outfile = "{}/{}_{}.html".format(sPath, journal, pii)
    if os.path.exists(outfile):
        # Already downloaded: just mark the row as done.
        print("文件存在{}".format(outfile))
        nCount = nCount + 1
        # NOTE(review): SQL built via str.format; urls originate from our own
        # DB so injection risk is low, but parameterized queries would be safer.
        sql = "update issue set `stat`=1 where `url`='{}'".format(url)
        result_queue.put(sql)
        return
    fullurl = BaseUrls + url
    BoolResult, errString, r = baserequest.MProxyRequest(fullurl, HEADERS=HEADERS, proxyRingList=ringproxy,
                                                         timeout=(30, 60),
                                                         verify=False)
    if not BoolResult:
        # Request failed: reset stat=0 so another worker picks the url up again.
        sql = "update issue set `stat`=0 where `url`='{}'".format(url)
        result_queue.put(sql)
        return
    save_file(outfile, r)
    sql = "update issue set `stat`=1 where `url`='{}'".format(url)
    result_queue.put(sql)


def SelectProxy():
    """Fetch the current proxy list from redis, pausing first to rate-limit polls."""
    time.sleep(10)
    proxies = getDataFromRedis(curPath, 'db.ini', sesc="redis")
    return proxies


class IssueThreadRun(MThreadingRun):
    """Thread-pool driver that downloads issue pages through rotating proxies.

    Wires the generic MThreadingRun hooks to this module's helpers: tasks are
    urls from the `issue` table, results are SQL statements batch-executed
    back into the database.
    """

    def __init__(self, num):
        # num: worker-thread count passed through to the base pool.
        super(IssueThreadRun, self).__init__(num)

    def getTask(self, *args, **kwargs):
        # Pull the next batch of pending urls (stat=0) from the database.
        rows = SelectListFromDB()
        return rows

    def setTask(self, results=None, *args, **kwargs):
        for url in results:
            # Add each page to the task queue; rows look like 1-tuples,
            # hence url[0]. (self.func / self.list_proxy come from the base.)
            self.add_job(self.func, self.list_proxy, url[0])

    def dealresult(self, *args, **kwargs):
        # Flush the accumulated status-update SQL back to the database.
        InsertIntoDbFromList(self.results)

    def setProxy(self, proxysList=None):
        # Refresh proxies from redis; the proxysList argument is ignored.
        rows = SelectProxy()
        MThreadingRun.setProxy(self, rows)

    def is_break(self):
        # Semantics defined by MThreadingRun; this subclass always returns True.
        return True

    def thread_pool_hook(self, thread_pool_dicts, thread, *args, **kwargs) -> dict:
        # No per-thread bookkeeping needed.
        return {}

    def fun(self, threadval, *args, **kwargs):
        # Worker entry point: unpack (proxy ring, url) and run one download.
        ringproxy, url = args
        get_list_url_run(threadval, ringproxy, url)


def main(logger1: logging.Logger = None):
    """Entry point: stash the logger module-wide and run 40 download threads.

    Args:
        logger1: Optional logger stored in the module-level `logger` global.
    """
    global logger
    logger = logger1
    runner = IssueThreadRun(40)
    runner.run()


"""
此处使用了多线程下载，并使用公网代理 请将proxy目录下的程序运行,以便更新代理
下载期级页面保存到文件,并将下载了的状态置1
"""
if __name__ == '__main__':
    main()
