import logging
import os
import time

import facade
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.mrequest import baserequest
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.our_file_dir import BaseDir

# Resolve paths relative to this file and make sure the download target
# directory exists before any worker thread writes into it.
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "cell", "download", "abstractdetails")
BaseDir.create_dir(sPath)

# Module-level scraper state and request configuration.
absurl = []  # NOTE(review): never used in this file -- possibly leftover
StartTime = time.time()  # NOTE(review): never read in this file
BaseUrl = "https://www.cell.com"
# Browser-like request headers so the site serves normal HTML responses.
HEADERS = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
           'Accept-Encoding': 'gzip, deflate, br', 'Connection': 'keep-alive',
           'Accept-Language': 'zh-CN,zh;q=0.9', 'Cache-Control': 'max-age=0',
           'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) '
                         'Chrome/66.0.3359.139 Safari/537.36', }
nCount = 0  # NOTE(review): never used in this file
ListSqls = list()  # NOTE(review): never used here; shadowed by InsertIntoDbFromList's parameter

# Shared MySQL helper, configured from db.ini located next to this file.
mysqlutils = facade.MysqlUtiles(BaseDir.get_new_path(curPath, "db.ini"), "db", facade.get_streamlogger())


# # Database connection (dead code kept for reference)
# def MajorDbConnect():
#     return MySqlDbConnect(curPath, "db.ini")


def SelectListFromDB():
    """Fetch pending (issueppi, url) rows from the `archive` table.

    Only rows not yet downloaded (stat=0) with fewer than five failed
    attempts are returned.
    """
    query = "SELECT `issueppi`,`url` FROM `archive` where stat=0 and Failed < 5"
    return mysqlutils.SelectFromDB(query)


# Write queued SQL statements to the database
def InsertIntoDbFromList(ListSqls):
    """Execute a batch of SQL statements against the shared MySQL handle."""
    mysqlutils.ExeSqlListToDB(ListSqls)


# Append debug text to a log file
def output(value, files='log.txt'):
    """Append *value* to the text file *files*, creating it if needed.

    Uses a context manager so the file handle is closed even if the
    write raises (the original open/write/close leaked on error).
    """
    with open(files, 'a', encoding='utf-8') as f:
        f.write(value)


def get_url(url, ringproxy):
    """Fetch *url* through the rotating proxy pool.

    Returns (True, response) on success and (False, error_string) on
    failure, mirroring MProxyRequest's success flag.
    """
    ok, err_msg, response = baserequest.MProxyRequest(url,
                                                     Feature='container',
                                                     proxyRingList=ringproxy,
                                                     HEADERS=HEADERS,
                                                     verify=False,
                                                     timeout=(30, 60))
    if ok:
        return True, response
    return False, err_msg


def save_file(outfile, r):
    """Write the response body *r.text* to *outfile*.

    Existing files are left untouched so a completed download is never
    overwritten.
    """
    if not os.path.exists(outfile):
        with open(outfile, mode='w', encoding='utf-8') as fh:
            fh.write(r.text)


def get_list_url_run(threadval, ringproxy, name, url):
    """Download the abstract page for one archive *url*.

    Saves the page under sPath and puts a status-update SQL statement on
    the thread's result queue: stat=1 on success (or when the file is
    already present), failed+1 on a request failure.
    """
    result_queue = threadval.result_queue
    # exist_ok avoids a race when several worker threads hit this at once
    # (the original exists()/makedirs() pair could raise FileExistsError).
    os.makedirs(sPath, exist_ok=True)
    filename = "{}_{}".format(name, "_".join(url.split("/")))
    outfile = "{}/{}.html".format(sPath, filename)
    print(outfile)
    # NOTE(review): these SQL strings are built with str.format -- safe only
    # while `url` comes from our own DB; parameterize if that ever changes.
    if os.path.exists(outfile):
        result_queue.put("Update archive set stat=1 where `url`='{}'".format(url))
        return

    urlabs = BaseUrl + url.replace("fulltext", "abstract")
    ok, r = get_url(urlabs, ringproxy)  # was `bool`, which shadowed the builtin
    if not ok:
        output("请求失败{}\n".format(urlabs))
        result_queue.put(
            "Update archive set failed=failed+1,`stat`=0 where `url`='{}'".format(url))
        return
    save_file(outfile, r)
    result_queue.put("Update archive set stat=1 where `url`='{}'".format(url))


def SelectProxy():
    """Return the current proxy pool read from Redis (section "redis" of db.ini)."""
    return getDataFromRedis(curPath, 'db.ini', sesc="redis")


class ArchiveThreadRun(MThreadingRun):
    """Thread-pool driver that downloads Cell abstract pages.

    Pulls pending (issueppi, url) rows from MySQL, fans them out to
    worker threads, and flushes status-update SQL collected from the
    result queue back to the database.
    """

    def __init__(self, num):
        # num: number of worker threads in the pool.
        super(ArchiveThreadRun, self).__init__(num)

    def getTask(self, *args, **kwargs):
        # Fetch the next batch of pending archive rows (stat=0, Failed < 5).
        return SelectListFromDB()

    def setTask(self, results=None, *args, **kwargs):
        for issuppi, url in results:
            # Add each page to the job queue.
            self.add_job(self.func, issuppi, url)
        # Throttle how often the task table is re-polled.
        time.sleep(20)

    def dealresult(self, *args, **kwargs):
        # Flush the SQL produced by workers to MySQL.
        # NOTE(review): self.results is presumably filled by the base class
        # from the result queue -- confirm in MThreadingRun.
        InsertIntoDbFromList(self.results)

    def setProxy(self, proxysList=None):
        # Refresh the proxy pool from Redis roughly once a minute.
        # list_proxy = []
        rows = SelectProxy()
        # for proxy in rows:
        #     list_proxy.append(proxy[0])
        MThreadingRun.setProxy(self, rows)
        time.sleep(60)

    def is_break(self):
        # NOTE(review): exact semantics depend on MThreadingRun; returning
        # True here looks like "allow the run loop to stop" -- confirm.
        return True

    def thread_pool_hook(self, thread_pool_dicts, thread, *args, **kwargs) -> dict:
        # No per-thread bookkeeping needed.
        return {}

    def fun(self, threadval, *args, **kwargs):
        # Worker entry point: download one (issueppi, url) pair.
        # NOTE(review): setTask enqueues self.func while this method is named
        # `fun` -- presumably the base class wires func to fun; confirm.
        issuppi, url = args
        get_list_url_run(threadval, self.list_proxy, issuppi, url)


def main(logger1: logging.Logger = None):
    """Entry point: run the archive downloader with 40 worker threads."""
    global logger
    logger = logger1
    ArchiveThreadRun(40).run()


if __name__ == '__main__':
    main()
