import json
import os
import random
import threading
import time

import facade
from xjlibrary.mdatetime.mtime2 import MDateTimeUtils
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.mrequest.baserequest import MProxyRequest
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.our_file_dir.base_dir import BaseDir

# Resolve working paths relative to this script's location using the project's
# BaseDir helpers (semantics assumed from names — TODO confirm against xjlibrary).
curPath = BaseDir.get_file_dir_absolute(__file__)
# presumably the directory two levels above this file — verify get_upper_dir(-2)
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "article")
BaseDir.create_dir(sPath)  # make sure the output directory exists before workers write to it
configfile = BaseDir.get_new_path(curPath, "db.ini")  # DB/redis config next to this script


class Downarticle(object):
    """Download JSTOR article pages and track download state in MySQL.

    Each successfully fetched page is serialised as one JSON line
    (``{"url", "downdate", "html"}``) and appended to a per-worker file under
    ``sPath``; the ``article`` table's ``stat``/``failcount`` columns record
    success and retry counts via SQL statements pushed onto a result queue.
    """

    def __init__(self):
        self.logger = facade.get_streamlogger()
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger)
        # Desktop browser user agents; one is picked at random per instance.
        # BUG FIX: the list previously had a missing comma after the Edge
        # entry, so Python's implicit string-literal concatenation silently
        # fused it with the following Chrome/14 entry into a single malformed
        # user-agent string (and shrank the pool by one).
        self.USER_AGENT_LIST = [
            'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36',
            'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko',
            'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.36 Edge/16.16299',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/535.1 (KHTML, like Gecko) Chrome/14.0.835.163 Safari/535.1',
            'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:6.0) Gecko/20100101 Firefox/6.0',
            'Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/534.50 (KHTML, like Gecko) Version/5.1 Safari/534.50',
            'Opera/9.80 (Windows NT 6.1; U; zh-cn) Presto/2.9.168 Version/11.50',
            'Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Win64; x64; Trident/5.0; .NET CLR 2.0.50727; SLCC2; .NET CLR 3.5.30729',
        ]

        self.HEADERS = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "accept-encoding": "gzip, deflate, br",
            "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
            "user-agent": random.choice(self.USER_AGENT_LIST),
            "upgrade-insecure-requests": "1"
        }

        self.BaseUrl = "https://www.jstor.org"

    def requestVolIssue(self, url, proxysList):
        """Fetch *url* through the rotating proxy list.

        Returns the response object on success, or ``""`` when MProxyRequest
        reports failure (the error string is printed first). The "small-12
        columns" feature string is presumably used by MProxyRequest to
        validate the page content — confirm against xjlibrary.
        """
        BoolResult, errString, r = MProxyRequest(url,
                                                 Feature="small-12 columns",
                                                 HEADERS=self.HEADERS,
                                                 proxyRingList=proxysList,
                                                 timeout=(30, 60),
                                                 verify=False)
        if not BoolResult:
            print("请检查失败原因:" + errString)
            return ""
        return r

    def SelectFromDB(self):
        """Return pending rows (``stat`` = 0), least-failed first."""
        sql = "select `url` from `article` where `stat`=0 ORDER BY `failcount`"
        rows = self.mysqlutils.SelectFromDB(sql)
        return rows

    def InsertIntoDbFromList(self, ListSqls):
        """Execute a batch of SQL statements collected from worker threads."""
        self.mysqlutils.ExeSqlListToDB(ListSqls)

    def get_list_url_run(self, threadval, url, proxy):
        """Worker body: download one article page and persist it.

        On failure, queues an UPDATE bumping ``failcount``; on success,
        appends the page as a JSON line to this worker's file and queues an
        UPDATE marking ``stat`` = 1.
        """
        result_queue = threadval.result_queue
        fullurl = self.BaseUrl + url
        # One output file per (process, thread) so concurrent workers never
        # interleave writes within a file.
        outPathFile = os.path.join(sPath, '%d_%d.big_html' % (os.getpid(), threading.get_ident()))
        r = self.requestVolIssue(fullurl, proxy)
        if r == "":
            # NOTE(review): SQL is built with str.format — safe only while
            # `url` values come from our own `article` table; prefer
            # parameterized queries if mysqlutils supports them.
            sql = "update `article` set `stat`=0,failcount = failcount + 1 where url='{}'".format(url)
            result_queue.put(sql)
            return

        # Flatten the HTML to a single line so one record == one file line.
        htmls = r.text.replace('\0', ' ').replace('\r', ' ').replace('\n', ' ') + '\n'

        sumDict = dict()
        sumDict['url'] = url
        sumDict["downdate"] = MDateTimeUtils.get_today_date_strings()
        sumDict['html'] = htmls

        line = json.dumps(sumDict, ensure_ascii=False).strip() + '\n'

        BaseDir.single_add_file(outPathFile, line)
        print("保存文件成功")
        sql = "update `article` set `stat`=1 where url='{}'".format(url)
        result_queue.put(sql)


class articleThreadRun(MThreadingRun):
    """MThreadingRun subclass that wires Downarticle into the thread pool.

    Hook semantics (getTask/setTask/dealresult/setProxy/fun) follow the
    MThreadingRun framework contract — see xjlibrary for details.
    """

    def __init__(self, num):
        super(articleThreadRun, self).__init__(num)
        self.down = Downarticle()

    def getTask(self, *args, **kwargs):
        """Pull the current batch of pending article URLs from MySQL."""
        return self.down.SelectFromDB()

    def setTask(self, results=None, *args, **kwargs):
        """Queue one download job per pending row, then pause before repolling."""
        for record in results:
            self.add_job(self.func, record[0])
        time.sleep(60)

    def dealresult(self, *args, **kwargs):
        """Flush collected SQL statements back to the database."""
        print("self.results length is {}".format(len(self.results)))
        # Note: self.results is intentionally not cleared here.
        self.down.InsertIntoDbFromList(self.results)

    def setProxy(self, proxysList=None):
        """Refresh the proxy ring from redis; the argument is ignored."""
        fresh_proxies = getDataFromRedis(curPath, 'db.ini')
        super(articleThreadRun, self).setProxy(fresh_proxies)
        time.sleep(60)

    def thread_pool_hook(self, thread_pool_dicts, thread, args, kwargs):
        """No extra per-thread state is needed."""
        return {}

    def is_break(self):
        """Always allow the framework's break condition."""
        return True

    def fun(self, threadval, *args, **kwargs):
        """Worker entry point: download the URL passed as the first job arg."""
        self.down.get_list_url_run(threadval, args[0], self.list_proxy)


def main():
    """Launch the article downloader with a 30-thread pool."""
    articleThreadRun(30).run()


# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
