import json
import os
import platform
import threading
import time

import facade
from xjlibrary.mdatetime.mtime import getTodayDate
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.mprocesspoll.process_distributed.BaseTask import BaseTask
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.network.MyIP import get_local_ip, GetLocalIPByPrefix
from xjlibrary.our_file_dir import BaseDir

# Resolve working paths relative to this script's location.
curPath = BaseDir.get_file_dir_absolute(__file__)
# presumably walks two directory levels up from curPath — TODO confirm get_upper_dir semantics
topPath = BaseDir.get_upper_dir(curPath, -2)
# Destination directory for downloaded detail pages; created up front so
# worker threads can append output files without checking existence.
filepath = BaseDir.get_new_path(topPath, "download", "wanfang_bz", "download", "detail")
BaseDir.create_dir(filepath)
# Database/redis configuration file sitting next to this script.
configfile = BaseDir.get_new_path(curPath, "db.ini")


class NodeTask(BaseTask):
    """Worker node that registers itself with the distributed task master."""

    def __init__(self):
        super().__init__()
        # Connect to the server, i.e. the machine running task_master.py.
        # On Windows the LAN address is found by prefix scan; otherwise it is
        # read directly from the wired interface enp2s0.
        if platform.system() == "Windows":
            local_ip = GetLocalIPByPrefix("192.168.")
        else:
            local_ip = get_local_ip("enp2s0")
        self.ip = local_ip
        self.server_addr = local_ip
        self.port = 5000
        self.init_node(self.server_addr, self.port)


class DownDetail():
    """Downloads wanfangdata standard detail pages through a proxy ring and
    appends the raw HTML as JSON lines into dated .big_json files."""

    def __init__(self):
        self.logger = facade.get_streamlogger()
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger)
        self.BaseUrl = "http://www.wanfangdata.com.cn/details/detail.do?_type=standards&id={}"
        self.header = {
            "Referer": "http://www.wanfangdata.com.cn/navigations/standards.do",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
        }
        self.node = NodeTask()
        self.init_node()

    def init_node(self):
        # Open the connection to the master and bind the task/result queues.
        self.node.conn()
        self.node.set_task_result_obj()

    def get_proxy(self):
        """Fetch the current proxy list from redis (config in db.ini)."""
        return getDataFromRedis(curPath, 'db.ini')

    def down_one(self, threadval, ids, listproxys):
        """Download one detail page identified by *ids* and report
        (id, status, fail_increment) through the thread's result queue.

        threadval  -- per-thread context carrying result_queue
        ids        -- record id substituted into BaseUrl
        listproxys -- project proxy ring; empty ring triggers a back-off
        """
        result_queue = threadval.result_queue
        url = self.BaseUrl.format(ids)
        print("url is :" + url)
        # No proxies available: back off and let the caller retry later.
        if listproxys.length() == 0:
            print("没有代理，睡眠10秒钟等待")
            time.sleep(10)
            return
        ok, err_msg, resp = facade.MProxyRequest(
            url,
            retrynum=3,
            HEADERS=self.header,
            timeout=(30, 60),
            Feature="info_left",
            proxyRingList=listproxys,
        )
        if not ok:
            # status 0, failure counter incremented by one
            result_queue.put((ids, 0, 1))
            return
        record = {
            'url': url,
            'html': resp.text,
            'date': getTodayDate(),
        }
        # One output file per (date, host-ip, pid, thread) so concurrent
        # writers never share a file.
        out_file = os.path.join(
            filepath,
            '%s_%s_%d_%d.big_json' % (
                getTodayDate(), GetLocalIPByPrefix('192.168.'), os.getpid(),
                threading.get_ident())
        )
        BaseDir.single_add_file(out_file, json.dumps(record, ensure_ascii=False).strip() + '\n')
        # status 1, failure field is irrelevant on success
        result_queue.put((ids, 1, 0))


class DetailThreadRun(MThreadingRun):
    """Thread-pool driver: pulls ids from the master node, downloads each
    detail page via DownDetail, and pushes results back to the node."""

    def __init__(self, num):
        super().__init__(num)
        self.down = DownDetail()

    def getTask(self, *args, **kwargs):
        # Unused: tasks are queued exclusively through setTask().
        pass

    def setTask(self, *args, **kwargs):
        # Fetch up to 99 ids from the master's task queue and enqueue a job
        # for each one.
        for _ in range(1, 100):
            self.add_job(self.func, self.down.node.task.get())

    def dealresult(self, *args, **kwargs):
        print("self.results length is {}".format(len(self.results)))
        # Deliberately does NOT clear self.results; the framework owns it.
        for item in self.results:
            self.down.node.result.put(item)

    def setProxy(self, proxysList=None):
        # Refresh the proxy ring from redis, then pause before the next refresh.
        proxies = list(self.down.get_proxy())
        MThreadingRun.setProxy(self, proxies)
        time.sleep(60)

    def is_break(self):
        # Always allow the framework's break condition.
        return True

    def fun(self, threadval, *args, **kwargs):
        # Worker entry point: args[0] is the record id queued by setTask().
        self.down.down_one(threadval, args[0], self.list_proxy)

    def thread_pool_hook(self, thread_pool_dicts, thread, args, kwargs) -> dict:
        # No per-thread extras are injected.
        return {}


if __name__ == "__main__":
    # Spin up 40 worker threads.
    runner = DetailThreadRun(40)
    # NOTE(review): dealresultsnum presumably controls how often dealresult()
    # flushes — confirm against MThreadingRun.
    runner.dealresultsnum = 5
    runner.run(model=2)
