import copy
import datetime
import json
import os
import pickle
import platform
import warnings

import facade
import requests
from pymongo.errors import DuplicateKeyError
from xjlibrary.configread import MyConfigParser
from xjlibrary.mdatetime.mtime import getDateTime
from xjlibrary.mdatetime.mtime2 import MDateTimeUtils
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.network.MyIP import GetLocalIPByPrefix, get_local_ip
from xjlibrary.our_file_dir import BaseDir

from base_sipo_shichuan import BaseSipo

# Resolve all working paths relative to this file: DB config, the cover-image
# download directory (created eagerly at import time), and the cookie store.
curPath = BaseDir.get_file_dir_absolute(__file__)
configfile = BaseDir.get_new_path(curPath, "db.ini")
# Two directory levels above this file (sign convention is BaseDir's own).
topPath = BaseDir.get_upper_dir(curPath, -2)
coverPath = BaseDir.get_new_path(topPath, "download", "sipogov", "download", "cover")
BaseDir.create_dir(coverPath)
# Directory holding serialized login cookies (read elsewhere in the project).
cookiedir = BaseDir.get_new_path(curPath, "cookie")


class DownDetail(BaseSipo):
    """Scraper for the SIPO/CNIPA publication-search site.

    For each patent row it downloads: a 417-workaround search request, a
    "showViewList" page (to extract the ``fn`` key), legal-status/family/
    citation info, the abstract JSON, the cover figure, and the full text.
    Results are stored in MongoDB (``absfull`` / ``patentinfo``) and SQL
    status updates are pushed through ``result_queue``.

    NOTE(review): post-data dicts (``postdata2`` etc.) are instance state
    mutated by ``set_post*`` before each request — presumably one instance
    is shared across worker threads via ``thread_func``; confirm callers
    serialize access.
    """

    def __init__(self):
        # One log file per worker process (pid suffix) so concurrent
        # processes do not interleave their logs.
        self.filelogger = facade.get_filelogger(BaseDir.get_new_path(curPath, "logs", "step3" + str(os.getpid())))
        super().__init__(self.filelogger)
        self.cf = MyConfigParser(configfile).set_keep_keys_case().read_config()
        # Detail-page URL (abstract + basic info).
        self.url2 = self.Baseurl+"/pubsearch/patentsearch/showAbstractInfo-viewAbstractInfo.shtml"
        self.postdata2 = {
            # Example values:
            # "nrdAn": "TW105212609",
            # "cid": "TWM532100105212609",
            # "sid": "TWM532100105212609",
            "nrdAn": "",
            "cid": "",
            "sid": "",
            "wee.bizlog.modulelevel": "0201101"
        }
        self.header2 = {
            "Accept": "application/json, text/javascript, */*; q=0.01",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
            "Host": self.domain,
            "Origin": self.Baseurl,
            "Referer": self.Baseurl+"/pubsearch/patentsearch/showViewList-jumpToView.shtml",
            "Proxy-Connection": "keep-alive",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
            "X-Requested-With": "XMLHttpRequest"
        }
        # Full-text download endpoint.
        self.url3 = self.Baseurl+"/pubsearch/patentsearch/showFullText-viewFullText.shtml"
        self.postdata3 = {
            # Example values:
            # "nrdAn": "TW105212609",
            # "cid": "TWM532100105212609",
            # "sid": "TWM532100105212609",
            "nrdAn": "",
            "cid": "",
            "sid": "",
        }
        self.header3 = {
            "Accept": "application/json, text/javascript, */*; q=0.01",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8",
            "Host": self.domain,
            "Origin": self.Baseurl,
            "Referer": self.Baseurl+"/pubsearch/patentsearch/showViewList-jumpToView.shtml",
            # "Proxy-Connection": "keep-alive",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36",
            # "User-Agent": "Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Trident/4.0; SE 2.X MetaSr 1.0; SE 2.X MetaSr 1.0; .NET CLR 2.0.50727; SE 2.X MetaSr 1.0)",
            "X-Requested-With": "XMLHttpRequest"
        }
        # This request takes no POST parameters; it reuses header3.
        self.url4 = self.Baseurl+"/pubsearch/patentsearch/showViewList-showViewList.shtml"

        # Family-patent info from the detail page (getting ``fn`` first
        # requires a separate request; uses header3).
        self.url5 = self.Baseurl+"/pubsearch/patentsearch/showPatentInfo-showPatentInfo.shtml"
        self.postdata5 = {
            "literaInfo.nrdAn": "",  # e.g. HK15105078
            "literaInfo.nrdPn": "",  # e.g. HK1202221A2
            "literaInfo.fn": ""  # e.g. 54196616
        }

        # URL that resolves figure-image URLs (shares header3).
        self.url7 = self.Baseurl+"/pubsearch/patentsearch/retrieveUrls.shtml"
        self.postdata7 = {
            # Example values:
            # "figureUrl": "3964135814",
            # "rids[0]": "3964135814",
            "figureUrl": "",
            "rids[0]": "",
            "wee.bizlog.modulelevel": "0201203"
        }

        # Absolute URL of the current cover image (set per row in oneces()).
        self.imageurl = ""

    def down_url2(self, sn, proxys):
        """
        Download the detail page: basic info and abstract.

        :param sn: requests session carrying the login cookies
        :param proxys: proxy mapping for the request
        :return: (success flag, response object)
        """
        # self.header2["X-Forwarded-For"] = self.localVal.ipaddr
        BoolResult, errString, r = facade.BaseRequestPost(self.url2,
                                                          sn=sn,
                                                          data=self.postdata2,
                                                          endstring="",
                                                          proxies=proxys,
                                                          mark="abstractItemList",
                                                          allow_redirects=True,
                                                          headers=self.header2,
                                                          timeout=(30, 60))
        if BoolResult:
            self.logger.info("下载摘要成功 %s" % proxys)
            return True, r
        else:
            self.logger.info("下载摘要失败 %s" % proxys)
            return False, r

    def down_url3(self, sn, proxys):
        """
        Download the full text.

        :param sn: requests session carrying the login cookies
        :param proxys: proxy mapping for the request
        :return: (success flag, response object)
        """
        # self.header3["X-Forwarded-For"] = self.localVal.ipaddr
        BoolResult, errString, r = facade.BaseRequestPost(self.url3,
                                                          sn=sn,
                                                          data=self.postdata3,
                                                          endstring="",
                                                          proxies=proxys,
                                                          mark="fullTextDTO",
                                                          allow_redirects=True,
                                                          headers=self.header3,
                                                          timeout=(30, 60))
        if BoolResult:
            self.logger.info("下载全文成功 %s" % proxys)
            return True, r
        else:
            self.logger.info("下载全文失败 %s" % proxys)
            return False, r

    def down_url4(self, sn, proxys, threadval):
        """
        Preparatory download for the other-info requests (a plain GET
        seems to work too); one key (``fn``) from this response is needed
        downstream.

        Side effects on failure: may mark the cookie document stat=-2 in
        MongoDB and queue a SQL update disabling the user.

        :return: (success flag, response, control string) where the control
            string is "break" (give up this cookie), "12" (access blocked,
            IP likely banned) or "" (plain failure / success).
        """
        self.logger.info(proxys)
        header = copy.deepcopy(self.header3)
        # This request sends no body, so the urlencoded Content-Type must go.
        del header['Content-Type']
        print(header)
        BoolResult, errString, r = facade.BaseRequestPost(self.url4,
                                                          sn=sn,
                                                          endstring="",
                                                          proxies=proxys,
                                                          mark="literaInfo",
                                                          allow_redirects=True,
                                                          headers=header,
                                                          timeout=(30, 60))
        if BoolResult:
            self.logger.info("下载showviewlist成功 %s" % proxys)
            # Reset the consecutive-ajax-failure counter on success.
            threadval.countajax = 0
            return True, r, ""
        else:
            if r:
                self.logger.info("下载showviewlist失败 %s; %s; %s" % (proxys, str(r.status_code), r.text))
                if r.text.find("IsAjaxAndJsonData") > -1:
                    self.logger.info("发现 IsAjaxAndJsonData")
                    threadval.countajax += 1
                    # NOTE(review): this checks threadval.count (downloads done
                    # with this cookie), not countajax — a fresh cookie that
                    # fails immediately is retired; confirm that is intended.
                    if threadval.count == 0:
                        self.logger.info("将cookies设置为-2 因为IsAjaxAndJsonData")
                        self.db.cookies.update({"_id": threadval.rows["_id"]},
                                               {"$set": {"stat": -2,
                                                         "updatetime": MDateTimeUtils.get_beijin_date_strins()}})
                    if threadval.countajax > 2:
                        self.logger.info("将cookies设置为-2 因为IsAjaxAndJsonData 连续次数大于2")
                        self.db.cookies.update({"_id": threadval.rows["_id"]},
                                               {"$set": {"stat": -2,
                                                         "updatetime": MDateTimeUtils.get_beijin_date_strins()}})
                        sql = "update `user` set stat=-2 where username='{}'".format(threadval.user)
                        self.result_queue.put(sql)
                        return False, r, "break"
                if r.text.find("访问受限") > -1:
                    # "Access restricted" page: retire the cookie.
                    self.db.cookies.update({"_id": threadval.rows["_id"]},
                                           {"$set": {"stat": -2,
                                                     "updatetime": MDateTimeUtils.get_beijin_date_strins()}})

                    return False, r, "12"
            else:
                self.logger.info("下载showviewlist失败 %s" % proxys)
            return False, r, ""

    def down_url5(self, sn, proxys):
        """
        Download the other info: legal status, patent family, citations, etc.

        :return: (success flag, response object)
        """
        header = copy.deepcopy(self.header3)
        # The server appears to require an explicit Content-Length here.
        header["Content-Length"] = "{0}".format(self.get_content_length(self.postdata5))
        # header["X-Forwarded-For"] = self.localVal.ipaddr
        BoolResult, errString, r = facade.BaseRequestPost(self.url5,
                                                          sn=sn,
                                                          data=self.postdata5,
                                                          endstring="",
                                                          proxies=proxys,
                                                          mark="lawStateList",
                                                          allow_redirects=True,
                                                          headers=header,
                                                          timeout=(30, 60))
        if BoolResult:
            self.logger.info("下载法律状态等其他信息成功 %s" % proxys)
            return True, r
        else:
            self.logger.info("下载法律状态等其他信息失败 %s" % proxys)
            return False, r

    def down_url7(self, sn, proxys):
        """
        Resolve the figure-image URL for the current patent.

        :return: (success flag, response object)
        """
        # self.header3["X-Forwarded-For"] = self.localVal.ipaddr
        BoolResult, errString, r = facade.BaseRequestPost(self.url7,
                                                          sn=sn,
                                                          data=self.postdata7,
                                                          endstring="",
                                                          proxies=proxys,
                                                          mark="figureUrls",
                                                          allow_redirects=True,
                                                          headers=self.header3,
                                                          timeout=(30, 60))
        if BoolResult:
            self.logger.info("down_url7 图片地址下载成功 %s" % proxys)
            return True, r
        else:
            self.logger.info("down_url7 图片地址下载失败 %s" % proxys)
            self.logger.info(self.postdata7)
            return False, r

    def down_image_cover(self, requestsid, sn, proxys):
        """
        Download the cover image from ``self.imageurl`` and write it to a
        per-day directory as ``<requestsid>.jpg`` (coverPath on Windows,
        /var/cnipa otherwise).

        :param requestsid: raw id used as the image file name
        :return: True on success, False otherwise
        """
        headers={
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Host": self.domain,
            "Pragma": "no-cache",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36"
        }
        BoolResult, errString, r = facade.BaseRequest(self.imageurl,
                                                      sn=sn,
                                                      headers=headers,
                                                      endstring="",
                                                      proxies=proxys,
                                                      mark="",
                                                      allow_redirects=True,
                                                      timeout=(30, 60))
        if BoolResult:
            self.logger.info("下载图片成功")
            if platform.system() == "Windows":
                path = BaseDir.get_new_path(coverPath, MDateTimeUtils.get_today_date_strings())
                if not BaseDir.is_dir_exists(path):
                    BaseDir.create_dir(path)
                BaseDir.single_write_wb_file(r.content,
                                             BaseDir.get_new_path(path,
                                                                  requestsid + ".jpg"))
            else:
                path = BaseDir.get_new_path("/var/cnipa", MDateTimeUtils.get_today_date_strings())
                if not BaseDir.is_dir_exists(path):
                    BaseDir.create_dir(path)
                BaseDir.single_write_wb_file(r.content,
                                             BaseDir.get_new_path(path,
                                                                  requestsid + ".jpg"))
            return True
        else:
            self.logger.info("下载图片失败")
            return False

    def get_content_length(self, data):
        """
        Compute the Content-Length of *data* urlencoded as a form body:
        per pair one '=' plus '&' separators (2n-1 punctuation chars for n
        keys) plus the raw key/value lengths.

        NOTE(review): assumes keys and values need no percent-encoding and
        are ASCII — holds for postdata5's fields; would undercount otherwise.

        :param data: dict of str keys to str values
        :return: body length in characters
        """
        length = len(data.keys()) * 2 - 1
        total = ''.join(list(data.keys()) + list(data.values()))
        length += len(total)
        return length

    # def selectdb(self):
    #     """
    #     Fetch rows that still need downloading from MongoDB.
    #     :return:
    #     """
    #     return self.db.pageidjson.find({"stat": 0}).limit(100)

    def set_dates_417(self, id, app_no):
        """
        Build ``self.dates417``, the search payload used to work around
        HTTP 417 errors before fetching a detail page.

        The initial "viewQC.searchKeywords[0]" value is a placeholder; it
        is overwritten below with a per-character regex built from *app_no*.

        :param id: raw document id (note: shadows the builtin ``id``)
        :param app_no: application number
        """
        self.dates417 = {
            "viewQC.viewLiteraQCList[0].srcCnName": "检索式:申请号=({}+)".format(app_no),
            "viewQC.viewLiteraQCList[0].srcEnName": "SearchStatement:申请号=({}+))".format(app_no),
            "viewQC.viewLiteraQCList[0].searchStrategy": "",
            "viewQC.viewLiteraQCList[0].searchCondition.executableSearchExp": "VDB:(ID='{}')".format(id),
            "viewQC.viewLiteraQCList[0].searchCondition.sortFields": "-APD,+PD",
            "viewQC.needSearch": "true",
            "viewQC.searchKeywords[0]": "[C][ ]{0,}[N][ ]{0,}[0][ ]{0,}[0][ ]{0,}[1][ ]{0,}[0][ ]{0,}[0][ ]{0,}[0][ ]{0,}[1][ ]{0,}[1][ ]{0,}[ ]{0,}",
            "viewQC.type": "SEARCH",
            "wee.bizlog.modulelevel": "0200604"
        }
        # Each character of app_no may be followed by arbitrary spaces.
        Strings = ""
        for i in list(app_no):
            Strings = Strings + "[%s][ ]{0,}" % i
        Strings = Strings + "[ ]{0,}"
        self.dates417["viewQC.searchKeywords[0]"] = Strings

    def set_post5(self, fn, row):
        # Fill the other-info (url5) POST payload for this row.

        self.postdata5["literaInfo.fn"] = fn
        self.postdata5["literaInfo.nrdAn"] = row["app_no"]
        self.postdata5["literaInfo.nrdPn"] = row["pub_no"]

    def set_post2(self, row):
        # Fill the abstract (url2) POST payload for this row.
        self.postdata2["nrdAn"] = row["app_no"]
        self.postdata2["cid"] = row["rawid"]
        self.postdata2["sid"] = row["rawid"]

    def set_post7(self, figureRid):
        # Fill the figure-URL (url7) POST payload.
        self.postdata7["figureUrl"] = figureRid
        self.postdata7["rids[0]"] = figureRid

    def set_post3(self, row):
        # Fill the full-text (url3) POST payload for this row.
        self.postdata3["nrdAn"] = row["app_no"]
        self.postdata3["cid"] = row["rawid"]
        self.postdata3["sid"] = row["rawid"]

    def insertabs(self, dicts):
        """Insert into ``absfull``; duplicate keys are downgraded to warnings."""
        try:
            self.db.absfull.insert(dicts)
        except DuplicateKeyError as e:
            warnings.warn(str(e))

    def insertpatent(self, dicts):
        """Insert into ``patentinfo``; duplicate keys are downgraded to warnings."""
        try:
            self.db.patentinfo.insert(dicts)
        except DuplicateKeyError as e:
            warnings.warn(str(e))

    def deal_patentinfo(self, dicts):
        """
        Parse the raw ``patentinfo`` JSON and flatten counts/ids into the
        record. ``stat`` is 0 (needs follow-up download) when there are
        more than 5 citations, more than 5 legal-status entries, or any
        cpnum; otherwise 1.

        :param dicts: record containing "patentinfo" (JSON text) and "cpnum"
        :return: the same dict, enriched in place
        """
        jsondicts = json.loads(dicts["patentinfo"])
        # Patent-family count.
        cognation_count = jsondicts["cognation_count"]
        # Citation count.
        patcit_count = jsondicts["patcit_count"]
        # Legal-status entry count.
        totalCount = jsondicts["lawStatePagination"]["totalCount"]
        app_no = jsondicts["literaInfo"]["nrdAn"]
        pub_no = jsondicts["literaInfo"]["nrdPn"]
        stat = 1
        if int(patcit_count) > 5 or int(totalCount) > 5 or int(dicts["cpnum"]) > 0:
            stat = 0
        dicts["ccount"] = int(cognation_count)
        dicts["pcount"] = int(patcit_count)
        dicts["tcount"] = int(totalCount)
        dicts["app_no"] = app_no
        dicts["pub_no"] = pub_no
        dicts["lawmsg"] = ""
        dicts["cmsg"] = ""
        dicts["pmsg"] = ""
        dicts["cpmsg"] = ""
        dicts["stat"] = stat
        return dicts

    def is_login(self, threadval):
        """
        Verify the thread's cookie/proxy pair is still logged in; on
        failure mark the cookie document stat=-1 (proxy dead) or stat=-2
        (cookie dead) and, for a cookie that never downloaded anything,
        queue a SQL update disabling the user.

        :return: True when still logged in, False otherwise
        """
        id = threadval.rows["_id"]
        boolresult, msg = self.is_login_and_ip(threadval.sn, threadval.proxys, threadval.cookies)
        if boolresult:
            # Still logged in; caller moves on to the next task.
            return True
        else:
            self.logger.info(msg)
            if msg != "登陆失败":
                # TODO: proxy is dead — the login program re-checks validity.
                self.db.cookies.update({"_id": id},
                                       {"$set": {"stat": -1, "updatetime": MDateTimeUtils.get_beijin_date_strins()}})
                return False
            else:
                if threadval.count == 0:
                    sql = "update `user` set stat=-2 where username='{}'".format(threadval.user)
                    self.result_queue.put(sql)
                # TODO: cookie is dead (login failed, IP still alive).
                self.logger.info("将cookies设置为-2 因为检查cookie登录cookie失效ip没失效")
                self.db.cookies.update({"_id": id},
                                       {"$set": {"stat": -2, "updatetime": MDateTimeUtils.get_beijin_date_strins()}})
                return False

    def thread_func(self, threadval, row):
        """
        Process one row via ``oneces`` and translate its (ok, code, msg)
        result into a keep-cookie / drop-cookie decision.

        :return: True to keep using this cookie, False to rotate it
        """
        resultbool, num, msg = self.oneces(row, threadval)
        self.logger.info(msg)
        if resultbool:
            if num == 10:
                # msg is the SQL statement marking the row done.
                self.result_queue.put(msg)
            return True
        else:
            if num in (11, 12):
                # Cookie retired / IP banned inside down_url4.
                return False
            if num == 1:
                # 404 on the 417-workaround search: retry search, then
                # fall back to a login check.
                results = self.down_search_page(threadval.sn, threadval.proxys)
                return self.is_login(threadval)
                """
                if not results:
                    # todo 检查登录是否有效
                    return self.is_login(threadval)
                else:
                    # 获取下一个
                    return True
                """
            elif num in (2, 4, 5, 6, 7, 8, 9):
                # Any other download failure: verify we are still logged in.
                return self.is_login(threadval)
            elif num == 3:
                # The detail page can legitimately be empty (no fn parsed);
                # mark the article stat=-1 and move on.
                self.logger.info("没有解析出fn,有可能出现这种情况，状态写为-1")
                sql = "update article set stat=-1 where rawid='{}'".format(row["rawid"])
                self.result_queue.put(sql)
                return True
            else:
                raise Exception("没有判断的情况")

    def oneces(self, row, threadval):
        """
        Download everything for one patent row.

        :param row: dict with rawid/app_no/pub_no/cpnum
        :param threadval: per-thread state (session, proxys, counters)
        :return: (ok, code, msg); code 10 = done (msg is the SQL update),
            1-9 = specific failure stage, 11/12 = cookie/IP-level failure.
        """
        # Skip if the abstract was already downloaded for this rawid.
        resultrow = self.db.absfull.find_one({"requestsid": row["rawid"]})
        if resultrow:
            self.logger.info("存在 不需要重复下载")
            sql = "update article set stat=1 where rawid='{}'".format(row["rawid"])
            return True, 10, sql
        # time.sleep(30)
        # sleeptime = int(time.time()) - threadval.timesleep
        # if sleeptime < 5:
        #     time.sleep(5-sleeptime)
        # threadval.timesleep = int(time.time())
        # Fixed throttle between rows. NOTE: ``time`` is imported further
        # down this module (after the class body) — resolved at call time.
        time.sleep(5)
        dicts = {}
        dicts1 = {}
        print(row["rawid"])
        dicts["requestsid"] = row["rawid"]
        dicts1["requestsid"] = row["rawid"]
        # Prepare the payload that works around the HTTP 417 problem.
        self.set_dates_417(row["rawid"], row["app_no"])
        # Issue the 417-workaround search request.
        results, r = self.search_detail(threadval.sn, threadval.proxys)
        if not results:
            if r is not None and r.status_code == 404:
                return False, 1, "遭遇404错误，请搜索"
            else:
                return False, 2, "417下载错误，请检查登录是否成功"
        # Preparatory request for the surrounding-info downloads.
        boolresult, r, is_break = self.down_url4(threadval.sn, threadval.proxys, threadval)
        if is_break == "break":
            return False, 11, "下载周边信息做准备失败,请检查登录情况"
        if is_break == "12":
            return False, 12, "出现访问问题，可能ip被封"
        if not boolresult:
            return False, 9, "下载周边信息做准备失败,请检查登录情况"

        dicts["showviewlist"] = r.text
        try:
            fn = json.loads(r.text)["viewLiteraDTOList"][0]["literaInfo"]["fn"]
        except:
            # Special case (e.g. CN00325821): no detail-page info at all.
            return False, 3, "下载出错，请检查是否登录"
        self.logger.info("fn 的值为:" + fn)
        # Fill POST parameters for the other-info request.
        self.set_post5(fn, row)
        # Download legal status / family / citations.
        boolresult, r = self.down_url5(threadval.sn, threadval.proxys)
        if not boolresult:
            return False, 4, "下载法律状态等周边信息失败"
        dicts1["patentinfo"] = r.text

        # Fill POST parameters for the abstract request.
        self.set_post2(row)
        # Download the abstract.
        boolresult, r = self.down_url2(threadval.sn, threadval.proxys)
        if not boolresult:
            if r.status_code == 417 and r.text.find("系统错误，请联系管理员") > -1:
                # Known server-side case (e.g. app_no CN00338791): record
                # the abstract as unavailable and continue.
                dicts["abs"] = "-1"
            else:
                return False, 5, "下载摘要信息失败"
        else:
            dicts["abs"] = r.text
            figureRid = json.loads(r.text)["abstractInfoDTO"]["figureRid"]

            if not figureRid or figureRid == "" or figureRid == "null" or figureRid == "None":
                self.logger.info("没有图片 不下载")
                dicts["imageurl"] = ""
            else:
                # Fill POST parameters for the figure-URL request.
                self.set_post7(figureRid)
                # Resolve the figure URL.
                boolresult, r = self.down_url7(threadval.sn, threadval.proxys)
                if not boolresult:
                    return False, 6, "下载图片地址失败"
                url = json.loads(r.text)["figureUrls"][0]
                self.imageurl = self.Baseurl+"/pubsearch" + url
                self.logger.info("***image url is: %s" % self.imageurl)
                # Download the cover image itself.
                boolresult = self.down_image_cover(row["rawid"], threadval.sn, threadval.proxys)
                if not boolresult:
                    return False, 7, "下载图片失败"
                dicts["imageurl"] = self.imageurl
        # Fill POST parameters for the full-text request.
        self.set_post3(row)
        # Download the full text.
        boolresult, r = self.down_url3(threadval.sn, threadval.proxys)
        # time.sleep(2)
        if not boolresult:
            self.logger.info("下载全文失败")
            printr(r)
            if r is not None:
                printr(r.status_code)
                if r.status_code == 417:
                    print("经过实验 发现确实存在该情况 比如 公开号 US8855085B2 US2015078434A1")
                    dicts["fulltxt"] = "-1"
                else:
                    return False, 8, "下载全文失败"
            else:
                return False, 8, "下载全文失败"
        else:
            dicts["fulltxt"] = r.text
        now = getDateTime()
        dicts["downdate"] = now

        self.logger.info("开始保存数据")
        self.insertabs(dicts)
        dicts1["downdate"] = now
        dicts1["cpnum"] = row["cpnum"]
        dicts1 = self.deal_patentinfo(dicts1)
        self.insertpatent(dicts1)
        self.logger.info("开始更新状态")
        sql = "update article set stat=1 where rawid='{}'".format(row["rawid"])
        threadval.count += 1
        sql1 = "insert into downcount (`process`,`count`,`proxys`,`user`,`cookiesid`) values ('%s',%s,'%s','%s','%s');"
        sql1 = sql1 % (
            str(os.getpid()), threadval.count, threadval.rows["proxy"], threadval.rows["user"],
            str(threadval.rows["_id"]))
        self.result_queue.put(sql1)
        return True, 10, sql

    def set_result_queue(self, queue):
        """Attach the distributed result queue used for SQL status updates."""
        self.result_queue = queue


def printr(r):
    """Best-effort debug print of a response body.

    Callers pass a requests response, ``None``, or even a bare status code
    (see the ``printr(r.status_code)`` call site), so any failure to reach
    or print ``.text`` is deliberately swallowed.

    :param r: response-like object; anything without a printable ``text``
        attribute produces no output.
    """
    try:
        print(r.text)
    except Exception:
        # Narrowed from a bare ``except:`` so KeyboardInterrupt/SystemExit
        # are no longer swallowed; AttributeError (None/int input) and
        # console encoding errors are still ignored, as before.
        pass


"""
**********************多进程分布式代码**************************
"""
import time
from multiprocessing.managers import BaseManager


# 创建类似的QueueManager:


class QueueManager(BaseManager):
    """BaseManager subclass for the distributed task/result queues.

    The queue accessors (``get_task_queue`` / ``get_result_queue``) are
    registered on it at runtime by the client code.
    """


class NodeTask(object):
    """Client-side handle for one task_master-style queue server.

    Registers the remote queue names, holds the manager connection, and
    exposes the remote task/result queue proxies once connected.
    """

    def __init__(self, server_addr, port):
        self.register()
        # Address of the machine running task_master.py, e.g. '192.168.30.123'.
        self.server_addr = server_addr
        print('Connect to server %s...' % self.server_addr)
        # Port and authkey must match task_master.py exactly (e.g. 5002).
        self.m = QueueManager(address=(self.server_addr, port), authkey=b'abc')
        # Remote queue proxies; populated by set_task_result_obj().
        self.task = self.result = None

    def register(self):
        """Register the remote queue names (names only — this manager side
        merely fetches the queues over the network)."""
        for queue_name in ('get_task_queue', 'get_result_queue'):
            QueueManager.register(queue_name)

    def conn(self):
        """Open the network connection to the queue server."""
        self.m.connect()

    def set_task_result_obj(self):
        """Bind the remote task and result queue proxies."""
        self.task = self.m.get_task_queue()
        self.result = self.m.get_result_queue()


class DetailDown(MThreadingRun):
    """Thread-pool driver: pulls patent rows from a distributed task queue
    (port 5004), pulls fresh cookie/proxy bundles from a second queue
    (port 5002), runs :class:`DownDetail` per row, and pushes SQL status
    updates back through the shared result queue.
    """

    def __init__(self):
        sysstr = platform.system()
        if sysstr == "Windows":
            self.ip = GetLocalIPByPrefix("192.168.")
        else:
            self.ip = get_local_ip("enp2s0")
        # self.ip = "192.168.30.179"
        # Task/result queues live on port 5004; cookie queue on 5002.
        self.node = NodeTask(self.ip, 5004)
        self.node.conn()
        self.node.set_task_result_obj()
        self.nodecookie = NodeTask(self.ip, 5002)
        self.nodecookie.conn()
        self.nodecookie.set_task_result_obj()
        self.down = DownDetail()
        super().__init__(0, self.down.filelogger)

        self.down.set_result_queue(self.thread_pool.result_queue)
        self.down.set_localVal(self.thread_pool.localVal)
        self.rows = None

    def getTask(self, *args, **kwargs):
        """Unused; tasks are fed in through setTask() from the remote queue."""
        pass

    def setTask(self, results=None, *args, **kwargs):
        """Pull up to 99 rows from the remote task queue and enqueue jobs."""
        for i in range(1, 100):
            row = self.node.task.get()
            self.add_job(self.func, row)

    def dealresult(self, *args, **kwargs):
        """Flush queued SQL statements to MySQL."""
        for sql in self.results:
            self.down.mysqlutils.ExeSqlToDB(sql)

    def check_is_need_cookie(self):
        """
        Check whether a new cookie is needed: any worker thread waiting
        for one, or the pool not yet at its worker limit.
        """
        for threadname in self.thread_pool.thread_pool_dicts:
            thread = self.thread_pool.thread_pool_dicts[threadname]["thread"]
            if thread.threadval.is_sleep:
                return True
        if self.thread_pool.get_thread_num() < self.thread_pool.max_workers:
            return True
        return False

    def setProxy(self, proxysList=None):
        """Grow the pool by one thread, then back off for 30s."""
        self.thread_pool.add_thread(1)
        time.sleep(30)

    def thread_pool_hook(self, thread_pool_dicts, thread, *args, **kwargs):
        # Initialize threadval only for threads of our own pool.
        if thread_pool_dicts is self.thread_pool.thread_pool_dicts:
            self.init_threadval(thread)
        return {}

    def fun(self, threadval, *args, **kwargs):
        """
        Worker body: when the thread's cookie is exhausted, block on the
        cookie queue for a fresh cookie/proxy bundle; otherwise process
        one row from args[0] via DownDetail.thread_func, rotating the
        cookie on failure or after 500 downloads.
        """
        self.logger.info("获取一个需要请求的数据")
        # When a cookie reaches its quota this flag is set True and a new
        # one is fetched. NOTE(review): original comment said 10 tasks per
        # cookie, but the rotation below triggers at count > 500 — confirm.
        if threadval.is_sleep:
            self.logger.info("cookie 到达上限 等待新的cookie")
            # boolresult, msg = self.down.set_cookie_from_db(updatetime="updatetime",
            #                                                source="mimvp",
            #                                                is_time=True,
            #                                                timesleep=150)
            # Blocking get() — may stall until a cookie becomes available.
            self.logger.info("开始调用get（） 有可能柱塞")
            msg = self.nodecookie.task.get()
            self.logger.info("开始调用get完毕")
            self.logger.info(msg)
            bytess = msg["cookie"]
            if not self.down.set_proxys(msg["proxy"], msg["source"]):
                # Proxy rejected: release the cookie back to stat 0.
                self.down.db.cookies.update({"_id": msg["_id"], "source": msg["source"]},
                                            {"$set": {"stat": 0,
                                                      "updatetime": MDateTimeUtils.get_beijin_date_strins()}})
                return
            sql = "insert into downcount (`process`,`count`,`proxys`,`user`,`cookiesid`) values ('%s',%s,'%s','%s','%s');"
            sql = sql % (str(os.getpid()), 0, msg["proxy"], msg["user"], str(msg["_id"]))
            self.down.result_queue.put(sql)
            self.down.localVal.sn = requests.session()
            # NOTE(review): cookie blob is unpickled from the queue — only
            # safe because the cookie server is trusted/internal.
            self.down.localVal.cookies = pickle.loads(bytess)
            self.down.localVal.sn.cookies.update(self.down.localVal.cookies)
            self.logger.info("开始设置新的cookies")
            # Past this point the thread holds a usable cookie; internal
            # deletion is allowed only once setup has reached here.
            threadval.sn = requests.session()
            threadval.proxys = self.down.localVal.proxys
            threadval.cookies = self.down.localVal.cookies
            threadval.user = msg["user"]
            threadval.cookies_id = msg["_id"]
            threadval.rows = msg
            threadval.timesleep = int(time.time())
            threadval.sn.cookies.update(threadval.cookies)
            self.down.set_ipaddr(threadval.proxys)
            threadval.count = 0
            # Consecutive "IsAjaxAndJsonData" failure counter.
            threadval.countajax = 0
            threadval.is_sleep = False
        # Mark the thread non-deletable while it holds work.
        threadval.thread_delete = False
        self.logger.info("当前cookie已获取{},共500个".format(threadval.count))
        if threadval and threadval.cookies and threadval.proxys:
            if args and args[0]:
                def strings_insert(str_i):
                    # Turn YYYYMMDD into YYYY.MM.DD by inserting dots.
                    list_i = list(str_i)  # str -> list
                    list_i.insert(4, '.')  # insert mutates in place; no reassignment
                    list_i.insert(7, '.')
                    str_i = ''.join(list_i)
                    return str_i

                row = args[0]
                rows = {"rawid": row[0],
                        "app_no": row[1],
                        "pub_no": row[2],
                        "app_date": strings_insert(row[3]),
                        "pub_date": strings_insert(row[4]),
                        "cpnum": row[5]}
                if threadval.count > 500:
                    self.logger.info("判断到count 大于指定数量")
                    threadval.sn = None
                    threadval.is_sleep = True
                    threadval.count = 0
                    threadval.timesleep = int(time.time())
                    threadval.countajax = 0
                    return
                if threadval.sn:
                    if not self.down.thread_func(threadval, rows):
                        # Rotate the cookie.
                        threadval.sn = None
                        threadval.is_sleep = True
                        threadval.proxys = None
                    # else:
                    #     self.down.db.cookies.update({"_id": threadval.rows["_id"]},
                    #                                 {"$set": {"updatetime":MDateTimeUtils.get_beijin_date_strins()}})

                else:
                    # Session was dropped; rebuild it from the stored cookies.
                    threadval.sn = requests.session()
                    threadval.sn.cookies.update(threadval.cookies)
                    self.down.set_ipaddr(threadval.proxys)
                    if not self.down.thread_func(threadval, rows):
                        # Rotate the cookie.
                        threadval.sn = None
                        threadval.is_sleep = True
                        threadval.proxys = None

                self.down.db.cookies.update({"_id": threadval.rows["_id"]},
                                            {"$set": {"usestat": 1,
                                                      "updatetime": MDateTimeUtils.get_beijin_date_strins()}})

    def is_break(self):
        """Never self-terminate; the loop runs until killed externally."""
        return False

    def init_threadval(self, thread):
        """
        Initialize the pool thread's threadval: start asleep so the first
        iteration of fun() fetches a cookie.
        :return:
        """
        thread.threadval.is_sleep = True


if __name__ == "__main__":
    detail = DetailDown()
    # Pool tuning: work-queue depth 3, single worker thread.
    detail.thread_pool.set_work_queue(3)
    # NOTE(review): static-max is toggled off before set_max_workers and
    # back on after — presumably required by MThreadingRun to allow the
    # limit change; confirm against the library.
    detail.thread_pool.set_is_static_max(False)
    detail.thread_pool.set_max_workers(1)
    detail.thread_pool.set_is_static_max(True)
    detail.run(model=2)
