"""
书生之家-内蒙古工业大学
url: http://202.207.22.16:9988/index.action
代理: 192.168.30.176:8062
"""
import time

import facade
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.our_file_dir import BaseDir

# Resolve this script's absolute directory, climb to the project root, and
# prepare the output directory for the downloaded list-page HTML files.
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)  # -2 presumably means "two directory levels up" — TODO confirm against BaseDir
sPath = BaseDir.get_new_path(TopPath, "download", "shusheng_imut", "download", "listhtml")
BaseDir.create_dir(sPath)  # ensure the target directory exists before any worker writes to it


class ShuShengDownload(object):
    """Downloader for the ShuSheng book-list pages.

    Each list page is fetched via HTTP POST through a fixed LAN proxy and
    saved to disk as a GB18030-encoded HTML file under the module-level
    ``sPath`` directory. Pages already on disk are skipped, so runs are
    resumable.
    """

    def __init__(self):
        self.logger = facade.get_streamlogger()
        # All traffic goes through the campus proxy (see module docstring).
        self.proxy = {"http": "http://192.168.30.176:8062",
                      "https": "http://192.168.30.176:8062"}
        self.BASE_URL = 'http://202.207.22.16:9988'
        # Headers copied from a real browser session against the target host.
        self.header = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "Host": "202.207.22.16:9988",
            "Origin": "http://202.207.22.16:9988",
            "Content-Type": "application/x-www-form-urlencoded",
            "Pragma": "no-cache",
            "Referer": "http://202.207.22.16:9988/book_list.action?zhongtuQuery=false",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/78.0.3904.87 Safari/537.36"
        }

    def down_list2(self, result_queue, page):
        """Download one book-list page and write it to disk.

        :param result_queue: unused; kept so the thread-pool worker signature
            (``fun(threadval, page)``) still matches — TODO confirm with framework
        :param page: 1-based page number of the book list
        """
        # Resume support: skip work as early as possible if the page exists.
        page_path = BaseDir.get_new_path(sPath, "{}.html".format(page))
        if BaseDir.is_file_exists(page_path):
            print("文件存在->{}".format(page_path))
            return
        url = self.BASE_URL + "/book_list.action?zhongtuQuery=false&kindId=&secondQuery=false"
        postdata = {"pageNo": str(page)}
        # Marker string that must appear in a valid response page.
        feature = '<td align="CENTER" valign="TOP">'
        BoolResult, errString, r = facade.BaseRequestPost(url,
                                                          data=postdata,
                                                          headers=self.header,
                                                          mark=feature,
                                                          endstring="",
                                                          proxies=self.proxy,
                                                          timeout=(30, 60))
        if not BoolResult:
            # Fix: previously logged a bare "请求错误" at INFO and discarded
            # errString; log the page number and the error detail at ERROR level.
            self.logger.error("请求错误,第{}页: {}".format(page, errString))
            return
        # The site serves GB-family encoding; decode/re-encode as GB18030.
        BaseDir.single_write_file(r.content.decode("GB18030"), page_path, encoding="GB18030")
        self.logger.info("写入文件成功,第{}页".format(page))
        time.sleep(4)  # throttle: the server blocks clients that request too fast

class DownListThreadRun(MThreadingRun):
    """Thread-pool runner that queues every book-list page for download."""

    def __init__(self, num):
        super(DownListThreadRun, self).__init__(num)
        self.down = ShuShengDownload()

    def getTask(self, *args, **kwargs):
        # No dynamic task source — every job is queued up front in setTask.
        pass

    def setTask(self, results=None, *args, **kwargs):
        # Queue one job per list page (21277 pages total), then tell the
        # framework that task production is finished.
        last_page = 21277
        for page_no in range(1, last_page + 1):
            self.add_job(self.func, page_no)
        return "break"

    def dealresult(self, *args, **kwargs):
        # Results are written straight to disk by the downloader; nothing to
        # collect here.
        pass

    def setProxy(self, proxysList=None):
        # A fixed proxy is baked into ShuShengDownload; just idle this hook.
        time.sleep(100)

    def is_break(self):
        return True

    def thread_pool_hook(self, thread_pool_dicts, thread, args, kwargs):
        return {}

    def fun(self, threadval, *args, **kwargs):
        # Worker body: args[0] carries the page number queued by setTask.
        page_no = args[0]
        self.down.down_list2(threadval, page_no)


if __name__ == "__main__":
    # Keep the pace slow: if consecutive request errors appear, stop for
    # about a minute before resuming.
    runner = DownListThreadRun(2)
    runner.run()
