import facade
import math
import parsel
import requests
from xjlibrary.our_file_dir import BaseDir
from xjlibrary.tools.BaseUrl import BaseUrl

# Directory containing this script; db.ini is expected to live next to it.
curPath = BaseDir.get_file_dir_absolute(__file__)
# Two levels above the script directory — presumably the project root; TODO confirm.
TopPath = BaseDir.get_upper_dir(curPath, -2)
# Output directory where raw list-page HTML files are saved.
sPath = BaseDir.get_new_path(TopPath, "download", "sublibbook", "download", "pages")
# Ensure the output directory exists before any download runs.
BaseDir.create_dir(sPath)


class DownBookList(object):
    """Crawl the paginated search-result pages of each sublibrary listed in
    the ``sublist`` MySQL table and persist the raw HTML under ``sPath``.

    Workflow: ``home()`` warms up the session (cookies), then ``select()``
    walks every pending DB row and calls ``down_pages()`` for it.
    """

    def __init__(self):
        self.logger = facade.get_streamlogger()
        # Base address of the library catalogue host.
        self.Baseurl = "http://202.115.72.61:9099/"
        # All traffic is routed through this HTTP proxy.
        self.proxy = {
            "http": "192.168.30.176:8131",
            "https": "192.168.30.176:8131",
        }
        # One shared session so cookies set by the home page persist
        # across subsequent page requests.
        self.sn = requests.Session()
        self.headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "Host": "202.115.72.61:9099",
            "Referer": "http://202.115.72.61:9099/",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36"
        }

        # MySQL helper configured from the db.ini file next to this script.
        self.mysqlutils = facade.MysqlUtiles(BaseDir.get_new_path(curPath, "db.ini"), "db", self.logger)

    def select(self):
        """Process every pending row (stat=0) in ``sublist``.

        Fix: the original unconditionally set stat=1 even when
        ``down_pages`` reported a failure, so broken downloads were never
        retried.  A row is now only marked done when the download did not
        fail; failed rows keep stat=0 and are picked up on the next run.
        """
        sql = "select url,`subject`,allpage from sublist where stat=0"
        rows = self.mysqlutils.SelectFromDB(sql)
        if rows:
            for row in rows:
                # row[0] = url, row[2] = allpage (falsy on the first run).
                if self.down_pages(row[0], row[2]) is False:
                    # Download failed; leave stat=0 so the row is retried.
                    continue
                # NOTE(review): the URL is interpolated directly into SQL.
                # Values come from our own table, but a parameterized query
                # would be safer if MysqlUtiles supports one — TODO confirm.
                sql = "update sublist set stat=1 where url = '{}'".format(row[0])
                self.mysqlutils.ExeSqlToDB(sql)

    def home(self):
        """Visit the site home page first to establish session cookies.

        :return: True on success, False when the home page is unreachable.
        """
        BoolResult, errString, r = facade.BaseRequest(self.Baseurl,
                                                      sn=self.sn,
                                                      proxies=self.proxy,
                                                      headers=self.headers,
                                                      timeout=(30, 60))
        if BoolResult:
            return True
        # Log message means "home page access error".
        self.logger.info("首页访问错误")
        return False

    def down_one_page(self, url):
        """Fetch one listing page through the shared session/proxy.

        :param url: absolute page URL.
        :return: the page HTML text on success, or False on failure.
            The ``mark`` argument presumably asks BaseRequest to verify the
            marker string appears in the response — TODO confirm against
            facade.BaseRequest.
        """
        BoolResult, errString, r = facade.BaseRequest(url,
                                                      sn=self.sn,
                                                      mark="searListCon clearfix",
                                                      proxies=self.proxy,
                                                      headers=self.headers,
                                                      timeout=(30, 60))
        if BoolResult:
            return r.text
        return False

    def down_pages(self, url, allpages):
        """Download every result page for one sublibrary URL.

        Page 1 is fetched first (unless already cached on disk) to discover
        the total result count, which is written back to the DB.  Pages
        2..allpages are then fetched, skipping files that already exist.

        Fix: the original returned None from the tail loop and silently
        ignored per-page fetch failures; it now returns an explicit True,
        and returns False when any page fails so the caller can retry
        (already-saved pages are skipped on the retry).

        :param url: sublibrary search URL containing a ``sublibID`` query key.
        :param allpages: page count from the DB; falsy on the first run.
        :return: True when all pages were handled, False on any failed fetch.
        """
        sublibID = BaseUrl.urlQuery2Dict(url)["sublibID"]
        pagespath = BaseDir.get_new_path(sPath, sublibID + "_1.html")
        if BaseDir.is_file_exists(pagespath) and allpages:
            # Log message means "file exists".
            self.logger.info("文件存在：" + pagespath)
        else:
            html = self.down_one_page(url)
            if html is False:
                return False
            BaseDir.single_write_file(html, pagespath)
            select = parsel.Selector(text=html)
            # Total-result banner on page 1, e.g. "共 N 条结果" ("N results").
            allnum = select.xpath('//*[@id="pageEx"]/span/text()').get()
            self.logger.info(allnum)
            if allnum:
                allnum = int(allnum.replace("共", "").replace("条结果", "").strip())
                # 20 results per page.
                allpages = math.ceil(allnum / 20)
                sql = "update sublist set allpage='{}' where url = '{}'".format(allpages, url)
                self.mysqlutils.ExeSqlToDB(sql)
            else:
                # No counter element: assume a single page and stop here.
                sql = "update sublist set allpage='1' where url = '{}'".format(url)
                self.mysqlutils.ExeSqlToDB(sql)
                return True
        ok = True
        for i in range(2, int(allpages) + 1):
            pagespath = BaseDir.get_new_path(sPath, sublibID + "_" + str(i) + ".html")
            if BaseDir.is_file_exists(pagespath):
                self.logger.info("文件存在：" + pagespath)
                continue
            pageurl = url + "&viewType=imgView&sortType=Default&abolish=&indexInfor=&PageIndex={}".format(
                str(i))
            html = self.down_one_page(pageurl)
            if html is False:
                # Remember the failure but keep fetching the remaining pages;
                # the caller will retry only the missing ones next run.
                ok = False
            else:
                BaseDir.single_write_file(html, pagespath)
        return ok

if __name__ == "__main__":
    # Entry point: warm up the session via the home page, then process
    # every pending row from the sublist table.
    downloader = DownBookList()
    if downloader.home():
        downloader.select()
