"""
下载首页并查看有多少本期刊
"""
import sys

import facade
import requests
from parsel import Selector
from xjlibrary.our_file_dir import BaseDir

# Absolute directory containing this script; db.ini is expected to live next to it.
curPath = BaseDir.get_file_dir_absolute(__file__)
# Path to the MySQL connection settings consumed by facade.MysqlUtiles below.
configfile = BaseDir.get_new_path(curPath, "db.ini")


class DownHomePage(object):
    """Download the SIAM LOCUS homepage and upsert the journal list into MySQL.

    Workflow: ``downpage`` fetches the landing page through a proxy, then
    ``para_home_page`` extracts (url, journal name) pairs from two sections
    of the page and writes them with ``replace into`` so reruns are idempotent.
    """

    def __init__(self):
        # Landing page that lists every journal hosted on the platform.
        self.url = "https://epubs.siam.org/page/locus"
        # Browser-like headers so the site serves the normal HTML page.
        self.header = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
            "accept-encoding": "gzip, deflate",
            "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
            # "cache-control": "no-cache",
            # "pragma": "no-cache",
            "referer": "https://epubs.siam.org/page/locus",
            "sec-fetch-mode": "navigate",
            "sec-fetch-site": "none",
            "sec-fetch-user": "?1",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/76.0.3809.100 Safari/537.36"
        }
        self.logger = facade.get_streamlogger()
        # Section "db" of db.ini holds the connection settings.
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger)
        self.sn = requests.Session()
        # NOTE(review): entries lack a scheme ("http://host:port"); confirm
        # facade.BaseRequest accepts bare host:port proxy strings.
        self.proxy = {
            "http": "192.168.30.176:8012",
            "https": "192.168.30.176:8012"
        }

    def downpage(self):
        """Fetch the homepage; parse it on success, otherwise log and exit.

        ``mark`` is presumably a substring facade.BaseRequest checks for in
        the response body to validate the download — TODO confirm.
        ``verify=False`` disables TLS certificate validation; acceptable only
        because traffic goes through an internal proxy.
        """
        BoolResult, errString, r = facade.BaseRequest(self.url,
                                                      proxies=self.proxy,
                                                      sn=self.sn,
                                                      mark="contentWithAd",
                                                      headers=self.header,
                                                      timeout=(30, 60),
                                                      verify=False)
        if BoolResult:
            self.para_home_page(r)
        else:
            self.logger.info("下载出错 请检查")
            sys.exit(-1)

    def para_home_page(self, r):
        """Parse the homepage response and upsert (url, journalname) rows.

        :param r: response object with a ``.text`` attribute holding the HTML.
        """
        # Keep a local copy of the raw page for offline debugging.
        BaseDir.single_write_file(r.text, "./test.html")
        selector = Selector(text=r.text)

        # Shared upsert statement; ``replace into`` makes reruns idempotent.
        sql = "replace into journal (`url`,`journalname`) values (%s,%s)"

        # Part 1: journals listed in the "journals" navigation menu.
        listurl = selector.xpath('//li[@class="journals"]/ul/li/a/@href').getall()
        listtext = selector.xpath('//li[@class="journals"]/ul/li/a/text()').getall()
        listsql = [
            (url, text)
            for url, text in zip(listurl, listtext)
            # Skip the two menu entries that are not journals.
            if url not in ("/page/journalFAQ", "/page/locus")
        ]
        self.mysqlutils.ExeSqlMany(sql, listsql)

        # Part 2: journals shown in the LOCUS grid. Links look like
        # "http://epubs.siam.org/journal/toc/siread/38/4"; strip the host,
        # the "/toc" segment and the trailing volume/issue components,
        # leaving a journal-level path such as "/journal/siread".
        listurl = selector.xpath('//div[@class="locusRow"]/div/div/a/@href').getall()
        listtext = selector.xpath('//div[@class="locusRow"]/div/div/a/text()').getall()
        listsql = []
        for url, text in zip(listurl, listtext):
            url = url.replace("http://epubs.siam.org", "").replace("/toc", "")
            url = "/".join(url.split("/")[:-2])
            listsql.append((url, text))
        self.mysqlutils.ExeSqlMany(sql, listsql)


def main():
    """Entry point: download the homepage and store the journal list."""
    DownHomePage().downpage()


if __name__ == "__main__":
    main()
