import sys

import facade
import requests
from parsel import Selector
from xjlibrary.our_file_dir import BaseDir

# Resolve db.ini relative to this script's own directory so the crawler works
# regardless of the current working directory it is launched from.
# (BaseDir is a project helper; presumably get_new_path joins path segments —
# TODO confirm against xjlibrary docs.)
curPath = BaseDir.get_file_dir_absolute(__file__)
configfile = BaseDir.get_new_path(curPath, "db.ini")


class DownVolIssue(object):
    """Download the "list of issues" page of a SIAM journal and record every
    volume/issue link it contains in the `volissue` MySQL table.

    Workflow (driven by main()): select_journal() yields pending journal URLs,
    set_url() + down_volissue() fetch and parse one journal's issue index,
    update_stat() marks the journal as processed.
    """

    def __init__(self):
        # Target URL for the next download; set per-journal via set_url().
        self.url = ""
        self.header = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "accept-encoding": "gzip, deflate, br",
            "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
            "cache-control": "no-cache",
            "pragma": "no-cache",
            "referer": "https://epubs.siam.org/journals",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36"
        }
        self.logger = facade.get_streamlogger()
        # DB connection configured from the module-level db.ini path.
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger)
        # One shared Session keeps cookies / keep-alive across requests.
        # (requests.session() is the deprecated lowercase alias of Session().)
        self.sn = requests.Session()
        # NOTE(review): scheme-less proxy entries — requests treats these as
        # plain HTTP proxies; confirm that matches the intended proxy setup.
        self.proxy = {
            "http": "192.168.30.176:8012",
            "https": "192.168.30.176:8012"
        }

    def set_url(self, url):
        """Set the URL that the next down_volissue() call will fetch."""
        self.url = url

    @staticmethod
    def _sql_quote(value):
        """Escape *value* for inline use inside a single-quoted SQL literal.

        Doubles single quotes (standard SQL) and escapes backslashes (MySQL
        treats backslash as an escape character by default). None is rendered
        as the string 'None', matching the previous str.format behavior.
        This is a stop-gap: prefer parameterized queries if MysqlUtiles
        exposes them.
        """
        return str(value).replace("\\", "\\\\").replace("'", "''")

    def down_volissue(self, journal):
        """Fetch self.url via the project request wrapper and parse it.

        journal: the journal URL key stored alongside each issue row.
        Exits the whole process with status -1 when the download fails,
        so a crash is visible to the scheduler instead of silently skipped.
        """
        ok, err_msg, resp = facade.BaseRequest(self.url,
                                               sn=self.sn,
                                               proxies=self.proxy,
                                               mark="no-top-border",
                                               headers=self.header,
                                               timeout=(30, 60),
                                               verify=False)
        if ok:
            self.para_home_page(resp, journal)
        else:
            self.logger.info("下载出错 请检查")
            sys.exit(-1)

    def para_home_page(self, r, journalurl):
        """Parse the issue-index HTML in *r* and upsert one row per issue.

        Each <ul> under #allIssuesPanel holds one volume heading plus its
        issue links; url/issue pairs are zipped from parallel node lists.
        """
        # Debug snapshot of the last downloaded page.
        BaseDir.single_write_file(r.text, "./test.html")
        selector = Selector(text=r.text)
        listul = selector.xpath('//div[@id="allIssuesPanel"]/div/div[2]/div/ul/ul')
        for ul in listul:
            vol = ul.xpath('.//li[contains(@class, "loiListHeading")]/text()').get()
            urllist = ul.xpath(".//a/@href").getall()
            issuetext = ul.xpath(".//a/text()").getall()
            for url, issue in zip(urllist, issuetext):
                # Values are interpolated into SQL text, so escape them:
                # issue titles routinely contain apostrophes, which previously
                # produced broken statements (and an injection vector).
                sql = "replace into `volissue` (`url`,`issue`,`vol`,`journalurl`) values('{}','{}','{}','{}')".format(
                    self._sql_quote(url), self._sql_quote(issue),
                    self._sql_quote(vol), self._sql_quote(journalurl)
                )
                self.mysqlutils.ExeSqlToDB(sql)

    def select_journal(self):
        """Return the rows (url,) of journals not yet processed (stat=0)."""
        sql = "select url from journal where stat=0"
        rows = self.mysqlutils.SelectFromDB(sql)
        return rows

    def update_stat(self, url):
        """Mark the journal identified by *url* as processed (stat=1)."""
        sql = "update journal set stat=1 where `url`='{}'".format(self._sql_quote(url))
        self.mysqlutils.ExeSqlToDB(sql)


def main():
    """Entry point: process every pending journal.

    For each journal row with stat=0, build its "list of issues" URL
    (the /journal/... path becomes /loi/...), download and store its
    volume/issue links, then flag the journal as done.
    """
    crawler = DownVolIssue()
    pending = crawler.select_journal()
    for record in pending:
        journal = record[0]
        issue_index_url = "https://epubs.siam.org/loi" + journal.replace("/journal", "")
        crawler.set_url(issue_index_url)
        crawler.down_volissue(journal)
        crawler.update_stat(journal)


if __name__ == "__main__":
    main()
