import os

import facade
import requests
from parsel import Selector
from xjlibrary.our_file_dir import BaseDir, ImageFile

# Module-level path setup: resolve locations relative to this source file
# and make sure the cover-image output directory exists before any download.
curPath = BaseDir.get_file_dir_absolute(__file__)
# DB connection settings are expected in db.ini next to this script.
configfile = BaseDir.get_new_path(curPath, "db.ini")
# NOTE(review): presumably walks two directory levels up from curPath —
# confirm the sign convention of BaseDir.get_upper_dir(-2).
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "siamjournal", "download", "cover")
BaseDir.create_dir(sPath)


class DownHomePage(object):
    """Download journal cover images from epubs.siam.org.

    Workflow: read journal URLs with ``coverstat=0`` from the ``journal``
    table, fetch each journal landing page, extract the cover <img> URL,
    save the image (cropped) plus a JPG copy under ``sPath``, and mark
    the row ``coverstat=1`` once the cover is stored.
    """

    def __init__(self):
        self.url = "https://epubs.siam.org"
        self.header = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "accept-encoding": "gzip, deflate, br",
            "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
            "cache-control": "no-cache",
            "pragma": "no-cache",
            "referer": "https://epubs.siam.org/journals",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36"
        }
        self.sn = requests.session()
        # NOTE(review): proxy entries carry no scheme ("http://host:port");
        # requests accepts the bare host:port form, but confirm the proxy
        # really speaks plain HTTP on 8012.
        self.proxy = {
            "http": "192.168.30.176:8012",
            "https": "192.168.30.176:8012"
        }
        self.logger = facade.get_streamlogger()
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger)

    def select_journal(self):
        """Return DB rows of ``(url,)`` for journals whose cover is missing."""
        sql = "select url from journal where coverstat=0"
        return self.mysqlutils.SelectFromDB(sql)

    def down_cover_page(self, journalurl):
        """Fetch a journal landing page and download its cover image.

        :param journalurl: journal path as stored in the DB; a "/journal"
            prefix is added when missing.
        Side effect: sets ``coverstat=1`` for the row only after the cover
        was successfully parsed and saved.
        """
        # Normalize relative paths that lack the "/journal" segment.
        path = journalurl if "journal" in journalurl else "/journal" + journalurl

        url = "{}{}".format(self.url, path)
        BoolResult, errString, r = facade.BaseRequest(url,
                                                      mark="view-inner",
                                                      sn=self.sn,
                                                      proxies=self.proxy,
                                                      headers=self.header,
                                                      timeout=(30, 60),
                                                      verify=False)
        if BoolResult and self.para_cover_page(r):
            # NOTE(review): string-formatted SQL. journalurl originates from
            # our own DB, but switch to a parameterized query if MysqlUtiles
            # supports placeholders.
            sql = "update journal set coverstat=1 where `url`='{}'".format(journalurl)
            self.mysqlutils.ExeSqlToDB(sql)

    def para_cover_page(self, r):
        """Extract the cover image URL from a landing page response.

        :param r: response object with a ``.text`` attribute (HTML).
        :return: True when the cover was downloaded and the temporary PNG
            files were cleaned up, False otherwise.
        """
        # NOTE(review): debug dump left in place to preserve behavior;
        # consider removing (writes into the current working directory).
        BaseDir.single_write_file(r.text, "./test.html")
        selector = Selector(text=r.text)
        coverurl = selector.xpath('//div[@class="view"]/div[@class="view-inner"]/img/@src').get()
        # Fix: .get() returns None when the page has no matching <img>;
        # previously this crashed in down_cover on `self.url + None`.
        if not coverurl:
            return False
        if self.down_cover(coverurl):
            # Drop the intermediate .png files once the .jpg copy exists.
            BaseDir.remove_file_suf(sPath, ".png")
            return True
        return False

    def down_cover(self, url):
        """Download one cover image, crop it and store a JPG copy.

        :param url: image path relative to the site root.
        :return: True when the image was fetched and written, else False.
        """
        # exist_ok avoids the check-then-create race of the old
        # os.path.exists() guard.
        os.makedirs(sPath, exist_ok=True)
        url = self.url + url

        BoolResult, errString, r = facade.BaseRequest(url,
                                                      endstring="",
                                                      sn=self.sn,
                                                      proxies=self.proxy,
                                                      headers=self.header,
                                                      timeout=(30, 60),
                                                      verify=False)
        if not BoolResult:
            return False
        coverPath = BaseDir.get_new_path(sPath, url.split("/")[-1].lower())
        print(coverPath)
        ImageFile.save_img(r.content, coverPath, "PNG")
        ImageFile.get_screen(coverPath, coverPath, 22, 70, 110, 160)
        # Fix: replace only the file extension. The old
        # coverPath.replace("png", "jpg") rewrote the first "png" anywhere
        # in the path, which corrupts names/dirs containing that substring.
        jpgPath = os.path.splitext(coverPath)[0] + ".jpg"
        ImageFile.img_to_format_jpg(BaseDir.single_read_rb_file(coverPath), jpgPath)
        return True


def main():
    """Entry point: download a cover for every journal still missing one."""
    downloader = DownHomePage()
    for record in downloader.select_journal():
        downloader.down_cover_page(record[0])


# Run only when executed as a script, not on import.
if __name__ == "__main__":
    main()
