import logging
import os
import re
import sys
import time

import facade
from bs4 import BeautifulSoup
# "Disable insecure-request warnings" (NOTE: no suppression call is actually made in this module — verify intent)
from xjlibrary.our_file_dir import BaseDir

# Resolve filesystem paths relative to this script: curPath is the script's
# own directory, TopPath is two levels up, and sPath points at the directory
# where cover images are downloaded to.
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "cell", "download", "coverhome")

# Module-level scratch list (never read or written below — TODO confirm unused).
absurl = []
# Base URL of the target site; journal rows store site-relative paths.
BaseUrls = "https://www.cell.com"
# Browser-like headers sent with every request to avoid trivial bot blocking.
HEADERS = {'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8',
           'Accept-Encoding': 'gzip, deflate',
           'Connection': 'keep-alive',
           'Accept-Language': 'zh-CN,zh;q=0.9',
           'Cache-Control': 'max-age=0',
           'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/66.0.3359.139 Safari/537.36', }

# Counter and SQL buffer (never read or written below — TODO confirm unused).
nCount = 0
ListSqls = []
# Cover-issue-selector widget endpoint; formatted with (journalCode, volume).
urlcover = "https://www.cell.com/pb/widgets/cover-issue-selector/loiViewCoverIssues?journalCode={}&issueDisplayFilter=all-issues&format=&volume={}&filter=false"

# Shared MySQL helper configured from db.ini next to this script.
mysqlutils = facade.MysqlUtiles(BaseDir.get_new_path(curPath, "db.ini"), "db", facade.get_streamlogger())


def SelectListFromDB():
    """Fetch journal rows that still need a cover image.

    Returns:
        Rows of (`name`, `url`) from the `journal` table whose `stat` != 2,
        ordered by `cover_url` ascending, as produced by
        MysqlUtiles.SelectFromDB.
    """
    # Database query; reuses the module-level mysqlutils connection.
    # (Removed a `global ListSqls` declaration — the name was never assigned
    # or read in this function.)
    sSql = "SELECT `name`,`url` FROM `journal` WHERE `stat` != 2 order by `cover_url` asc"
    return mysqlutils.SelectFromDB(sSql)


# 测试文字输出到文本
# Debug helper: dump text to a file.
def output(value, files='log.txt'):
    """Write *value* to a UTF-8 text file, replacing any existing content.

    Args:
        value: String content to write.
        files: Target file path (defaults to 'log.txt').
    """
    # Context manager guarantees the handle is closed even if write() raises
    # (the original leaked the handle on error).
    with open(files, 'w', encoding='utf-8') as f:
        f.write(value)


def save_file(outfile, r):
    """Persist a response body to *outfile* as GB18030 text.

    Skips silently when the file already exists, so previously downloaded
    pages are never overwritten.

    Args:
        outfile: Destination path.
        r: Response object whose .content holds GB18030-encoded bytes.
    """
    if os.path.exists(outfile):
        return
    # Decode first so an invalid body fails before the file is created.
    text = r.content.decode("GB18030")
    with open(outfile, mode='w', encoding='GB18030') as sink:
        sink.write(text)


def souphtml(r):
    """Extract (journalCode, volume) from a journal archive page.

    Args:
        r: HTTP response whose .text is the archive page HTML.

    Returns:
        (journalCode, volume) as strings on success, else (False, False).
    """
    soup = BeautifulSoup(r.text, 'lxml')
    # NOTE(review): assumes the page always has <input id="gptSite"> —
    # soup.find returning None would raise TypeError here; confirm upstream.
    journalCode = soup.find("input", id="gptSite")["value"]
    vol_tag = soup.find("li", class_="group volume-header")
    if not vol_tag:
        return False, False
    voltext = "".join(vol_tag.stripped_strings)
    # Raw string avoids invalid-escape warnings; the explicit None check
    # replaces the original `.groups()` call that crashed with
    # AttributeError whenever the "Volume N (YYYY)" text was absent.
    match = re.search(r"Volume (\d+) \((\d{4})\)", voltext)
    if match is None:
        return False, False
    return journalCode, match.group(1)


def souphtmlcover1(r):
    """Try to pull a cover image URL directly from a journal home page.

    Args:
        r: HTTP response whose .text is the journal home page HTML.

    Returns:
        (True, image_url) when the cover <img> is present, else (False, "").
    """
    soup = BeautifulSoup(r.text, 'lxml')
    div_tag = soup.find("div", class_="coverImageCont coverCaption")
    if div_tag is None:
        return False, ""
    try:
        # div_tag.img is None (-> TypeError on subscript) or lacks "src"
        # (-> KeyError) when the markup differs; both mean "no cover".
        # Narrowed from the original bare `except:` which also swallowed
        # KeyboardInterrupt/SystemExit.
        return True, div_tag.img["src"]
    except (TypeError, KeyError):
        return False, ""


def souphtmlcover(r):
    """Extract the cover image URL from the cover-issue-selector widget HTML.

    Args:
        r: HTTP response whose .text is the widget response HTML.

    Returns:
        The image's src attribute, or "" when no cover image is present.
    """
    soup = BeautifulSoup(r.text, 'lxml')
    div_tag = soup.find("div", class_="coverImage")
    try:
        # div_tag may be None (AttributeError), its find() may return None
        # (TypeError on subscript), or the tag may lack "src" (KeyError).
        # Narrowed from the original bare `except:`.
        return div_tag.find("img")["src"]
    except (AttributeError, TypeError, KeyError):
        return ""


def get_list_url(rows):
    """Find each journal's cover image URL and store it in the database.

    Strategy per journal:
      1. Fetch the journal home page and look for an inline cover image.
      2. Otherwise fetch the journal's /archive page, parse out the
         journalCode and latest volume, query the cover-issue-selector
         widget, and take the cover image from there.

    Args:
        rows: Iterable of (name, url) tuples from the `journal` table;
              url is a site-relative path.
    """
    # (Removed `global absurl, nCount` — neither name is read or written
    # anywhere in this function.)
    for name, url in rows:
        urlsql = url  # original path, kept for the UPDATE ... WHERE clause
        BoolResult, errString, r = facade.BaseRequest(BaseUrls + url,
                                                      headers=HEADERS,
                                                      endstring="</html>",
                                                      timeout=(30, 60))
        if not BoolResult:
            continue
        ResultBool, img_url = souphtmlcover1(r)
        if ResultBool:
            # NOTE(review): SQL built via str.format — a quote in img_url
            # breaks/injects the statement; parameterize if MysqlUtiles
            # supports placeholders.
            sql = "update `journal` set `cover_url`='{}' where `url`='{}'".format(img_url, urlsql)
            mysqlutils.ExeSqlToDB(sql)
            continue
        # Fall back to the journal's archive page: .../<tail> -> .../archive
        list_url = url.split("/")
        list_url.pop()
        list_url.append("archive")
        url = BaseUrls + "/".join(list_url)
        BoolResult, errString, r = facade.BaseRequest(url,
                                                      headers=HEADERS,
                                                      endstring="</html>",
                                                      timeout=(30, 60))
        if not BoolResult:
            # Bug fix: the original only checked status_code inside
            # `if BoolResult:` and then parsed the failed/stale response
            # anyway; skip the journal like the first request does.
            continue
        if r.status_code == 404:
            continue
        journalCode, vol = souphtml(r)
        if journalCode and vol:
            url = urlcover.format(journalCode, vol)
            BoolResult, errString, r = facade.BaseRequest(url,
                                                          headers=HEADERS,
                                                          endstring="",
                                                          timeout=(30, 60))
            if BoolResult:
                img_url = souphtmlcover(r)
                sql = "update `journal` set `cover_url`='{}' where `url`='{}'".format(img_url, urlsql)
                mysqlutils.ExeSqlToDB(sql)
                time.sleep(5)  # throttle requests to the site
        else:
            print("no journalCode or no vol")


def main(logger1: logging.Logger = None):
    """Entry point: load pending journal rows and fill in their cover URLs.

    Args:
        logger1: Optional logger, published as the module-level `logger`.
    """
    global logger
    logger = logger1
    pending = SelectListFromDB()
    get_list_url(pending)


# Allow running this module directly as a standalone script.
if __name__ == "__main__":
    main()
