import base64
import io
import os
import re
import sys
import traceback

import facade
from PIL import Image
from bs4 import BeautifulSoup
from xjlibrary.our_file_dir.base_dir import BaseDir

# Directory layout: this script sits two levels below the project root;
# downloaded journal pages and generated cover images live under download/jstor/.
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
# Source HTML pages to parse (one file per journal).
sPath = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "journals")
# Cover JPEGs keyed by the cover id taken from the file name.
coverPath = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "cover")
# Copies of the same covers keyed by ISSN/EISSN for identifier-based lookup.
issncoverPath = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "issncover")
# Pending SQL statements; flushed in batches by InsertSql().
ListSql = []

# Database connection configured from db.ini next to this script.
configfile = BaseDir.get_new_path(curPath, "db.ini")
mysqlutils = facade.MysqlUtiles(configfile, "db")


# # Database connection (legacy helper, kept for reference)
# def MajorDbConnect():
#     return MySqlDbConnect(curPath, "db.ini")


def InsertSql():
    """Flush all SQL statements accumulated in ListSql to the database
    and reset the buffer.

    Relies on the module-level `mysqlutils` connection helper; callers
    batch statements into `ListSql` and invoke this periodically.
    """
    # NOTE: the original also declared `global nCount`, but no such name is
    # defined or used anywhere in this file — dropped as dead code.
    global ListSql
    mysqlutils.ExeSqlListToDB(ListSql)
    ListSql = list()


def Img2Jpg(buf, dstFile):
    """Decode an in-memory image, resize it to the 108x150 cover size,
    and save it as a JPEG.

    Args:
        buf: raw image bytes (e.g. the base64-decoded payload of a data URI).
        dstFile: destination path for the JPEG file.

    Returns:
        True on success, False if decoding/resizing/saving failed.
    """
    print('Img2Jpg %s ...' % dstFile)

    try:
        srcImg = Image.open(io.BytesIO(buf))
        # Image.ANTIALIAS was deprecated and removed in Pillow 10;
        # Image.LANCZOS is the same resampling filter under its real name.
        dstImg = srcImg.resize((108, 150), Image.LANCZOS).convert('RGB')
        dstImg.save(dstFile, 'JPEG')
    except Exception:
        # Best-effort: report the failure and signal it via the return value
        # (a bare `except:` would also swallow KeyboardInterrupt/SystemExit).
        print('* %s' % traceback.format_exc())
        return False

    return True


def parahtml(filePath):
    """Parse one downloaded JSTOR journal page.

    Extracts the ISSN/EISSN, saves the inline cover image (and per-identifier
    copies), and queues `volume` replace / `journal` update SQL statements
    into the module-level ListSql buffer, flushing in batches of 100.

    Args:
        filePath: path to a journal HTML file. The file name encodes the
            journal URL with '_' as the path separator, and its last
            '_'-separated part is the cover id.
    """
    global ListSql
    print(filePath)
    filename = BaseDir.get_filename_not_extsep(filePath)
    cover_name = filename.split("_")[-1].split(".")[0]
    jurl = filename.replace("_", "/")
    # Close the file handle explicitly (the original leaked it to BeautifulSoup).
    with open(filePath, 'r', encoding='utf-8') as fp:
        soup = BeautifulSoup(fp, "lxml")

    def _identifier(css_class, prefix):
        # One-line purpose: pull the text of a "<prefix> NNNN-NNNN" div,
        # stripping the label; '' when the div is absent.
        tag = soup.find("div", class_=css_class)
        if not tag:
            return ''
        return "".join(tag.stripped_strings).replace(prefix, "").strip()

    issn = _identifier("issn mtm", "ISSN:")
    eissn = _identifier("eissn mtm", "EISSN:")

    try:
        # The cover <img src> is a data URI; the part after the comma is base64.
        strcover = soup.find("a", class_="cover-url").img["src"]
        convert_img_raw_data = base64.b64decode(strcover.split(",")[-1])
        BaseDir.create_dir(coverPath)
        BaseDir.create_dir(issncoverPath)
        outfile = os.path.join(coverPath, cover_name + ".jpg")
        if Img2Jpg(convert_img_raw_data, outfile):
            print("保存图片成功")
            # Mirror the cover under each identifier so ISSN/EISSN lookups work.
            for ident in (issn, eissn):
                if ident:
                    BaseDir.copy_file_to_file(outfile,
                                              os.path.join(issncoverPath, ident + ".jpg"))
        else:
            print("保存文件失败,请检查")
    except Exception:
        # Best-effort: a page without a usable cover must not abort the parse.
        print(traceback.format_exc())

    # Raw string for the regex (the original "\d{4}" triggers a
    # DeprecationWarning on modern Python).
    dl_all_tag = soup.find_all("dt", attrs={"data-collection": True,
                                            "data-decade": re.compile(r"\d{4}"),
                                            "class": True,
                                            "data-filter": True})
    num = len(dl_all_tag)
    # Queue the journal-level update once; the original re-queued the
    # identical statement for every decade, which is redundant (same final
    # DB state, fewer statements).
    # NOTE(review): these statements interpolate parsed HTML directly into
    # SQL — safe only as long as ExeSqlListToDB targets trusted scrape data;
    # a parameterized API would be preferable.
    if dl_all_tag:
        ListSql.append("update `journal` set `issn`='{}',`eissn`='{}' where url='{}'"
                       .format(issn, eissn, jurl))
    for dt in dl_all_tag:
        # Renamed from `filter` to avoid shadowing the builtin.
        decade_filter = dt["data-filter"]
        if not decade_filter:
            print("解析出现错误，请检查")
            sys.exit(-1)
        url = jurl + "/decade/" + decade_filter
        sql = ("replace INTO `volume`(`filter`,`url`,`num`,`journalurl`) "
               "values ('{filter}','{url}','{num}','{journalurl}')")
        ListSql.append(sql.format(filter=decade_filter, url=url,
                                  num=num, journalurl=jurl))
        if len(ListSql) >= 100:
            InsertSql()
    InsertSql()





def main():
    """Parse every downloaded journal HTML file found under sPath."""
    for html_file in BaseDir.get_dir_all_files(sPath):
        parahtml(html_file)


# Script entry point: walk the journals directory and parse each page.
if __name__ == "__main__":
    main()
