"""
下载下来的数据为当个文件存储 通过该程序将其转换为json后
到平台去解析
"""
import json
import sys

import facade
from xjlibrary.mdatetime.mtime2 import MDateTimeUtils
from xjlibrary.our_file_dir import BaseDir

# Directory layout — all paths are derived from this script's location.
# curpath: the directory containing this file; TopPath: two levels up.
curpath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curpath, -2)
# MySQL connection settings live next to this script.
configfile = BaseDir.get_new_path(curpath, "db.ini")
# Input: one HTML file per downloaded article.
articlePath = BaseDir.get_new_path(TopPath, "download", "intlpress", "download", "article")
# Processed article files are copied here after conversion.
articlePath2 = BaseDir.get_new_path(TopPath, "download", "intlpress", "download", "article2")
BaseDir.create_dir(articlePath2)
# Output: a single JSON-lines file accumulating one record per article.
articlePath3 = BaseDir.get_new_path(TopPath, "download", "intlpress", "download", "bigjson")
BaseDir.create_dir(articlePath3)
# Journal-level HTML pages, keyed by journal id (<jid>.html).
journalPath = BaseDir.get_new_path(TopPath, "download", "intlpress", "download", "journals")


class CreateJson(object):
    """Convert downloaded article HTML files into JSON-lines records.

    For every file under ``articlePath`` the matching row of the ``article``
    table is looked up by URL, merged with the journal HTML and today's date,
    appended as one JSON line to the shared ``all.big_json`` file, and the
    source file is copied to ``articlePath2``.
    """

    def __init__(self):
        # Stream logger plus a MySQL helper configured from db.ini ("db" section).
        self.logger = facade.get_streamlogger()
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger, cursorsnum=1)

    def select(self):
        """Walk every downloaded article file and emit one JSON line per file.

        The file name encodes ``<jid>_<vol>_<year>_<issue>``; those four parts
        are recombined into the article URL used as the DB lookup key.
        Files with unexpected names or no matching DB row are logged and
        skipped instead of aborting the whole run.
        """
        # Loop-invariant: all records go into one shared output file.
        big_path = BaseDir.get_new_path(articlePath3, "all.big_json")
        for file in BaseDir.get_dir_all_files(articlePath):
            filename = BaseDir.get_filename_not_extsep(file)
            listname = filename.split("_")
            if len(listname) < 4:
                # Original code raised IndexError here; skip malformed names.
                self.logger.warning("unexpected filename, skipped: %s", filename)
                continue
            url = "site/pub/pages/journals/items/{}/content/vols/{}/{}/{}/index.php".format(
                listname[0], listname[1], listname[2], listname[3])
            # NOTE(review): string-built SQL. The value comes from local file
            # names, so the risk is low, but switch to a parameterized query
            # if SelectFromDB supports placeholders.
            sql = "select * from article where url='{}'".format(url)
            rows = self.mysqlutils.SelectFromDB(sql)
            if not rows:
                # Original code raised IndexError on an empty result set;
                # log the miss and move on instead.
                self.logger.warning("no article row for url, skipped: %s", url)
                continue
            row = rows[0]
            jid = row["jid"]
            journalpath = BaseDir.get_new_path(journalPath, jid + ".html")
            journal = BaseDir.single_read_file(journalpath)
            dicts = {
                "downdate": MDateTimeUtils.get_today_date_strings(),
                "journalhtml": journal,
                "articlehtml": BaseDir.single_read_file(file),
                "title": row["title"],
                "pages": row["pages"],
                "vol": row["vol"],
                "year": row["year"],
                "issue": row["issue"],
                "jid": row["jid"],
                "url": row["url"],
            }
            # rawid: strip the fixed URL scaffolding down to a bare identifier.
            dicts["rawid"] = row["url"] \
                .replace("site/pub/pages/journals/items/", "") \
                .replace("/content/vols/", "").replace("/index.php", "").replace("/", "").replace("\"", "")
            jsonstrings = json.dumps(dicts, ensure_ascii=False)
            BaseDir.single_add_file(big_path, jsonstrings + "\n")
            BaseDir.copy_file_to_dir(file, articlePath2)


def main():
    """Entry point: convert all downloaded article files to JSON records."""
    CreateJson().select()


if __name__ == "__main__":
    main()
