import shutil

import facade
from bs4 import BeautifulSoup
from xjlibrary.database_moudel.simple.sqlite3cloent import sqliteEscape
from xjlibrary.our_file_dir.base_dir import BaseDir

# Resolve working directories relative to this file:
#   <top>/download/jstor/download/issue  - raw downloaded issue pages (input)
#   <top>/download/jstor/download/issue2 - archive for processed pages (output)
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "issue")  # volume
sPath2 = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "issue2")
# Buffer of SQL INSERT statements; flushed in batches by InsertSql().
ListSql = []
BaseDir.create_dir(sPath2)

# MySQL helper configured from db.ini located next to this script.
# NOTE(review): MysqlUtiles presumably opens the DB connection here — confirm.
configfile = BaseDir.get_new_path(curPath, "db.ini")
mysqlutils = facade.MysqlUtiles(configfile, "db")


def InsertSql():
    """Flush every buffered SQL statement to MySQL, then empty the buffer."""
    global ListSql
    pending = ListSql
    mysqlutils.ExeSqlListToDB(pending)
    # `pending` aliases the module-level list, so clearing it empties ListSql.
    pending.clear()


def parahtml(filePath):
    """Parse one downloaded JSTOR issue page and queue article INSERT statements.

    The file name encodes the issue URL with "_" substituted for "/"; it is
    reversed here and prefixed with "/stable" to rebuild `issueurl`.  Each
    matching <a> inside the article <ol> yields one INSERT IGNORE statement
    appended to the module-level ListSql buffer, which is flushed via
    InsertSql() every 10000 rows and once more at the end of the file.

    :param filePath: path to a saved issue HTML file
    """
    global ListSql
    print(filePath)
    filename = BaseDir.get_filename_not_extsep(filePath)
    # File names store the issue URL with "_" in place of "/".
    iurl = filename.replace("_", "/")
    issueurl = "/stable" + iurl
    # Use a context manager so the handle is always closed
    # (the original leaked the file object passed to BeautifulSoup).
    with open(filePath, 'r', encoding='utf-8') as fp:
        soup = BeautifulSoup(fp, "lxml")
    ol_tag = soup.find("ol", class_="no-bullet")
    if not ol_tag:
        return
    a_all_tag = ol_tag.find_all("a", class_="tt-track small-heading inline", attrs={"data-qa": "content title"})
    num = len(a_all_tag)
    for a_tag in a_all_tag:
        href = a_tag["href"]
        title = "".join(a_tag.stripped_strings)
        sql = "INSERT IGNORE INTO `article` (`url`,`title`,`issueurl`,`num`) VALUES('{url}','{title}','{issueurl}','{num}')"
        # Escape href as well: the original escaped only title/issueurl, so a
        # quote inside an href would break the statement (SQL injection risk).
        # num is len(...) — an int — and needs no escaping.
        sql = sql.format(url=sqliteEscape(href), title=sqliteEscape(title), issueurl=sqliteEscape(issueurl), num=num)
        ListSql.append(sql)
        if len(ListSql) >= 10000:
            InsertSql()
    InsertSql()


def main():
    """Parse every downloaded issue page, then archive it into sPath2."""
    pending_files = BaseDir.get_dir_all_files(sPath)
    for path in pending_files:
        parahtml(path)
        # Move the processed page aside so a rerun skips it.
        shutil.move(path, sPath2)


# Entry point: run the parse-and-archive pipeline when executed as a script.
if __name__ == "__main__":
    main()
