import json

import facade
from bs4 import BeautifulSoup
from xjlibrary.database_moudel.simple.sqlite3cloent import sqliteEscape
from xjlibrary.our_file_dir.base_dir import BaseDir

# Resolve paths relative to this file: climb two directories up to the
# project root, then descend into the downloaded jstor volume pages.
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "volume")  # volume
# Module-level batch buffer of SQL statements, flushed by InsertSql().
ListSql = []


# Database connection configured from db.ini (section "db") next to this file.
configfile = BaseDir.get_new_path(curPath, "db.ini")
mysqlutils = facade.MysqlUtiles(configfile, "db")




def InsertSql():
    """Flush the module-level ListSql batch to the database and reset it.

    Executes every queued statement via ``mysqlutils.ExeSqlListToDB`` and
    rebinds ``ListSql`` to a fresh empty list.
    """
    # Bug fix: the original declared ``global nCount`` but nCount is never
    # defined or referenced anywhere in this module — the declaration was
    # dead code and misleading, so it is removed.
    global ListSql
    mysqlutils.ExeSqlListToDB(ListSql)
    ListSql = []


def parahtml(filePath):
    """Parse one downloaded volume page and queue `issue` rows for insert.

    The file at *filePath* holds a JSON-encoded list whose first element is
    an HTML fragment.  Each ``<li class="langMatch">`` carries a volume
    label in its ``<span>`` and a nested ``<ul>`` of issue links; every
    issue link becomes a ``REPLACE INTO issue`` statement appended to the
    module-level ``ListSql``, flushed in batches of 100 and once more at
    the end of the file.

    The filename itself encodes a URL with '/' replaced by '_'; path
    segments 1 and 3 are rewritten to 'journal' and 'decade' to build
    ``volurl``.  Assumes the filename yields at least 4 segments — TODO
    confirm against the downloader's naming scheme.
    """
    global ListSql
    print(filePath)
    filename = BaseDir.get_filename_not_extsep(filePath)
    vurl = filename.replace("_", "/")
    vurllist = vurl.split("/")
    vurllist[1] = "journal"
    vurllist[3] = "decade"
    volurl = "/".join(vurllist)

    # The downloaded payload is JSON; element 0 is the HTML fragment.
    with open(filePath, 'r', encoding='utf-8') as f:
        html = json.loads(f.read())
    soup = BeautifulSoup(html[0], "lxml")
    # Total anchor count in the fragment, stored verbatim in each row.
    num = len(soup.find_all("a"))
    for li_tag in soup.find_all("li", class_="langMatch"):
        voltext = "".join(li_tag.span.stripped_strings)
        for li in li_tag.ul.find_all("li"):
            href = li.a["href"]
            text = "".join(li.a.stripped_strings)
            print(href)
            sql = "replace INTO `issue` (`url`,`volume`,`issue`,`volurl`,`num`) VALUES('{url}','{volume}','{issue}','{volurl}','{num}')"
            # Bug fix: escape ALL interpolated string fields.  The original
            # escaped only volume/issue, so a single quote in href or volurl
            # would break (or inject into) the generated SQL statement.
            sql = sql.format(url=sqliteEscape(href), volume=sqliteEscape(voltext),
                             issue=sqliteEscape(text), volurl=sqliteEscape(volurl),
                             num=num)
            ListSql.append(sql)
        # Flush in batches to bound memory and transaction size.
        if len(ListSql) >= 100:
            InsertSql()
    # Flush whatever remains for this file.
    InsertSql()


def main():
    """Parse every downloaded volume file found under sPath."""
    all_files = BaseDir.get_dir_all_files(sPath)
    for one_file in all_files:
        parahtml(one_file)


if __name__ == "__main__":
    # Script entry point: process all downloaded volume pages.
    main()
