import os
import re
import time

import facade
from bs4 import BeautifulSoup
from xjlibrary.our_file_dir import BaseDir

# Directory containing this script; db.ini is expected to live next to it.
curPath = BaseDir.get_file_dir_absolute(__file__)
# Two directory levels above this script (project root for the download tree).
TopPath = BaseDir.get_upper_dir(curPath, -2)
# Folder holding the downloaded volume-listing HTML files to be parsed.
sPath = BaseDir.get_new_path(TopPath, "download", "cdxg_ams", "download", "volume")
# Buffer of pending SQL statements, flushed in batches by InsertIntoDbFromList().
ListSqls = []
# NOTE(review): declared `global` in InsertIntoDbFromList but never updated anywhere
# in this file — appears to be dead state.
nCount = 0
# DB connection settings, read from the "db" section of db.ini.
configfile = BaseDir.get_new_path(curPath, "db.ini")
logger = facade.get_streamlogger()
# NOTE(review): building MysqlUtiles at import time is a module-level side effect
# (presumably opens a DB connection) — confirm this is intended.
mysqlutils = facade.MysqlUtiles(configfile, "db", logger)




# Insert buffered rows into the database.
def InsertIntoDbFromList():
    """Flush all SQL statements buffered in ListSqls to the DB in one batch,
    then reset the buffer.

    Side effects: executes against the module-level `mysqlutils` connection
    and rebinds the module-level `ListSqls`.
    """
    global ListSqls  # nCount was declared global here but never used — removed
    mysqlutils.ExeSqlListToDB(ListSqls)
    ListSqls = []


def GetFileName(filePath):
    """Parse one downloaded volume-listing HTML file and queue REPLACE
    statements for the ams_volume table into the module-level ListSqls buffer.

    filePath: path to an HTML file; its base name without extension becomes
    the `name` column (special case: "amsm" rows carry only a title).
    Flushes the buffer to the DB whenever it reaches 100 statements.
    """
    global ListSqls
    # ".../amsm.html" -> "amsm": last path component, extension stripped.
    filepathlist = filePath.split(os.sep)
    filename = filepathlist.pop()
    filename = filename.split(os.extsep)[0]
    # Use a context manager so the file handle is closed (the original leaked it);
    # BeautifulSoup consumes the whole stream inside the constructor.
    with open(filePath, 'r', encoding='utf-8') as fp:
        soup = BeautifulSoup(fp, "lxml")
    article_tag = soup.find('article', class_="decade-list volume-list")
    div_tag = article_tag.find_all("div", class_="slider")
    for div in div_tag:
        a = div.find("a", class_="expander")
        a_string = "".join(a.stripped_strings)
        # Raw string avoids invalid-escape SyntaxWarnings; e.g. "Volume 12 (1999)".
        tupledata = re.search(r"Volume (\d+) \((\d{4})\)", a_string).groups()
        volume, year = tupledata
        div_row_tag = div.find_all("div", class_="row")
        num = len(div_row_tag)  # number of issue rows under this volume
        for row in div_row_tag:
            href = row.a['href']
            list_string_list = list(row.stripped_strings)
            title = ""
            issue = ""
            times = ""
            page = ""
            if filename == "amsm":
                # Monograph listing: only a title after the link text.
                title = list_string_list[1]
            else:
                issue = list_string_list[1]
                times = list_string_list[2].strip()
                # e.g. "January 1999" -> "19990101"
                times = time.strftime("%Y%m%d", time.strptime(times, '%B %Y'))
                page = list_string_list[3]

            # NOTE(review): values are interpolated straight into the SQL text;
            # a title/page containing a quote will break the statement (SQL
            # injection surface). Prefer parameterized queries if MysqlUtiles
            # supports them — left as-is because ExeSqlListToDB takes raw SQL.
            sql = "replace INTO ams_volume(`name`, `volume`,`year`,`url`,`title`,`issue`, `time`, `page`,`num`) VALUES('{name}', '{volume}','{year}','{url}','{title}','{issue}', '{time}', '{page}','{num}') ".format(
                name=filename, volume=volume, year=year, url=href, title=title, issue=issue, time=times, page=page,
                num=num)
            print(sql)
            ListSqls.append(sql)
        # Flush in batches of 100 to bound memory between files.
        if len(ListSqls) >= 100:
            InsertIntoDbFromList()


def main():
    """Parse every downloaded file under sPath and flush remaining SQL rows."""
    parsed_count = 0
    for file_path in BaseDir.get_dir_all_files(sPath):
        print(file_path)
        GetFileName(file_path)
        parsed_count += 1
        print("完成解析文件数量{}".format(parsed_count))
    # Push whatever is still buffered after the final file (< 100 statements).
    InsertIntoDbFromList()


# Script entry point: only run the crawl/parse pipeline when executed directly.
if __name__ == "__main__":
    main()
