import facade
from bs4 import BeautifulSoup
from xjlibrary.our_file_dir import BaseDir

# Absolute directory containing this script.
curpath = BaseDir.get_file_dir_absolute(__file__)
# NOTE(review): get_upper_dir(..., -2) presumably climbs two directory levels -- confirm against xjlibrary.
TopPath = BaseDir.get_upper_dir(curpath, -2)
# Folder holding the downloaded HTML "list" pages that UpArticle parses.
dirPath = BaseDir.get_new_path(TopPath, "download", "gzlg_botu", "download", "list")
# Database connection settings live next to this script.
configfile = BaseDir.get_new_path(curpath, "db.ini")


class UpArticle(object):
    """Parse downloaded HTML list pages and batch-insert article rows.

    Each page is expected to contain a ``<span id="DataList1">`` element
    whose ``<a>`` children link to articles; one ``insert ignore`` row
    (rawid, bookname) is generated per link.
    """

    def __init__(self):
        # Stream logger and DB helper come from the project-local facade.
        self.logger = facade.get_streamlogger()
        # "db" is the section name inside db.ini -- TODO confirm.
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger)

    def select(self):
        """Walk every downloaded list page under dirPath and import its links."""
        for file in BaseDir.get_dir_all_files(dirPath):
            self.get_data(file)

    def get_data(self, filepath):
        """Extract article links from one HTML file and insert them in one batch.

        rawid is taken from the text after the first "=" in the href;
        bookname is the (escaped) link text. Pages without the expected
        container, and malformed links, are skipped rather than crashing.
        """
        htmlText = BaseDir.single_read_file(filepath)
        soup = BeautifulSoup(htmlText, 'lxml')
        div = soup.find('span', id='DataList1')
        # Some pages simply lack the list container; that is the only
        # expected failure here, so test for it explicitly instead of a
        # bare except that would also swallow KeyboardInterrupt/SystemExit.
        if div is None:
            return
        sqlList = []
        for ta in div.find_all('a'):
            url = ta.get("href")
            name = ta.get_text().strip()
            name = self.mysqlutils.escape_string(name)
            # hrefs are expected to look like "...?...=<rawid>"; skip
            # malformed links instead of raising IndexError on the split.
            if not url or "=" not in url:
                self.logger.warning("skipping link without rawid: %s", url)
                continue
            rawid = url.split("=")[1]
            # NOTE(review): SQL is built by string interpolation; `name` is
            # escaped via escape_string but `rawid` is not -- prefer a
            # parameterized API if MysqlUtiles offers one.
            sql = "insert ignore into article (rawid,bookname) values (%s,%s)" % (str(rawid), name)
            sqlList.append(sql)
        # Avoid a pointless DB round trip for pages with no usable links.
        if sqlList:
            self.mysqlutils.ExeSqlListToDB(sqlList)


# Script entry point: import article links from all downloaded list pages.
if __name__ == "__main__":
    UpArticle().select()
