import json
import sys

import facade
import parsel
from xjlibrary.our_file_dir import BaseDir

# Directory containing this script; db.ini is expected to live alongside it.
# NOTE(review): BaseDir is a project helper — presumably returns absolute paths; confirm.
curPath = BaseDir.get_file_dir_absolute(__file__)
configfile = BaseDir.get_new_path(curPath, "db.ini")
# Walk up from this file (the -2 suggests two directory levels — TODO confirm),
# then down into the saved home-page HTML dumps to be parsed.
topPath = BaseDir.get_upper_dir(curPath, -2)
sPath = BaseDir.get_new_path(topPath, "download", "adks_cqjd", "download", "home")


class ParaHome(object):
    """Parse saved home-page HTML files and bulk-insert article metadata into MySQL.

    Each file under ``sPath`` is read, every article ``<dl>`` entry is parsed
    into a JSON metadata blob, and one row per article is inserted (INSERT IGNORE)
    into the ``article`` table.
    """

    def __init__(self):
        self.logger = facade.get_streamlogger()
        self.mysqlutiles = facade.MysqlUtiles(configfile, "db", logger=self.logger)

    def _parse_dl(self, dltag):
        """Extract one article's metadata from a ``<dl>`` selector.

        Returns a dict with image/href/title/author/abstract fields plus a
        per-episode ``dictsarticle`` mapping (href -> title), or ``None`` when
        the entry has no detail-page href (nothing usable to key the row on).
        """
        href = dltag.xpath('./dd/h3/a/@href').get()
        if not href:
            # Without an href we cannot derive rawid/url below; skip the entry
            # instead of crashing on None.split() as the row is unusable anyway.
            return None
        pinfotag = dltag.xpath('./dd/p[@class="info"]')
        abstract = dltag.xpath('./dd/p[@class="desc"]/text()').getall()
        abstract = "".join(abstract).replace("\n", "").replace(" ", "")
        # Per-episode links: map each episode's href to its title.
        dictsarticle = {
            atag.xpath('./@href').get(): atag.xpath('./text()').get()
            for atag in dltag.xpath('./dd/div/a')
        }
        return {
            "imagesrc": dltag.xpath('./dt/a/img/@src').get(),
            "href": href,
            "title": dltag.xpath('./dd/h3/a/text()').get(),
            "authorurl": pinfotag.xpath('./em[1]/a/@href').get(),
            "author": pinfotag.xpath('./em[1]/a/text()').get(),
            "subject": pinfotag.xpath('./em[2]/a/text()').get(),
            "times": pinfotag.xpath('./em[3]/text()').get(),
            "pub_time": pinfotag.xpath('./em[4]/text()').get(),
            "abstract": abstract,
            "dictsarticle": dictsarticle,
        }

    def parahome(self):
        """Parse every HTML file under ``sPath`` and insert its articles into MySQL."""
        for file in BaseDir.get_dir_all_files(sPath):
            html = BaseDir.single_read_file(file)
            selector = parsel.Selector(text=html)
            dllist = selector.xpath('//*[@id="wrapper"]/div/div[2]/div[1]/div/dl')
            list_value = []
            for dltag in dllist:
                dictsonejson = self._parse_dl(dltag)
                if dictsonejson is None:
                    self.logger.warning("skipping <dl> without href in %s" % file)
                    continue
                jsonmsg = json.dumps(dictsonejson, ensure_ascii=False)
                # rawid is the courseId query parameter at the end of the href.
                rawid = dictsonejson["href"].split("&")[-1].replace("courseId=", "")
                url = dictsonejson["href"]
                coverurl = dictsonejson["imagesrc"]
                authorurl = dictsonejson["authorurl"]
                list_value.append((rawid, url, jsonmsg, authorurl, coverurl))
            # INSERT IGNORE: re-running over the same files does not duplicate rows.
            sql = "insert ignore into article(rawid,url,vidmsg,authorurl,coverurl) values (%s,%s,%s,%s,%s)"
            self.mysqlutiles.ExeSqlMany(sql, list_value)


if __name__ == "__main__":
    # Script entry point: parse all downloaded home pages into the database.
    ParaHome().parahome()
