import sys

import facade
import parsel
from xjlibrary.our_file_dir import BaseDir

# Absolute directory of this script; the DB config (db.ini) is expected
# to live next to it.  BaseDir is a project-local path helper.
curPath = BaseDir.get_file_dir_absolute(__file__)
configfile = BaseDir.get_new_path(curPath, "db.ini")


class DownIssue(object):
    """Download ASTM volume/issue pages listed in the `volissue` table and
    extract the per-article links into the `article` table.

    Workflow: select() pulls pending rows (stat=0), fetches each issue page,
    and delegates HTML parsing + bulk insert to para().
    """

    def __init__(self):
        # Logger and MySQL helper are provided by the project-local facade;
        # db.ini next to this file holds the "db" connection section.
        self.logger = facade.get_streamlogger()
        self.mysqlutile = facade.MysqlUtiles(configfile, "db", logger=self.logger)
        # Browser-like request headers; "Referer" is filled in per request
        # inside select().
        self.headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
            "Accept-Encoding": "gzip, deflate",
            "Host": "www.astm.org",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36"
        }

    def select(self):
        """Process every pending issue row (stat=0) from `volissue`.

        Rows whose url contains "BOOKSTORE" are marked stat=-1 and skipped;
        rows whose page was fetched and parsed are marked stat=1; fetch
        failures are logged and left at stat=0 for a later retry.
        """
        sql = "select `url`,`title`,`sourcename`,`journal`,`jurl` from volissue where stat=0"
        rows = self.mysqlutile.SelectFromDB(sql)
        for row in rows:
            # row layout: (url, title, sourcename, journal, jurl)
            if "BOOKSTORE" in row[0]:
                # Bookstore entries are not journal issues — park them.
                # NOTE(review): SQL built by string formatting; safe while the
                # urls come from our own table, but breaks if a url ever holds
                # a single quote.  Prefer a parameterized call if MysqlUtiles
                # supports one — confirm against its API.
                sql = "update volissue set stat=-1 where url='{}'".format(row[0])
                self.mysqlutile.ExeSqlToDB(sql)
                continue
            # row[0] is relative: resolve it against the journal url (row[4])
            # with its last path segment dropped.
            baseurl = "/".join(row[4].split("/")[:-1])
            url = baseurl + "/" + row[0]
            self.headers["Referer"] = row[4]
            self.logger.info(url)
            # verify=False disables TLS certificate checks — presumably a
            # workaround for the target site; revisit if possible.
            BoolResult, errString, r = facade.BaseRequest(url,
                                                          headers=self.headers,
                                                          timeout=(30, 60),
                                                          verify=False)
            if BoolResult:
                # para() sys.exit()s on an unrecognized layout, so stat=1 is
                # only ever written after a successful parse.
                self.para(r.text, row, url)
                sql = "update volissue set stat=1 where url='{}'".format(row[0])
                self.mysqlutile.ExeSqlToDB(sql)
            else:
                # Was a bare print("下载失败"); route the failure through the
                # instance logger and keep the url + error detail.
                self.logger.warning("下载失败 %s: %s", url, errString)

    def para(self, html, row, url):
        """Parse one issue page and bulk-insert its article rows.

        html: response body of the issue page.
        row:  (url, title, sourcename, journal, jurl) tuple from volissue.
        url:  absolute url of the issue page (stored as `issueurl`).

        Exits the whole process via sys.exit() when the page layout is not
        recognized, so the new case can be added before re-running.
        """
        # NOTE(review): these split markers look like placeholder tokens
        # rather than real page markup — confirm they occur in live HTML.
        divstrings = html.split("<MCX_SNIPPET_START>")[-1]
        if divstrings:
            selector = parsel.Selector(divstrings)
            # Main content column: <div class="span8 main ...">
            divtag = selector.xpath('//div[contains(@class,"span8") and contains(@class,"main")]')
            if divtag:
                divstrings = divtag.get()
                datestrings = divstrings.split("<MCX_END_QUERY>")[-1]
                datestrings = datestrings.split("<mcx_end_query>")[-1]
                # (debug print(datestrings) removed — it dumped a whole HTML
                # chunk to stdout on every page)
                dateselecter = parsel.Selector(datestrings)
                alist = dateselecter.xpath("//p/a")
                publisherlist = dateselecter.xpath('//p[contains(string(.), "Published")]')
                # NOTE(review): zip() silently truncates when the two lists
                # differ in length — assumes exactly one "Published" <p> per
                # article link; verify against a real page.
                Listpara = []
                for atag, ptag in zip(alist, publisherlist):
                    href = atag.xpath("./@href").get()
                    text = atag.xpath("./text()").get()
                    publishertext = "".join(ptag.xpath("./text()").getall())
                    Listpara.append((href, text, publishertext, url, row[1], row[2], row[3]))
                # Parameterized bulk insert; "insert ignore" skips duplicates.
                sql = "insert ignore into article(`url`,`title`,`pubdate`,`issueurl`,`volissue`,`sourcename`,`journal`) values (%s,%s,%s,%s,%s,%s,%s)"
                self.mysqlutile.ExeSqlMany(sql, Listpara)
            else:
                sys.exit("没有定位到标签，检查并将新的情况加入逻辑")
        else:
            sys.exit("存在特殊情况，结束程序检查")


def main():
    """Entry point: fetch and parse every pending volume/issue page."""
    DownIssue().select()


if __name__ == "__main__":
    main()
