import json

import facade
from xjlibrary.mdatetime.mtime2 import MDateTimeUtils
from xjlibrary.our_file_dir import BaseDir

# Directory containing this script; all other paths are derived from it.
curPath = BaseDir.get_file_dir_absolute(__file__)
# MySQL connection settings are read from db.ini next to this script.
configfile = BaseDir.get_new_path(curPath, "db.ini")
# NOTE(review): presumably climbs two directory levels above curPath --
# confirm against BaseDir.get_upper_dir's handling of the -2 argument.
TopPath = BaseDir.get_upper_dir(curPath, -2)
# Output directory for downloaded article pages; created if missing.
sPath = BaseDir.get_new_path(TopPath, "download", "astmjournal", "download", "article")
BaseDir.create_dir(sPath)


class DownArticle(object):
    """Download pending ASTM article pages and archive them as JSON lines.

    Pending rows are those in the ``article`` table with ``stat=0`` and
    ``failcount < 3``.  Each successfully fetched page is appended, together
    with its metadata, as one JSON object per line to a date-named
    ``.big_json`` file under ``sPath``; the row is then marked ``stat=1``.
    Failures increment ``failcount`` so the row is retried up to 3 times.
    """

    def __init__(self):
        self.logger = facade.get_streamlogger()
        self.mysqlutile = facade.MysqlUtiles(configfile, "db", logger=self.logger)
        # Browser-like headers; presumably the site rejects default
        # library user agents -- TODO confirm.
        self.headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
            "Accept-Encoding": "gzip, deflate",
            "Host": "www.astm.org",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36"
        }

    @staticmethod
    def _escape_sql(value):
        """Double embedded single quotes so *value* is safe inside a quoted
        SQL string literal (MySQL accepts '' inside '...').

        NOTE(review): parameterized queries would be strictly safer if
        MysqlUtiles.ExeSqlToDB supports placeholders -- confirm.
        """
        return value.replace("'", "''")

    @staticmethod
    def _build_fullurl(url, issueurl):
        """Resolve a possibly-relative article *url* to an absolute one.

        A url containing ".." is resolved against *issueurl* by dropping the
        issue url's last two path segments; otherwise the url is taken as
        site-absolute and prefixed with the host.
        """
        if url.find("..") > -1:
            base = "/".join(issueurl.split("/")[:-2])
            return base + url.replace("..", "")
        return "https://www.astm.org" + url

    def select(self):
        """Process every pending article row: fetch, archive, update status."""
        sql = "select `url`,`title`,`pubdate`,`issueurl`,`volissue`,`sourcename`,`journal` from article where stat=0 and failcount < 3"
        rows = self.mysqlutile.SelectFromDB(sql)
        for row in rows:
            fullurl = self._build_fullurl(row[0], row[3])

            BoolResult, errString, r = facade.BaseRequest(fullurl,
                                                          headers=self.headers,
                                                          timeout=(30, 60),
                                                          mark="itemprop",
                                                          verify=False)
            if BoolResult:
                # Compute the date once so the record's "downdate" and the
                # output filename cannot disagree across a midnight boundary
                # (the original called get_today_date_strings() twice).
                today = MDateTimeUtils.get_today_date_strings()
                dicts = {
                    "downdate": today,
                    "html": r.text,
                    "pubdate": row[2],
                    "volissue": row[4],
                    "source": row[5],
                    "gch": row[6],
                    "url": fullurl,
                }
                jsonstrings = json.dumps(dicts)
                newpath = BaseDir.get_new_path(sPath, today + ".big_json")
                BaseDir.single_add_file(newpath, jsonstrings + "\n")
                sql = "update article set stat=1 where url='{}'".format(self._escape_sql(row[0]))
                self.mysqlutile.ExeSqlToDB(sql)
            else:
                # Log the failure with its cause instead of a bare print();
                # the row will be retried until failcount reaches 3.
                self.logger.error("download failed: %s (%s)", fullurl, errString)
                sql = "update article set failcount=failcount+1 where url='{}'".format(self._escape_sql(row[0]))
                self.mysqlutile.ExeSqlToDB(sql)


def main():
    """Script entry point: download every pending article once."""
    DownArticle().select()


# Run the downloader only when executed as a script, not on import.
if __name__ == "__main__":
    main()
