import json

import facade
from xjlibrary.mdatetime.mtime2 import MDateTimeUtils

from xjlibrary.our_file_dir import BaseDir

# Directory containing this script; used to locate the adjacent db.ini.
curPath = BaseDir.get_file_dir_absolute(__file__)
# NOTE(review): presumably the project root two levels above this file —
# confirm against BaseDir.get_upper_dir's handling of the -2 argument.
TopPath = BaseDir.get_upper_dir(curPath, -2)
# Output directory for downloaded article detail records; created if missing.
sPath = BaseDir.get_new_path(TopPath, "download", "rucjournal", "download", "detail")
BaseDir.create_dir(sPath)
# MySQL connection settings file, expected next to this script.
dbconfigpath = BaseDir.get_new_path(curPath, "db.ini")


class DownDetail(object):
    """Download detail XML for pending rows of the ``article`` table.

    Rows with ``stat=0`` are fetched in batches of up to 1000. For each row
    the detail endpoint is requested through a local forward proxy; on
    success the record is appended as one JSON line to
    ``detail_article.big_html`` under ``sPath`` and the row is marked
    ``stat=1``. Failed rows are skipped and retried on a later batch.
    """

    def __init__(self):
        # Logger and MySQL helper come from the project-local 'facade' module.
        self.logger = facade.get_streamlogger()
        self.mysqlutils = facade.MysqlUtiles(dbconfigpath, "db", logger=self.logger, cursorsnum=1)
        # All HTTP traffic is routed through this local forward proxy.
        self.proxy = {
            "http": "192.168.30.176:8207",
            "https": "192.168.30.176:8207"
        }
        # Browser-like headers; Referer mimics a real detail-page visit.
        self.headers = {
            "Accept": "*/*",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "Host": "www.rdfybk.com",
            "Pragma": "no-cache",
            "Referer": "http://www.rdfybk.com/qw/detail?id=51883&kw=",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36"
        }

    def selectDb(self):
        """Process pending rows in batches until none remain.

        Stops when the table holds no more ``stat=0`` rows, or when an
        entire batch fails to download (otherwise the loop would re-select
        the same failing rows forever, since failed rows keep ``stat=0``).
        """
        while True:
            sql = "select rawid,jsonmsg from article where stat=0 limit 1000"
            rows = self.mysqlutils.SelectFromDB(sql)
            if not rows:
                self.logger.info("下载完成")
                break

            # Tracks whether at least one row in this batch succeeded; if
            # none did, the same rows would be re-selected endlessly.
            progressed = False
            for row in rows:
                rawid = row["rawid"]
                record = {
                    "id": rawid,
                    "downdate": MDateTimeUtils.get_today_date_strings(),
                    "jsonmsg": json.loads(row["jsonmsg"]),
                }

                # Detail endpoint (the non-PDF variant and the full-text
                # GetTextArt endpoint are currently not used).
                detail_url = "http://www.rdfybk.com/Qw/GetPdfBaseArt?id={}&kw=".format(rawid)
                ok, _err, resp = facade.BaseRequest(detail_url,
                                                    headers=self.headers,
                                                    endstring="",
                                                    proxies=self.proxy,
                                                    timeout=60)
                if not ok:
                    self.logger.info("下载出错")
                    continue
                record["basexml"] = resp.text
                # Full-text download is disabled; keep the key for schema
                # compatibility with downstream consumers.
                record["textxml"] = ''

                filepath = BaseDir.get_new_path(sPath, "detail_article.big_html")
                BaseDir.single_add_file(filepath, json.dumps(record, ensure_ascii=False) + "\n")
                # NOTE(review): rawid is interpolated into SQL. Acceptable only
                # if rawid is trusted/numeric — prefer a parameterized query if
                # MysqlUtiles supports one.
                sql = "update article set stat=1 where rawid='{}'".format(rawid)
                self.mysqlutils.ExeSqlToDB(sql)
                progressed = True

            if not progressed:
                # Every row in this batch failed; abort instead of spinning
                # on the same stat=0 rows forever.
                self.logger.info("batch made no progress, stopping to avoid infinite retry loop")
                break


if __name__ == "__main__":
    # Entry point: instantiate the downloader and drain the pending queue.
    DownDetail().selectDb()
