import json
import os
import time

import requests
from facade.loggerfacade import get_streamlogger
from facade.mysqlfacade import MysqlUtiles
from xjlibrary import mrequest
from xjlibrary.mdatetime.mtime import getTodayDate
from xjlibrary.our_file_dir import BaseDir

# Desktop-Chrome User-Agent string (spoofed) for outgoing requests.
UserAgent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36'

# Epoch time in milliseconds, captured once at import time.
# NOTE(review): `t` is never referenced anywhere in this file — looks like dead code.
t = int(round(time.time() * 1000))

# Resolve <three dirs up>/download/EI/download/json/bigjson relative to this
# file and make sure it exists; downloaded JSON lines are appended there.
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -3)
dirPath = BaseDir.get_new_path(TopPath, "download", "EI", "download", "json", "bigjson")
BaseDir.create_dir(dirPath)
# db.ini sits one directory above this file.
dbpath = BaseDir.get_upper_dir(curPath, -1)
configfile = BaseDir.get_new_path(dbpath, "db.ini")

# keytransformdicts = {"port": "db_port", "host": "db_host", "passwd": "db_pw", "user": "db_user",
#                      "db": "db_name",
#                      "chartset": "db_charset"}

# Shared logger and MySQL helper ("db" presumably names the [db] section of
# db.ini — confirm against MysqlUtiles).  Both are connected at import time.
logger = get_streamlogger()
myutil = MysqlUtiles(configfile, "db", logger=logger)

# Outbound proxy endpoints.  The dict key is the scheme of the TARGET site
# (requests convention), not of the proxy itself.
Proxies = {
    "http": "192.168.30.176:8207",
    "https": "192.168.30.176:8207"
}

# Headers for the initial HTML page request (picks up session cookies).
header = {
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
    "Accept-Encoding": "gzip, deflate, br",
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "Host": "www.engineeringvillage.com",
    "Pragma": "no-cache",
    "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36"
}

# Headers for the XHR/JSON endpoints.
headerdatail = {
    "Accept": "application/json, text/javascript, */*; q=0.01",
    "Accept-Encoding": "gzip, deflate, br",
    # FIX: was "h-CN,..." — a typo for the "zh-CN" language tag used above.
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "Content-Type": "application/json",
    "Host": "www.engineeringvillage.com",
    "Pragma": "no-cache",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
    "X-NewRelic-ID": "VQQAUldRCRAFUFFQBwgCUQ==",
    "X-Requested-With": "XMLHttpRequest"
}

# URL templates.  Each takes a document id such as
# cpx_7d688f89165582071f4M7b3e1017816339.
# Share page — establishes the session before the JSON endpoints answer.
detailedUrl = "https://www.engineeringvillage.com/share/document.url?mid={}&database=cpx&view=detailed"
# Detail record as JSON (includes the reference count).
detailedJsonUrl = "https://www.engineeringvillage.com/search/doc/detailed.url?content=true&docid={}&usageOrigin=share"
# References list as JSON; second placeholder is the page size.
refsUrl = "https://www.engineeringvillage.com/search/doc/refs.url?content=true&compendexajax=t&docid={}&SEARCHID=&database=1&DOCINDEX=&currPageNumber=1&searchtype=Quick&pageSize={}"


def down_detailed(cpxid):
    """Download the detail JSON and (when present) the references JSON for
    one document, and append both as a single JSON line to today's
    .big_json file under ``dirPath``.

    Args:
        cpxid: document id, e.g. "cpx_7d688f89165582071f4M7b3e1017816339".

    Returns:
        (True, refcount) on success, or (False, <error message>) on any
        request/parse failure.  The caller writes the message into the
        article.dd_fail_cause column.
    """
    sn = requests.Session()
    dicts = {}

    # Step 1: hit the share page first so the session collects whatever
    # cookies/redirects the site requires before the JSON endpoints answer.
    url = detailedUrl.format(cpxid)
    BoolResult, errString, r = mrequest.BaseRequest(url,
                                                    sn=sn,
                                                    proxies=Proxies,
                                                    endstring="",
                                                    headers=header,
                                                    allow_redirects=True,
                                                    timeout=60)
    if not BoolResult:
        logger.error(errString)
        return False, "请求详情页失败"

    # Step 2: fetch the detail JSON and read the reference count from it.
    urljson = detailedJsonUrl.format(cpxid)
    BoolResult, errString, r = mrequest.BaseRequest(urljson,
                                                    sn=sn,
                                                    proxies=Proxies,
                                                    endstring="",
                                                    headers=headerdatail,
                                                    timeout=60)
    if not BoolResult:
        logger.error(errString)
        return False, "请求详情页json失败"
    try:
        # int() up front so the -1 / <25 comparisons below cannot TypeError
        # if the site ever returns refcount as a string.
        num = int(json.loads(r.text)["result"]["abstractrecord"]["refcount"])
        dicts["detailed"] = r.text
    except (ValueError, TypeError, KeyError, json.JSONDecodeError):
        logger.error("json不是我们预想的，请检查")
        return False, "详情页json数据有误"

    def refsrequest(pagesize):
        """Fetch the references JSON into dicts['refs'].

        Returns (True, "") on success, (False, <message>) on failure.
        """
        refurl = refsUrl.format(cpxid, pagesize)
        ok, err, resp = mrequest.BaseRequest(refurl,
                                             sn=sn,
                                             proxies=Proxies,
                                             endstring="",
                                             headers=headerdatail,
                                             timeout=60)
        if not ok:
            logger.error(err)
            return False, "请求ref失败"
        try:
            has_refs = bool(json.loads(resp.text)["referenceBean"])
        except (ValueError, KeyError, json.JSONDecodeError):
            # Originally a missing "referenceBean" key crashed the worker;
            # treat it as a failed download instead.
            has_refs = False
        if not has_refs:
            return False, "ref json错误"
        dicts["refs"] = resp.text
        return True, ""

    print(num)
    returnnum = num
    if num == -1:
        # -1 refcount: the document has no reference list at all.
        dicts["refs"] = ""
    else:
        # Request everything in one page: at least 25 (the floor the original
        # code used — presumably the site minimum), otherwise refcount + 10
        # of headroom.
        pagesize = 25 if num < 25 else num + 10
        # BUG FIX: the original discarded refsrequest()'s return value, so a
        # failed references download was silently recorded as success.
        ok, msg = refsrequest(pagesize)
        if not ok:
            return False, msg
    filePath = os.path.join(dirPath, getTodayDate() + ".big_json")
    dicts["downtime"] = getTodayDate()
    jsons = json.dumps(dicts, ensure_ascii=False)
    BaseDir.single_add_file(filePath, jsons)
    return True, returnnum


def selectsql():
    """Fetch the next batch (up to 1000 rows) of articles whose detail page
    has not been downloaded yet (dd_stat = 0).

    Returns rows of (AccessionNumber, docid) as produced by SelectFromDB.
    """
    return myutil.SelectFromDB(
        "select AccessionNumber,docid from article where dd_stat=0 limit 1000")


if __name__ == "__main__":
    # Drain 1000-row batches until no article is left with dd_stat=0.
    while True:
        rows = selectsql()
        if not rows:
            break
        for AccessionNumber, docid in rows:
            ok, msg = down_detailed(docid)
            # NOTE(review): msg is interpolated directly into the SQL below.
            # Safe only while it stays one of the fixed internal messages —
            # switch to parameterized queries if ExeSqlToDB supports them.
            if ok:
                # Mark done; on success msg carries the reference count.
                updatesql = ("update article set dd_stat=1,dd_fail_cause='{}' "
                             "where AccessionNumber='{}'").format(msg, AccessionNumber)
            else:
                logger.error(msg)
                # Bump the failure counter so the row can be retried/triaged.
                updatesql = ("update article set dd_failcount = dd_failcount + 1,"
                             "dd_fail_cause='{}' where AccessionNumber='{}'").format(msg, AccessionNumber)
            myutil.ExeSqlToDB(updatesql)
