import json
import os
import time
import traceback
from urllib import parse

import requests
from facade.loggerfacade import get_streamlogger
from facade.mysqlfacade import MysqlUtiles
from xjlibrary.mdatetime.mtime import getTodayDate
from xjlibrary.our_file_dir import BaseDir

# Browser User-Agent installed on the crawling session (see down_homepage()).
UserAgent = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36'

# Millisecond timestamp captured once at import time; reused as the "_"
# cache-buster value in the search Form below.
t = int(round(time.time() * 1000))

# Resolve <top>/download/EI/download/json/bigjson relative to this file and
# make sure it exists; downloaded detail JSON gets appended there.
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -3)
dirPath = BaseDir.get_new_path(TopPath, "download", "EI", "download", "json", "bigjson")
BaseDir.create_dir(dirPath)
# db.ini lives one directory above this file.
dbpath = BaseDir.get_upper_dir(curPath, -1)
configfile = BaseDir.get_new_path(dbpath, "db.ini")

# Maps the key names MysqlUtiles expects to the key names used in db.ini.
keytransformdicts = {"port": "db_port", "host": "db_host", "passwd": "db_pw", "user": "db_user",
                     "db": "db_name",
                     "chartset": "db_charset"}

logger = get_streamlogger()
# Shared MySQL helper built from the [db] section of db.ini (connects at import).
myutil = MysqlUtiles(configfile, "db", keytransformdicts=keytransformdicts, logger=logger)

# Proxy used for every request in this module.
Proxies = {
    "http": "192.168.30.176:8207",
    "https": "192.168.30.176:8207"  # the key is the scheme of the target site
}

# Browser-like headers for HTML page requests (used by the commented-out
# abstract.url request; kept for parity with a real Chrome session).
header = {
    "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
    "Accept-Encoding": "gzip, deflate, br",
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "Host": "www.engineeringvillage.com",
    "Pragma": "no-cache",
    "Upgrade-Insecure-Requests": "1",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36"
}

# Headers for the JSON detail request (detailed.url), mimicking the site's own
# XHR calls.  Fix: "Accept-Language" was "h-CN,..." — a typo for "zh-CN,..."
# (compare the `header` dict above).
headerdatail = {
    "Accept": "application/json, text/javascript, */*; q=0.01",
    "Accept-Encoding": "gzip, deflate, br",
    "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
    "Cache-Control": "no-cache",
    "Connection": "keep-alive",
    "Content-Type": "application/json",
    "Host": "www.engineeringvillage.com",
    "Pragma": "no-cache",
    "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/67.0.3396.62 Safari/537.36",
    # Site-specific tracking headers copied from a live browser session.
    "X-NewRelic-ID": "VQQAUldRCRAFUFFQBwgCUQ==",
    "X-Requested-With": "XMLHttpRequest"
}

# Quick-search form for search/submit.url.  NOTE: this module-level dict is
# mutated in place by downone() (searchWord1 / startYear / endYear are
# overwritten per request); the timestamp values are fixed at import time.
Form = {
    "usageOrigin": "searchform",
    "usageZone": "quicksearch",
    "editSearch": "",
    "isFullJsonResult": "true",
    "angularReq": "true",
    "CID": "searchSubmit",
    "searchtype": "Quick",
    "origin": "searchform",
    "category": "quicksearch",
    "section1": "NO-LIMIT",
    "searchWord1": "",   # filled with the accession number by downone()
    "database": "1",
    "yearselect": "yearrange",
    "startYear": "1884",
    "endYear": "2019",
    "updatesNo": "1",
    "language": "NO-LIMIT",
    "doctype": "NO-LIMIT",
    "sort": "relevance",
    "autostem": "true",
    "treatmentType": "NO-LIMIT",
    "searchStartTimestamp": str(int(round(time.time() * 1000))),
    "_": str(t)  # cache-buster, same import-time millisecond timestamp
}

# Form for search/doc/detailed.url (the record-detail JSON).  SEARCHID is a
# placeholder overwritten by downone() with the id extracted from the search
# redirect URL.
FormDetail = {
    "content": "true",
    "SEARCHID": "84a10deeM197dM4f71M87d7M2e56ff29f9db",
    "DOCINDEX": "1",
    "database": "1",
    "pageType": "quickSearch",
    "searchtype": "Quick",
    "dedupResultCount": "null",
    "format": "quickSearchDetailedFormat",
    "usageOrigin": "recordpage",
    "usageZone": "abstracttab"
}

# Form for search/doc/abstract.url.  Not used by the active code path (the
# abstract request in downone() is commented out); kept for reference.
FormAbs = {
    # "content": "true",
    "": "",
    "pageType": "quickSearch",
    "usageZone": "resultslist",
    "usageOrigin": "searchresults",
    "searchtype": "Quick",
    "SEARCHID": "",      # filled from the search redirect URL
    "DOCINDEX": "1",
    "ignore_docid": "",  # filled with the docid of the first result
    "database": "1",
    "format": "quickSearchAbstractFormat",
    "tagscope": "",
    "displayPagination": "yes"
}


def down_homepage():
    """Open a fresh session and fetch the EV homepage to acquire cookies.

    Returns:
        requests.Session: a session primed with the site's cookies, or
        False on any network error / non-200 status.
    """
    sn = requests.Session()
    sn.headers['User-Agent'] = UserAgent

    r = None
    try:
        url = r'https://www.engineeringvillage.com/'  # homepage
        print(url)
        r = sn.get(url, proxies=Proxies, timeout=60)
    except requests.RequestException:
        # Narrowed from a bare except: only network-level failures are expected here.
        print('* ' + traceback.format_exc())
        return False
    finally:
        # `r is not None` (not truthiness): a Response with status >= 400 is
        # falsy, and the original `and r` check leaked those connections.
        if r is not None:
            r.close()

    print(r.url)
    if r.status_code != 200:
        print('error url:' + r.url)
        print('r.status_code:' + str(r.status_code))
        return False
    print("首页访问完成")
    return sn


def _build_query(form):
    """Join a form dict into a raw (unescaped) query string, preserving order.

    Deliberately no URL-encoding: the original code sent the values verbatim
    and all values used here are plain ASCII tokens.
    """
    return "&".join(key + "=" + value for key, value in form.items())


def downone(sn, year, accnum):
    """Search EV for accession number *accnum* (years 1884..*year*) and save
    the detail JSON of the first hit.

    Args:
        sn: live requests.Session from down_homepage().
        year: end year for the search range (str or int).
        accnum: accession number used as the search word.

    Returns:
        True on success, None when the search returns no results,
        "getsn" when the session must be re-established, False otherwise.
    """
    # NOTE: Form is a module-level dict; this mutates it in place (original
    # behavior — fine because requests are issued sequentially).
    form = Form
    form['searchWord1'] = accnum
    form['startYear'] = '1884'
    form['endYear'] = str(year)

    # --- step 1: submit the quick search -------------------------------
    r = None
    exMsg = None
    try:
        url = r'https://www.engineeringvillage.com/search/submit.url'
        url = url + "?" + _build_query(Form)
        r = sn.get(url=url, proxies=Proxies, allow_redirects=True, timeout=60)
    except Exception:
        exMsg = '* ' + traceback.format_exc()
        print(exMsg)
    finally:
        # `is not None`, not truthiness: a Response with status >= 400 is
        # falsy and would otherwise leak its connection.
        if r is not None:
            r.close()

    if exMsg:
        return "getsn"

    if r.status_code != 200:
        print('error url:' + r.url)
        print('r.status_code:' + str(r.status_code))
        return "getsn"

    print(r.url)
    print("搜索成功")
    # The SEARCHID is carried in the query string of the redirect target URL.
    try:
        param_dict = parse.parse_qs(parse.urlparse(r.url).query)
        SEARCHID = param_dict['SEARCHID'][0]
    except (KeyError, IndexError):
        print("返回的url错误 重新请求首页")
        return "getsn"

    # No hits for this accession number: nothing to download, nothing to retry.
    if not json.loads(r.text)["results"]:
        return

    # --- step 2: fetch the detailed record ------------------------------
    FormDetail["SEARCHID"] = SEARCHID
    r = None
    exMsg = None
    try:
        url = r'https://www.engineeringvillage.com/search/doc/detailed.url'
        url = url + "?" + _build_query(FormDetail)
        r = sn.get(url=url, proxies=Proxies, headers=headerdatail, allow_redirects=True, timeout=60)
    except Exception:
        exMsg = '* ' + traceback.format_exc()
        print(exMsg)
    finally:
        if r is not None:
            r.close()

    if exMsg:
        return False
    print(r.url)
    if r.status_code != 200:
        print('error url:' + r.url)
        print('r.status_code:' + str(r.status_code))
        return False

    # Sanity check that the JSON payload really is a detail record.
    if not json.loads(r.text)["ckhighlighting"]:
        return False

    print("请求datail成功")
    # Append the raw JSON to today's .big_json file.
    filePath = os.path.join(dirPath, getTodayDate() + ".big_json")
    BaseDir.single_add_file(filePath, r.text)
    return True


# downone("2019", "20184205956497")
"""
Download via the search form.  Now abandoned — replaced by downloading
through composed direct links.
"""
if __name__ == "__main__":
    while True:
        # Next batch of articles not yet downloaded and with fewer than 3 failures.
        sql = "select AccessionNumber from article where dd_stat=0 and dd_failcount<3  limit 1000"
        rows = myutil.SelectFromDB(sql)
        if not rows:
            break
        sn = down_homepage()  # fresh session (cookies) per batch
        for row in rows:
            accnum = row[0]
            print(accnum)
            result = downone(sn, "2019", accnum)
            if result == "getsn":
                # Session went stale: rebuild it.  The current accnum is left
                # untouched (dd_stat stays 0) and gets re-selected next batch.
                sn = down_homepage()
            elif result == True:
                # Downloaded and saved: mark done.
                sql = "update article set dd_stat=1 where AccessionNumber='{}'".format(accnum)
                myutil.ExeSqlToDB(sql, errExit=True)
            else:
                # False (request/parse failure) or None (no results): count a failure.
                sql = "update article set dd_failcount = dd_failcount + 1 where AccessionNumber='{}'".format(accnum)
                myutil.ExeSqlToDB(sql, errExit=True)