import json
import os
import threading

import facade
import pymysql
from xjlibrary.MRabbitMQ.useMQ.nodemq import Distributer, dealThread, Collecter
from xjlibrary.configread import MyConfigParser
from xjlibrary.mdatetime.mtime import getTodayDate
from xjlibrary.mdatetime.mtime2 import MDateTimeUtils
from xjlibrary.network.MyIP import GetLocalIPByPrefix
from xjlibrary.our_file_dir.base_dir import BaseDir

# --- Paths and configuration -------------------------------------------------
# curPath: absolute directory of this file; db.ini lives one directory up.
curPath = BaseDir.get_file_dir_absolute(__file__)
config = BaseDir.get_new_path(BaseDir.get_upper_dir(curPath, -1), "db.ini")
# TopPath: project root, three directories up; downloaded page JSON is written
# under <root>/download/jstor/download/pagejson.
TopPath = BaseDir.get_upper_dir(curPath, -3)
sPath = BaseDir.get_new_path(TopPath, "download", "jstor", "download", "pagejson")
# Parse db.ini into a nested dict, preserving key case.
GolobalConfig = MyConfigParser().set_config_path(
    config).get_configparser().set_keep_keys_case().read_config().conf_to_dict()
# RabbitMQ queue names: center -> node (work items) and node -> center (results).
queueCenter2Node = GolobalConfig['get_article']['MQQueueCenter2Node']
MQQueueNode2Center = GolobalConfig['get_article']['MQQueueNode2Center']

# NOTE(review): disabled workaround that relaxed http.client's header-name
# validation (e.g. to allow a literal "path" header) — left for reference.
# import re
# import http.client
#
# http.client._is_legal_header_name = re.compile(rb'\A[^\s][^\r\n]*\Z').fullmatch

# Base request headers for www.jstor.org. NOTE(review): this dict is shared
# module state; down_page_func mutates it per request from worker threads.
Headers = {
    # "authority": "www.jstor.org",
    # "method": "GET",
    # "path": "/stable/42904207",
    # "scheme": "https",
    "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3",
    "accept-encoding": "gzip, deflate",
    "accept-language": "zh-CN,zh;q=0.9",
    "cache-control": "no-cache",
    "pragma": "no-cache",
    "upgrade-insecure-requests": "1",
    "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/71.0.3578.98 Safari/537.36"
    # random.choice(USER_AGENTS)
}


def down_page_func(threadval, proxylist, url):
    """Download one JSTOR article page, trying proxies until one succeeds.

    Pops proxies from ``proxylist`` one at a time and requests
    ``https://www.jstor.org<url>`` through each. On the first success the
    page is appended as one JSON line to a per-process/per-thread
    ``.big_json`` file under ``sPath`` and an UPDATE-to-stat=2 SQL string is
    queued; on exhaustion of all proxies a failcount-increment SQL string is
    queued instead.

    :param threadval: worker-thread context object; only ``result_queue``
        (queue of SQL strings shipped back to the center) is used.
    :param proxylist: list of "host:port" proxy strings (consumed via pop()).
    :param url: article path, e.g. "/stable/42904207".
    """
    result_queue = threadval.result_queue
    BoolResult = None
    while True:
        try:
            proxy = proxylist.pop()
        except IndexError:  # was a bare except: — only the empty-list pop should end the loop
            break
        if not proxy:
            continue
        proxys = {
            "http": proxy,
            "https": proxy
        }
        print(proxys)
        BaseUrl = "https://www.jstor.org"
        fullurl = BaseUrl + url
        # Copy the shared module-level Headers before mutating: this function
        # runs concurrently in many threads, and writing X-Forwarded-For/path
        # into the global dict is a data race between requests.
        headers = dict(Headers)
        headers["X-Forwarded-For"] = proxy.split(":")[0]
        headers["path"] = url
        BoolResult, errString, r = facade.BaseRequest(fullurl,
                                                      headers=headers,
                                                      mark="gaData.content",
                                                      proxies=proxys,
                                                      timeout=(30, 60),
                                                      verify=False)
        if BoolResult:
            content = r.content
            if content.startswith(b'\xef\xbb\xbf'):  # strip UTF-8 BOM
                content = content[3:]
            jsonText = content.decode('utf8').strip()

            # Flatten to a single line so each record is one line in the file.
            htmls = jsonText.replace('\0', ' ').replace(
                '\r', ' ').replace('\n', ' ') + '\n'

            sumDict = dict()
            sumDict['url'] = url
            sumDict["downdate"] = MDateTimeUtils.get_today_date_strings()
            sumDict['html'] = htmls

            if not os.path.exists(sPath):
                os.makedirs(sPath)

            # One output file per day/host/process/thread to avoid write clashes.
            outPathFile = os.path.join(sPath,
                                       '%s_%s_%d_%d.big_json' % (
                                           getTodayDate(), GetLocalIPByPrefix('192.168.'), os.getpid(),
                                           threading.get_ident())
                                       )
            print('Write to %s ...' % outPathFile)
            with open(outPathFile, mode='a', encoding='utf-8') as f:
                line = json.dumps(sumDict, ensure_ascii=False).strip() + '\n'
                f.write(line)

            # NOTE(review): SQL is built as a string because it travels through
            # the result queue as text; escape_string guards the interpolation.
            sql = "UPDATE article SET stat=2 WHERE `url`='%s'"
            sql = sql % pymysql.escape_string(url)
            result_queue.put(sql)
            # Done: without this break the loop kept burning the remaining
            # proxies re-downloading the same URL and queued duplicate updates.
            break

    if not BoolResult:
        sql = "UPDATE article SET failcount=failcount+1 WHERE `url`='%s'"
        sql = sql % pymysql.escape_string(url)
        result_queue.put(sql)


class downPage(dealThread):
    """Worker-thread adapter that feeds MQ tasks into down_page_func.

    Each task arriving from the queue is a JSON string carrying a proxy
    and an article URL; this class unpacks it and dispatches the download.
    """

    def __init__(self, num):
        # num: number of worker threads in the pool.
        super().__init__(num)

    def parameters(self, result):
        """Split one JSON task message into (args, kwargs) for fun()."""
        payload = json.loads(result)
        return [payload['proxy']], {'url': payload['url']}

    def thread_pool_hook(self, thread_pool_dicts, thread, args, kwargs):
        # No extra per-thread state is needed.
        return {}

    def fun(self, threadval, *args, **kwargs):
        """Thread body: download the page for one (proxy, url) task."""
        down_page_func(threadval, args[0], kwargs["url"])


if __name__ == "__main__":
    # Start the MQ distributer (center -> node work items) and collecter
    # (node -> center SQL results), then run the download worker pool.
    threadNum = GolobalConfig['get_article']['WorkerThreadNumber']
    Distributer(config, queueCenter2Node, 3).start()
    Collecter(config, MQQueueNode2Center).start()
    # Fix: use the configured worker-thread count; it was read but then
    # shadowed by a hard-coded 40 (the old "# int(threadNum)" comment
    # showed the original intent).
    down = downPage(int(threadNum))
    down.run(model=2)
