import copy
import datetime
import json
import os
import queue
import random
import time
import types
from multiprocessing.pool import Pool

import requests
from urllib3.exceptions import InsecureRequestWarning

import facade
from xjlibrary.newthreading.MThreadingRun import MThreadingRun
from xjlibrary.newthreading.mthreading import ThreadManager
from xjlibrary.our_file_dir import BaseDir

# Resolve working directories and create the output folder for raw JSON dumps.
curPath = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(curPath, -2)
# All crawled responses are appended under .../download/TandBook/download/big_json
dirPath = BaseDir.get_new_path(TopPath, "download", "TandBook", "download", "big_json")
BaseDir.create_dir(dirPath)
configfile = BaseDir.get_new_path(curPath, "db.ini")  # MySQL connection settings
# Requests below use verify=False, so silence the InsecureRequestWarning noise.
requests.urllib3.disable_warnings(InsecureRequestWarning)

# All subject categories on taylorfrancis.com, mapped code -> display name.
SUBJECTS_DICT = {
    'SCAS': 'Area Studies',
    'SCAR': 'Arts',
    'SCBE': 'Behavioral Sciences',
    'SCBS': 'Bioscience',
    'SCBU': 'Built Environment',
    'SCCS': 'Communication Studies',
    'SCCM': 'Computer Science',
    'SCDS': 'Development Studies',
    'SCEA': 'Earth Sciences',
    'SCEB': 'Economics, Finance, Business & Industry',
    'SCED': 'Education',
    'SCEC': 'Engineering & Technology',
    'SCAG': 'Environment & Agriculture',
    'SCFS': 'Food Science & Technology',
    'SCGE': 'Geography',
    'SCHS': 'Health and Social Care',
    'SCHU': 'Humanities',
    'SCIF': 'Information Science',
    'SCLA': 'Language & Literature',
    'SCLW': 'Law',
    'SCMA': 'Mathematics & Statistics',
    'SCME': 'Medicine, Dentistry, Nursing & Allied Health',
    'SCAH': 'Museum and Heritage Studies',
    'SCPC': 'Physical Sciences',
    'SCPI': 'Politics & International Relations',
    'SCRF': 'Reference & Information Science',
    'SCSN': 'Social Sciences',
    'SCSL': 'Sports and Leisure',
    'SCSP': 'Tourism, Hospitality and Events'
}
# Search-API payload template; each request fetches 10 records (limit=10).
# setPayloadField() fills in the subject code, the two timestamps and the
# pagination offset before every request.
PAYLOAD_DATA = {"keyword": "", "limit": 10, "offset": 0, "scoreOffset": 0.5,
                "sortCriteria": [{"type": "relevance", "order": "desc", "fields": ["_score", "datePublication"]}],
                "outputFields": ["categories", "classifications", "coverImages", "dacKey", "datePublication",
                                 "description", "edition", "firstPublishedOn", "formats.bindingStyle",
                                 "formats.bindingStyleCode", "formats.coverImages", "formats.datePublication",
                                 "formats.isbn13", "formats.isbnPdf", "formats.isbnEpub3", "formats.isbnEpub",
                                 "formats.isbnMobi", "formats.licensedEntities", "formats.status", "formats.statusCode",
                                 "formats.versionType", "formats.versionTypeCode", "imprint", "isbn13", "meta.abstract",
                                 "meta.contributors", "meta.doi", "meta.pdfSize", "meta.span", "meta.subtitle",
                                 "meta.title", "originators", "pages", "pdfSize", "subjectGroup", "subtitle", "title"],
                "fieldConfig": [{"name": "formats.isbn13", "boost": "40"}, {"name": "formats.isbnPdf", "boost": "40"},
                                {"name": "formats.isbnPdfFree", "boost": "40"},
                                {"name": "formats.isbnEpub3", "boost": "40"},
                                {"name": "formats.isbnEpub", "boost": "40"},
                                {"name": "formats.isbnMobi", "boost": "40"}, {"name": "formats.isbnDx", "boost": "40"},
                                {"name": "formats.classifications.classifications.stringValue", "boost": "40"},
                                {"name": "formats.keywords", "boost": "20"}, {"name": "keywords", "boost": "20"}],
                "filter": {"must": [{"range": {"datePublication": {"lte": "2018-07-27T15:23:41+08:00"}}}, {
                    "nested": {"path": "formats", "query": {"bool": {
                        "must": [{"term": {"formats.versionTypeCode.raw": "EBK"}},
                                 {"terms": {"formats.statusCode.raw": ["LFB", "VGR", "PLZ", "IHST", "WNN"]}}]}}}}, {
                                        "bool": {"should": [{"term": {"formats.isbn13.raw": ""}},
                                                            {"term": {"formats.isbnPdf.raw": ""}},
                                                            {"term": {"formats.isbnPdfFree.raw": ""}},
                                                            {"term": {"formats.isbnEpub3.raw": ""}},
                                                            {"term": {"formats.isbnEpub.raw": ""}},
                                                            {"term": {"formats.isbnMobi.raw": ""}},
                                                            {"term": {"formats.isbnDx.raw": ""}},
                                                            {"term": {"formats.keywords.raw": ""}}]}},
                                    {"terms": {"categories.code.raw": ["SCAS"]}}], "must_not": [],
                           "should": [{"exists": {"field": "formats.licensedEntities.raw"}}, {
                               "nested": {"path": "formats", "query": {"bool": {
                                   "must": [{"term": {"formats.versionTypeCode.raw": "EBK"}}, {
                                       "terms": {"formats.statusCode.raw": ["LFB", "VGR", "PLZ", "IHST", "WNN"]}}],
                                   "must_not": [{"terms": {
                                       "formats.classifications.classifications.code.raw": ["DRMY", "EBRRTL"]}}]}}}}]},
                "aggs": [{"fieldName": "datePublication",
                          "ranges": [{"key": "Upcoming", "from": "2018-04-13T15:23:41+08:00",
                                      "to": "2018-07-27T15:23:41+08:00"}], "type": "custom_date_range"},
                         {"fieldName": "categories.code", "type": "terms"},
                         {"fieldName": "imprint", "type": "terms", "limit": 3},
                         {"fieldName": "originators.originators.name.full", "type": "terms", "limit": 4}],
                "customAggs": {"datePublication": {"date_range": {"field": "datePublication", "ranges": [
                    {"key": "Older", "to": "2017-04-13T15:23:41+08:00"}]}},
                               "categoriesCode": {"terms": {"field": "categories.code.raw", "size": 10}},
                               "imprint": {"terms": {"field": "imprint.raw", "size": 3}},
                               "originators.originators.name.full": {
                                   "terms": {"field": "originators.originators.name.full.raw", "size": 4}}}}


class DownJson(object):
    """Crawler for Taylor & Francis ebook metadata.

    Pages through the search API one subject at a time, appends each raw
    JSON response to ``<subject>.big_json`` under ``dirPath``, and records
    crawl progress (total count / current offset) in the ``tbook`` MySQL
    table via SQL statements queued for batched execution.
    """

    def __init__(self):
        self.logger = facade.get_streamlogger()
        self.mysqlutils = facade.MysqlUtiles(configfile,
                                             "db",
                                             logger=self.logger)
        self.HOME_URL = "https://www.taylorfrancis.com/"
        # Template headers; each session gets its own copy (see getBookInfo).
        self.HEADERS = {
            'content-type': 'application/json',
            'origin': 'https://www.taylorfrancis.com',
            'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36'
        }
        self.API_URL = 'https://api.taylorandfrancis.com/v1/search/title'
        self.success_count = 0  # API responses with metadata.status == "Success"
        self.fail_count = 0     # failed API requests

    def getTask(self):
        """Fetch all (allnum, offset, subject) rows from tbook, largest first."""
        sql = "select `allnum`,`offset`,`subject` from `tbook` ORDER BY `allnum` DESC "
        rows = self.mysqlutils.SelectFromDB(sql)
        return rows

    # Refresh the id-token roughly every 10 requests.
    def updateHeaders(self, session, req_count):
        """
        Refresh the session's Authorization header from the home page's
        ``_token`` cookie once more than 10 requests have been made.

        :param session: the requests.Session being reused
        :param req_count: requests already made with the current token
        :return: 0 if the token was refreshed, otherwise ``req_count`` unchanged
        """
        if req_count > 10:
            attempt = 1
            while attempt < 5:  # up to 4 attempts
                try:
                    # verify=False: skip SSL certificate validation
                    resp = requests.get(self.HOME_URL, verify=False)
                    if resp.status_code == 200:
                        try:
                            token = resp.cookies['_token']
                            session.headers.update({'Authorization': 'idtoken ' + token})
                            return 0
                        except KeyError:
                            # cookie jar has no '_token' entry
                            print("获取_tokan失败")
                    else:
                        print("请求header失败")
                except requests.RequestException:
                    print("请求header失败")
                attempt += 1
        return req_count

    def control(self):
        """
        Sequential (non-threaded) driver: crawl every unfinished subject and
        flush its queued progress SQL after each subject completes.
        """
        while True:
            rows = self.getTask()
            if not rows:
                break
            for row in rows:
                offset = int(row[1])
                # +10 because the final page's offset is never written back
                if int(row[0]) <= int(row[1]) + 10 and row[0] != 0:
                    print(row[2] + "存在，跳过")
                    continue
                # BUG FIX: getBookInfo needs a thread-value object carrying a
                # result_queue; the original two-argument call raised TypeError.
                threadval = types.SimpleNamespace(result_queue=queue.Queue())
                self.getBookInfo(threadval, row[2], offset)
                sqls = []
                while not threadval.result_queue.empty():
                    sqls.append(threadval.result_queue.get())
                if sqls:
                    self.mysqlutils.ExeSqlListToDB(sqls)

    # POST one payload to the search API.
    def post2API(self, session, payload_str):
        """
        POST one search payload; counts each outcome exactly once.

        :param session: requests.Session carrying the Authorization header
        :param payload_str: JSON payload string from setPayloadField()
        :return: parsed response dict on success, otherwise False
        """
        time.sleep(random.random() * 4)  # random 0-4s politeness delay
        # NOTE(review): headers=self.HEADERS here may override the session's
        # Authorization header inside BaseRequestPost — verify facade behavior.
        BoolResult, errString, r = facade.BaseRequestPost(self.API_URL,
                                                          sn=session,
                                                          data=payload_str,
                                                          headers=self.HEADERS,
                                                          timeout=60,
                                                          endstring="",
                                                          verify=False)
        if not BoolResult:
            self.fail_count += 1
            self.logger.error('[5]>> Request API ERROR, Add Failed Payload into log')
            return False
        data_json = json.loads(r.text)
        # A valid result carries metadata.status == "Success".
        if (data_json.get('metadata') or {}).get('status') == "Success":
            self.success_count += 1
            return data_json
        return False

    def write2bigJson(self, json_text, subject_name):
        """Append one raw JSON response line to ``<subject_name>.big_json``."""
        file_path = os.path.join(dirPath, subject_name + ".big_json")
        with open(file_path, 'a', encoding='utf-8') as f:
            f.write(json_text + '\n')
            self.logger.debug('[12]>> Writing into a file %s.big_json' % subject_name)

    def getBookInfo(self, threadval, subject, offset):
        """
        Crawl all result pages for one subject starting at ``offset``.

        Each raw response is written to the subject's big_json file and a
        progress-update SQL statement is pushed onto ``threadval.result_queue``
        for batched execution by the caller.

        :param threadval: thread-value object exposing ``result_queue``
        :param subject: subject category code (e.g. ``'SCAS'``)
        :param offset: starting pagination offset
        """
        result_queue = threadval.result_queue
        while True:
            session = requests.Session()
            # BUG FIX: copy the dict — assigning self.HEADERS directly let
            # updateHeaders() mutate the headers shared by all threads.
            session.headers = dict(self.HEADERS)
            # req_count=11 forces an immediate token refresh; 11 back means it failed
            if self.updateHeaders(session, 11) == 11:
                break
            req_count = 0  # requests made with the current token
            payload_str = self.setPayloadField(subject, offset=offset)
            if offset >= 19990:
                break
            if not payload_str:
                # BUG FIX: original concatenated the boolean payload into the log
                # message (TypeError) and never broke out, spinning forever.
                self.logger.error('[11]>> Post payload ERROR, Subject: {}, offset: {}'.format(subject, offset))
                break
            data_json = self.post2API(session, payload_str)
            if not (data_json and data_json['data']):
                # Failure already counted inside post2API; retry the same
                # offset with a fresh session/token.
                self.logger.error('[5]>> Request API ERROR, Add Failed Payload into log')
                continue
            self.write2bigJson(json.dumps(data_json), subject)
            total_books = data_json["data"]['productCount']
            # Stop once the final page has been fetched.
            if offset + 10 >= int(total_books):
                break
            # NOTE(review): subject values come from our own DB, but
            # parameterized SQL would still be safer than string formatting.
            sql = "Update `tbook` set `allnum`={},`offset`={} where `subject`='{}'".format(int(total_books),
                                                                                           int(offset),
                                                                                           str(subject))
            result_queue.put(sql)
            if total_books and total_books > 10:
                while True:
                    offset += 10
                    payload_str_more = self.setPayloadField(subject, offset=offset)
                    req_count += 1
                    req_count = self.updateHeaders(session, req_count)
                    result_more = self.post2API(session, payload_str_more)
                    if not (result_more and result_more["data"]['resultSet']):
                        break
                    self.write2bigJson(json.dumps(result_more), subject)
                    sql = "Update `tbook` set `allnum`={},`offset`={} where `subject`='{}'".format(
                        int(total_books),
                        int(offset),
                        str(subject))
                    result_queue.put(sql)
                    # Rotate the session after >5 requests; stop at the last page.
                    if req_count > 5 or offset > (total_books - 10):
                        break

    # Build the payload for one request.
    def setPayloadField(self, subject, offset=0):
        """
        Build the JSON payload string for one search request.

        :param subject: subject category code (e.g. ``'SCAS'``)
        :param offset: pagination offset (10 records per page)
        :return: JSON payload string, or False on failure
        """
        try:
            now_time_stamp, later_time_stamp = self.getTimeStamp()
            # BUG FIX: work on a deep copy — 20 worker threads mutating the
            # shared module-level PAYLOAD_DATA raced with each other.
            payload = copy.deepcopy(PAYLOAD_DATA)
            payload['filter']['must'][-1]['terms']['categories.code.raw'][0] = subject
            payload['filter']['must'][0]['range']['datePublication']['lte'] = later_time_stamp
            payload['aggs'][0]['ranges'][0]['from'] = now_time_stamp
            payload['customAggs']['datePublication']['date_range']['ranges'][0]['to'] = now_time_stamp
            payload['offset'] = offset
            return json.dumps(payload)
        except Exception:
            # BUG FIX: was ``', offset:' + offset`` — TypeError for int offsets.
            self.logger.error('[13]>> Set payload field ERROR, Subject:' + subject + ', offset:' + str(offset))
            return False

    # Current date and the date 105 days ahead, used in the payload.
    def getTimeStamp(self):
        """
        Produce the two timestamps embedded in the payload.

        :return: tuple of (now, now + 105 days), each formatted as
                 ``%Y-%m-%dT%H:%M:%S+08:00``
        """
        now = datetime.datetime.now()
        later_now = now + datetime.timedelta(days=105)  # 105 days ahead
        return now.strftime('%Y-%m-%dT%H:%M:%S+08:00'), later_now.strftime('%Y-%m-%dT%H:%M:%S+08:00')


# if __name__ == "__main__":
#     down = DownJson()
#     down.control()


class JsonThreadRun(MThreadingRun):
    """Thread-pool driver: hands DownJson crawl jobs to worker threads and
    batches the resulting progress SQL back into MySQL."""

    def __init__(self, num):
        super(JsonThreadRun, self).__init__(num, is_static_max=False)
        self.down = DownJson()

    def getTask(self, *args, **kwargs):
        """Fetch (allnum, offset, subject) crawl tasks from the database."""
        return self.down.getTask()

    def setTask(self, results=None, *args, **kwargs):
        """Queue one crawl job per unfinished subject, then stop (one-shot)."""
        for row in (results or []):
            start_offset = int(row[1])
            # +10 because the final page's offset is never written back
            if int(row[0]) <= int(row[1]) + 10 and row[0] != 0:
                print(row[2] + "存在，跳过")
                continue
            self.add_job(self.func, row[2], start_offset)
        return "break"

    def dealresult(self, *args, **kwargs):
        """Flush the accumulated progress SQL statements in one batch."""
        self.down.mysqlutils.ExeSqlListToDB(self.results)

    def setProxy(self, proxysList=None):
        """No proxies are used; stop the proxy thread immediately."""
        return "break"

    def is_break(self):
        """Allow the framework to terminate once all jobs are queued."""
        return True

    def thread_pool_hook(self, thread_pool_dicts: dict, dictsfull: dict, *args, **kwargs):
        """Mark the one-shot task/proxy threads as non-restartable."""
        if dictsfull["threadname"] not in ("task_thread_1", "proxy_thread_1"):
            return
        thread = dictsfull["thread"]
        if not isinstance(thread, ThreadManager):
            raise TypeError("线程对象不是ThreadManager")
        thread.is_restart = False

    def fun(self, threadval, *args, **kwargs):
        """Worker entry: crawl subject ``args[0]`` starting at offset ``args[1]``."""
        subject, start_offset = args[0], args[1]
        self.down.getBookInfo(threadval, subject, start_offset)


def main():
    """Launch the crawl with a pool of 20 worker threads."""
    JsonThreadRun(20).run()


if __name__ == "__main__":
    main()
