#!/usr/bin/env python 
# -*- coding: utf-8 -*- 
# @Time : 2018-04-13 12:34 
# @Author : Woolei
# @File : Step1_getBookISBN.py

import requests
import pymysql
import json
import datetime
import time
import logging

from TandfBook.ProjectConfig import *

# Subject-category codes accepted by the taylorfrancis.com search API, mapped
# to their human-readable names. The code (key) is substituted into
# PAYLOAD_DATA's `categories.code.raw` filter by setPayloadField().
SUBJECTS_DICT = {
    'SCAS': 'Area Studies',
    'SCAR': 'Arts',
    'SCBE': 'Behavioral Sciences',
    'SCBS': 'Bioscience',
    'SCBU': 'Built Environment',
    'SCCS': 'Communication Studies',
    'SCCM': 'Computer Science',
    'SCDS': 'Development Studies',
    'SCEA': 'Earth Sciences',
    'SCEB': 'Economics, Finance, Business & Industry',
    'SCED': 'Education',
    'SCEC': 'Engineering & Technology',
    'SCAG': 'Environment & Agriculture',
    'SCFS': 'Food Science & Technology',
    'SCGE': 'Geography',
    'SCHS': 'Health and Social Care',
    'SCHU': 'Humanities',
    'SCIF': 'Information Science',
    'SCLA': 'Language & Literature',
    'SCLW': 'Law',
    'SCMA': 'Mathematics & Statistics',
    'SCME': 'Medicine, Dentistry, Nursing & Allied Health',
    'SCAH': 'Museum and Heritage Studies',
    'SCPC': 'Physical Sciences',
    'SCPI': 'Politics & International Relations',
    'SCRF': 'Reference & Information Science',
    'SCSN': 'Social Sciences',
    'SCSL': 'Sports and Leisure',
    'SCSP': 'Tourism, Hospitality and Events'
}

# Template for the search API's POST body (appears to be an Elasticsearch-style
# query captured from the site — TODO confirm against the live endpoint).
# setPayloadField() mutates four slots in place before each request:
#   - filter.must[-1].terms['categories.code.raw'][0]  -> current subject code
#   - filter.must[0].range.datePublication.lte         -> now + 105 days
#   - aggs[0].ranges[0].from                           -> now
#   - customAggs.datePublication.date_range.ranges[0].to -> now
# plus the top-level "offset" used for 10-per-page pagination.
# NOTE: this is shared mutable state — not safe for concurrent use.
PAYLOAD_DATA = {"keyword": "", "limit": 10, "offset": 0, "scoreOffset": 0.5,
                "sortCriteria": [{"type": "relevance", "order": "desc", "fields": ["_score", "datePublication"]}],
                "outputFields": ["categories", "classifications", "coverImages", "dacKey", "datePublication",
                                 "description", "edition", "firstPublishedOn", "formats.bindingStyle",
                                 "formats.bindingStyleCode", "formats.coverImages", "formats.datePublication",
                                 "formats.isbn13", "formats.isbnPdf", "formats.isbnEpub3", "formats.isbnEpub",
                                 "formats.isbnMobi", "formats.licensedEntities", "formats.status", "formats.statusCode",
                                 "formats.versionType", "formats.versionTypeCode", "imprint", "isbn13", "meta.abstract",
                                 "meta.contributors", "meta.doi", "meta.pdfSize", "meta.span", "meta.subtitle",
                                 "meta.title", "originators", "pages", "pdfSize", "subjectGroup", "subtitle", "title"],
                "fieldConfig": [{"name": "formats.isbn13", "boost": "40"}, {"name": "formats.isbnPdf", "boost": "40"},
                                {"name": "formats.isbnPdfFree", "boost": "40"},
                                {"name": "formats.isbnEpub3", "boost": "40"},
                                {"name": "formats.isbnEpub", "boost": "40"},
                                {"name": "formats.isbnMobi", "boost": "40"}, {"name": "formats.isbnDx", "boost": "40"},
                                {"name": "formats.classifications.classifications.stringValue", "boost": "40"},
                                {"name": "formats.keywords", "boost": "20"}, {"name": "keywords", "boost": "20"}],
                "filter": {"must": [{"range": {"datePublication": {"lte": "2018-07-27T15:23:41+08:00"}}}, {
                    "nested": {"path": "formats", "query": {"bool": {
                        "must": [{"term": {"formats.versionTypeCode.raw": "EBK"}},
                                 {"terms": {"formats.statusCode.raw": ["LFB", "VGR", "PLZ", "IHST", "WNN"]}}]}}}}, {
                                        "bool": {"should": [{"term": {"formats.isbn13.raw": ""}},
                                                            {"term": {"formats.isbnPdf.raw": ""}},
                                                            {"term": {"formats.isbnPdfFree.raw": ""}},
                                                            {"term": {"formats.isbnEpub3.raw": ""}},
                                                            {"term": {"formats.isbnEpub.raw": ""}},
                                                            {"term": {"formats.isbnMobi.raw": ""}},
                                                            {"term": {"formats.isbnDx.raw": ""}},
                                                            {"term": {"formats.keywords.raw": ""}}]}},
                                    {"terms": {"categories.code.raw": ["SCAS"]}}], "must_not": [],
                           "should": [{"exists": {"field": "formats.licensedEntities.raw"}}, {
                               "nested": {"path": "formats", "query": {"bool": {
                                   "must": [{"term": {"formats.versionTypeCode.raw": "EBK"}}, {
                                       "terms": {"formats.statusCode.raw": ["LFB", "VGR", "PLZ", "IHST", "WNN"]}}],
                                   "must_not": [{"terms": {
                                       "formats.classifications.classifications.code.raw": ["DRMY", "EBRRTL"]}}]}}}}]},
                "aggs": [{"fieldName": "datePublication",
                          "ranges": [{"key": "Upcoming", "from": "2018-04-13T15:23:41+08:00",
                                      "to": "2018-07-27T15:23:41+08:00"}], "type": "custom_date_range"},
                         {"fieldName": "categories.code", "type": "terms"},
                         {"fieldName": "imprint", "type": "terms", "limit": 3},
                         {"fieldName": "originators.originators.name.full", "type": "terms", "limit": 4}],
                "customAggs": {"datePublication": {"date_range": {"field": "datePublication", "ranges": [
                    {"key": "Older", "to": "2017-04-13T15:23:41+08:00"}]}},
                               "categoriesCode": {"terms": {"field": "categories.code.raw", "size": 10}},
                               "imprint": {"terms": {"field": "imprint.raw", "size": 3}},
                               "originators.originators.name.full": {
                                   "terms": {"field": "originators.originators.name.full.raw", "size": 4}}}}

logging.basicConfig(level=logging.WARNING,  # minimum level emitted to the console
                    format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
                    datefmt='%a, %d %b %Y %H:%M:%S')

# Crawl counters, updated inside getBookInfo() via `global`.
success_count = 0
fail_count = 0

# Visited once by updateHeaders() to pick up the '_token' auth cookie.
HOME_URL = "https://www.taylorfrancis.com/"


# 等到105天后的日期和当前日期，用于构造post请求中的payload字段
# Build the pair of timestamps (now, now + 105 days) used in the POST payload.
def getTimeStamp():
    """Return (now, now_plus_105_days) formatted as '%Y-%m-%dT%H:%M:%S+08:00'."""
    fmt = '%Y-%m-%dT%H:%M:%S+08:00'
    current = datetime.datetime.now()
    future = current + datetime.timedelta(days=105)
    return current.strftime(fmt), future.strftime(fmt)


# 更新HEADERS信息
# Refresh the session's Authorization header from the site's '_token' cookie.
def updateHeaders(session):
    """GET the home page and, on success, copy its '_token' cookie into an
    'Authorization: idtoken <token>' header on *session*.

    The original version silently ignored a non-200 response (bare `pass`)
    and would raise KeyError if the cookie was absent; both cases are now
    logged so that downstream 401s are explainable.
    """
    req = session.get(HOME_URL)
    if req.status_code == 200 and '_token' in req.cookies:
        session.headers.update({'Authorization': 'idtoken ' + req.cookies['_token']})
    else:
        logging.warning('updateHeaders: could not obtain _token cookie (HTTP %s)',
                        req.status_code)


# 构造payload字段
# Build the JSON payload string for one subject / pagination offset.
def setPayloadField(subject, offset=0):
    """Patch the shared PAYLOAD_DATA template in place (subject filter, date
    bounds, pagination offset) and return it serialized as a JSON string.

    Returns False on failure; the failed (subject, offset) pair is appended
    to setPayloadError.log.
    """
    try:
        now_time_stamp, later_time_stamp = getTimeStamp()
        PAYLOAD_DATA['filter']['must'][-1]['terms']['categories.code.raw'][0] = subject
        PAYLOAD_DATA['filter']['must'][0]['range']['datePublication']['lte'] = later_time_stamp
        PAYLOAD_DATA['aggs'][0]['ranges'][0]['from'] = now_time_stamp
        PAYLOAD_DATA['customAggs']['datePublication']['date_range']['ranges'][0]['to'] = now_time_stamp
        PAYLOAD_DATA['offset'] = offset
        return json.dumps(PAYLOAD_DATA)
    except Exception:
        # str(offset): offset is an int, so the original `subject + '##' + offset`
        # raised a TypeError inside the error handler itself.
        with open('setPayloadError.log', 'a') as f:
            f.write(subject + '##' + str(offset) + '\n')
        logging.exception('Set payload field ERROR, Subject:%s, offset:%s', subject, offset)
        return False


# 过滤出书籍信息
# Extract the ISBN fields and first usable cover URL from a book's formats.
def filterBookInfo(book_item_list):
    """Pick isbn13 / isbnMobi / isbnPdf and the first valid cover URL out of
    the list of format dicts returned by the API.

    Returns a 4-tuple of strings; fields not found come back as ''.
    """
    book_isbn13, book_isbnMobi, book_isbnPdf, book_cover = '', '', '', ''
    if book_item_list:
        try:
            for book_item in book_item_list:
                if book_item.get('isbn13'):
                    book_isbn13 = book_item['isbn13']
                if book_item.get('isbnMobi'):
                    book_isbnMobi = book_item['isbnMobi']
                if book_item.get('isbnPdf'):
                    book_isbnPdf = book_item['isbnPdf']
                # Keep only the FIRST cover URL seen. The original reset
                # book_cover to '' on every iteration, so the last item's
                # cover always won and the '//.' validity check was dead code.
                if not book_cover and book_item.get('coverImages'):
                    candidate = book_item['coverImages'][0]
                    if '//.' not in candidate:  # skip URLs pointing at a broken file
                        book_cover = candidate
        except Exception:
            logging.exception('filter book info ERROR')
    else:
        logging.warning('Book info is None')
        with open('./log/book_info_none.log', 'a') as f:
            # `or []` / str(): the original joined book_item_list directly,
            # which raises TypeError when it is None.
            book_info_str = '##'.join(str(i) for i in (book_item_list or []))
            f.write(book_info_str + '\n\n')
    return book_isbn13, book_isbnMobi, book_isbnPdf, book_cover


# 保存至数据库
# Persist one book's ISBNs and cover URL into the `book` and `cover` tables.
def save2SQL(conn, isbn13, isbnMobi, isbnPdf, cover):
    """INSERT IGNORE the ISBN triple into `book` and the cover URL into
    `cover`, committing both together.

    Uses parameterized queries: the original interpolated scraped strings
    straight into the SQL text, which breaks on embedded quotes and is
    SQL-injectable.
    """
    try:
        with conn.cursor() as cursor:
            cursor.execute(
                'INSERT IGNORE INTO book(isbn13, isbnMobi, isbnPdf) VALUES(%s, %s, %s)',
                (isbn13, isbnMobi, isbnPdf))
            # NOTE(review): many cover URLs reportedly fail to resolve —
            # stored anyway, as before.
            cursor.execute(
                'INSERT IGNORE INTO cover(isbn13, url) VALUES(%s, %s)',
                (isbn13, cover))
        conn.commit()
    except Exception:
        with open('./log/SQL_ERROR.log', 'a') as f:
            f.write(isbn13 + '#' + isbnMobi + '#' + isbnPdf + '#' + cover + '\n')
        logging.exception('Save into SQL ERROR, %s#%s#%s#%s', isbn13, isbnMobi, isbnPdf, cover)


# 请求API接口
# POST one payload to the search API; return the parsed JSON on success.
def post2API(session, payload_str, retry=False):
    """Send *payload_str* to GET_ISBN_API_URL and return the decoded response
    dict when the API reports status 'Success', otherwise False.

    Failed payloads are appended to a replay log: fail_payload_retry.log when
    this call is itself a retry, fail_payload.log otherwise. (The original
    exception path always wrote to fail_payload.log regardless of *retry*,
    which let checkFailLog re-feed its own failures forever.)
    """
    time.sleep(1)  # crude rate limiting between API hits
    try:
        req = session.post(url=GET_ISBN_API_URL, data=payload_str, headers=COMMENT_HEADERS)
        print(req.status_code)
        if req.status_code == 200 and req.text:
            data_json = json.loads(req.text)
            # .get() chains avoid a KeyError when 'metadata' is missing.
            if data_json.get('metadata', {}).get('status') == "Success":
                return data_json
            return False
    except Exception:
        logging.exception('Request API raised')
    # Shared failure path: bad HTTP status, empty body, or exception above.
    log_name = './log/fail_payload_retry.log' if retry else './log/fail_payload.log'
    with open(log_name, 'a') as f:
        f.write(payload_str + '\n')
    prefix = 'RETRY >> ' if retry else ''
    logging.error(prefix + 'Request API ERROR, Add Failed Payload into log')
    return False


# Crawl every result page for one subject and store the book records.
def getBookInfo(session, subject, conn):
    """Fetch the first page for *subject*, then page through the remaining
    results 10 at a time, saving each book's ISBNs/cover via save2SQL.
    Updates the module-level success_count / fail_count counters.
    """
    global success_count, fail_count
    payload_str = setPayloadField(subject, 0)
    if not payload_str:
        # Original fell through here with `result` unbound -> NameError on
        # the next line; bail out instead.
        fail_count += 1
        return
    result = post2API(session, payload_str)
    if not result:
        logging.error('Post payload ERROR >>> ' + payload_str)
        return
    total_books = result["data"]['productCount']
    for item in result["data"]['resultSet']:
        book_isbn13, book_isbnMobi, book_isbnPdf, book_cover = filterBookInfo(item['formats'])
        logging.debug('Already got book info: %s-%s-%s-%s',
                      book_isbn13, book_isbnMobi, book_isbnPdf, book_cover)
        # NOTE(review): the first page is filtered but never saved (the
        # save2SQL call was commented out upstream) — confirm intent.
    print(total_books)
    if total_books and total_books > 10:
        offset = 0
        while True:
            offset += 10
            payload_str_more = setPayloadField(subject, offset=offset)
            # Guard: setPayloadField returns False on failure; don't POST it.
            result_more = post2API(session, payload_str_more) if payload_str_more else False
            if result_more and result_more["data"]['resultSet']:
                for item in result_more["data"]['resultSet']:
                    book_isbn13, book_isbnMobi, book_isbnPdf, book_cover = filterBookInfo(item['formats'])
                    logging.debug('Already got more book info:%s-%s-%s-%s',
                                  book_isbn13, book_isbnMobi, book_isbnPdf, book_cover)
                    save2SQL(conn, book_isbn13, book_isbnMobi, book_isbnPdf, book_cover)
                    success_count += 1
            logging.warning('Success %s, Fail Count %s' % (success_count, fail_count))
            if offset > total_books:
                logging.debug('Current Subject API End')
                break


# 解决请求失败的Payload.log日志文件中的字段
# Replay every payload recorded in fail_payload.log and save the results.
def checkFailLog(session, conn):
    """Re-POST each previously failed payload line; recovered books are
    stored with save2SQL. Payloads that fail again are appended to
    fail_payload_retry.log by post2API(retry=True) instead of being re-added
    to the file we are reading.
    """
    # Iterate the file object directly. The original used readline() and
    # tested for a blank line BEFORE testing for EOF — readline() returns ''
    # at EOF, which matched the blank-line `continue`, so the loop never hit
    # its `break` and spun forever. The file was also never closed.
    with open('./log/fail_payload.log') as log_file:
        for payload_line in log_file:
            if not payload_line.strip():
                continue
            result = post2API(session, payload_str=payload_line, retry=True)
            if result and result["data"]['resultSet']:
                for item in result["data"]['resultSet']:
                    book_isbn13, book_isbnMobi, book_isbnPdf, book_cover = filterBookInfo(item['formats'])
                    logging.debug('Failed Payload retry:%s-%s-%s-%s',
                                  book_isbn13, book_isbnMobi, book_isbnPdf, book_cover)
                    save2SQL(conn, book_isbn13, book_isbnMobi, book_isbnPdf, book_cover)


# Entry point: crawl every subject, retry failed payloads, then clean up.
def start():
    """Open an authenticated session and DB connection, crawl all subjects,
    then replay failed payloads before closing the connection."""
    session = requests.Session()
    updateHeaders(session)
    conn = pymysql.connect(host=DB_CONFIG['host'], user=DB_CONFIG['user'], passwd=DB_CONFIG['passwd'],
                           db=DB_CONFIG['db'], charset='utf8mb4', port=DB_CONFIG['port'])
    try:
        for key in SUBJECTS_DICT:
            logging.warning('Start crawl: Current subject is %s' % key)
            getBookInfo(session, key, conn)
            logging.debug('Finished subject %s' % key)
        logging.warning('Crawl End >> Success:%s, Fail:%s' % (success_count, fail_count))
        # Must run BEFORE the connection is closed: the original called
        # conn.close() first and then handed the closed conn to
        # checkFailLog -> save2SQL, which fails on every insert.
        checkFailLog(session, conn=conn)
    finally:
        conn.close()


# Script entry point: run the full crawl when executed directly.
if __name__ == '__main__':
    start()
