#!/usr/bin/env python 
# -*- coding: utf-8 -*- 
# @Time : 2018-06-15 9:13 
# @Author : Leo
# @File :
import json
import os
import random
import time

import facade
import requests
from bs4 import BeautifulSoup
from requests.adapters import HTTPAdapter
from xjlibrary.our_file_dir import ImageFile
from xjlibrary.our_file_dir.base_dir import BaseDir

# Resolve project-relative paths: this file's directory, the project root
# three levels up, and the cover download directory (created if absent).
cur_path = BaseDir.get_file_dir_absolute(__file__)
TopPath = BaseDir.get_upper_dir(cur_path, -3)
coverPath = BaseDir.get_new_path(TopPath, "download", "TandfJournal", "cover")
BaseDir.create_dir(coverPath)

# Database config file plus shared logger and MySQL helper used by all functions.
configfile = BaseDir.get_new_path(cur_path, "db.ini")
logger = facade.get_streamlogger()
mysqlutils = facade.MysqlUtiles(configfile, "db", logger=logger)

# conf = configparser.ConfigParser()
# conf.read(config_path, encoding='utf-8')

# Taylor & Francis Online endpoints; {jid} is filled in with a journal id.
BASE_URL = 'https://www.tandfonline.com'
JOURNAL_URL = 'https://www.tandfonline.com/loi/{jid}'

# Browser-like headers to reduce the chance of the scraper being blocked.
HEADERS = {
    'referer': 'http://taylorandfrancis.com/journals/',
    'upgrade-insecure-requests': '1',
    'user-agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/65.0.3325.181 Safari/537.36',
}


#
# logging.basicConfig(level=logging.INFO,  # 最低输出
#                     format='%(asctime)s %(filename)s[line:%(lineno)d] %(levelname)s %(message)s',
#                     datefmt='%d %b %H:%M:%S')


# # Database connection (legacy helper, replaced by the mysqlutils instance above)
# def MajorDbConnect():
#     return MySqlDbConnect(cur_path, 'TandfJournalConfig.ini')

def init():
    """Reset every in-progress journal row (cover_stat=1) back to pending (0)."""
    reset_sql = "update journal set cover_stat = 0 where cover_stat=1"
    mysqlutils.ExeSqlToDB(reset_sql)


# Fetch pending journals from the database
def getTasksFromDB():
    """Select up to 20 random journals whose cover has not been saved yet.

    :return: query result rows of (jid, name)
    """
    logger.info('Getting tasks from DB..')
    query = "SELECT `jid`, `name` FROM journal WHERE cover_stat < 1 ORDER BY RAND() LIMIT 20"
    return mysqlutils.SelectFromDB(query)


def updateDBJournal(jid, status):
    """Persist the cover-download outcome for one journal.

    :param jid: journal id of the row to update
    :param status: True on success (cover_stat=1); None resets to pending (0)
    """
    # NOTE(review): string interpolation is acceptable only because jid comes
    # from our own database, not from user input.
    template = ("UPDATE journal SET cover_stat = 0 WHERE jid = '%s'"
                if status is None
                else "UPDATE journal SET cover_stat = 1 WHERE jid = '%s'")
    mysqlutils.ExeSqlToDB(template % jid)


def reqUrl(session, url):
    """
    Issue a GET request with a short random delay (crude rate limiting).

    :param session: request session used to perform the GET
    :param url: absolute URL to fetch
    :return: the response object on HTTP 200, otherwise None
    """
    try:
        # Sleep 0-2 s between requests so we do not hammer the server.
        time.sleep(random.random() * 2)
        resp = session.get(url, timeout=20)
        print('HTTP STATUS CODE: %s' % resp.status_code)
        if resp.status_code == 200:
            return resp
        return None
    # BUGFIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt (Ctrl-C could not stop the scraper mid-request).
    except Exception:
        logger.error('Request url failed: %s' % url)
        return None


def getOneJournalCover(session, jid):
    """
    Download and save one journal's cover image identified by jid.

    :param session: request session used for all HTTP calls
    :param jid: journal id; used both in the journal URL and the file name
    :return: True if the cover already exists or was saved; None on failure
    """
    cover_path = os.path.join(coverPath, jid + '.jpg')
    if BaseDir.is_file_exists(cover_path):
        # BUGFIX: originally printed coverPath (the directory), not the file.
        print("存在:" + cover_path)
        return True
    journal_url = JOURNAL_URL.format(jid=jid)
    print(journal_url)
    journal_resp = reqUrl(session, journal_url)
    if journal_resp is None:
        return None
    journal_bsObj = BeautifulSoup(journal_resp.text, 'html.parser')
    cover_tag = journal_bsObj.find('div', {'class': 'cover'})
    if cover_tag is None:
        logger.info('Can not find cover tag !')
        return None
    # Guard the extraction: a missing <img>, missing data-src attribute, or
    # malformed JSON previously raised and crashed the whole batch run.
    try:
        dicts = json.loads(cover_tag.find('img')['data-src'])
        cover_url = BASE_URL + dicts['src']
    except (TypeError, KeyError, ValueError):
        logger.info('Malformed cover tag for jid: %s' % jid)
        return None
    print(cover_url)
    cover_resp = reqUrl(session, cover_url)
    if cover_resp is None:
        return None
    try:
        ImageFile.img_to_format_jpg(cover_resp.content, cover_path)
        # BUGFIX: originally printed coverPath (the directory), not the file.
        print(cover_path)
        return True
    # BUGFIX: narrowed from a bare `except:` so Ctrl-C is not swallowed.
    except Exception:
        logger.info('Error: save cover jpg: %s' % cover_url)
        return None


def start():
    """Process pending journals in batches of 20 until the queue is empty.

    NOTE(review): journals whose download fails stay at cover_stat=0 and are
    re-selected on the next batch, so persistent failures loop indefinitely.
    """
    session = requests.Session()
    session.headers = HEADERS
    # Retry transient connection failures up to 3 times on both schemes.
    for scheme in ('http://', 'https://'):
        session.mount(scheme, HTTPAdapter(max_retries=3))
    while True:
        tasks = getTasksFromDB()
        print(tasks)
        logger.info('Get %s tasks' % len(tasks))
        if not tasks:
            break
        for task in tasks:
            jid = task[0]
            status = getOneJournalCover(session, jid)
            updateDBJournal(jid, status)
            logger.info('Current jid: %s, Save status: %s' % (jid, status))
    logger.info('All tasks completed')


def main():
    """Script entry point; uncomment init() to reset in-progress rows first."""
    # init()
    start()


if __name__ == '__main__':
    main()
