import json
import random
import time

import pymysql
from bs4 import BeautifulSoup
from facade.loggerfacade import get_streamlogger
from facade.mysqlfacade import MysqlUtiles
from xjlibrary.mprocesspoll.MThreadingRun import MThreadingRun
from xjlibrary.mrequest.baserequest import MProxyRequest, USER_AGENTS
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.our_file_dir import BaseDir

# URL template for one page of an ACS series listing.
# Placeholders: {0} = series code (used twice), {1} = start page, {2} = year filter.
bookBaseUrl = 'https://pubs.acs.org/series/{0}?seriesCode={0}&'
bookBaseUrl += 'sortBy=Volume&startPage={1}&activeTab=Year&Year={2}'

# Resolve db.ini relative to this file's own directory.
curPath = BaseDir.get_file_dir_absolute(__file__)
configfile = BaseDir.get_new_path(curPath, "db.ini")

# One User-Agent is picked at import time and reused for every request.
HEADERS = {'User-Agent': random.choice(USER_AGENTS)}

# Maps the option names MysqlUtiles expects to the key names used in the
# "db" section of db.ini.
# NOTE(review): "chartset" looks like a typo for "charset", but it must match
# the actual ini key -- confirm against db.ini before renaming.
keytransformdicts = {"port": "db_port", "host": "db_host", "passwd": "db_pw", "user": "db_user",
                     "db": "db_name",
                     "chartset": "db_charset"}
logger = get_streamlogger()
# Shared MySQL helper used by every DB access in this module.
myutil = MysqlUtiles(configfile, "db", keytransformdicts=keytransformdicts, logger=logger)


def readDBProxy():
    """Fetch this crawler's proxy list from Redis (keyed off db.ini)."""
    proxies = getDataFromRedis(curPath, 'db.ini')
    return proxies


def getBookInfo(html):
    """Parse an ACS series listing page into one metadata dict per book.

    :param html: raw HTML of a book-list page.
    :return: list of dicts with keys img/href/title/author/vol/date/
             isbn13/eisbn/doi (fields not found are empty strings).
    :raises Exception: when the page has no ``div.bookList`` element.
    """
    listinfo = []
    bs = BeautifulSoup(html, "lxml")
    div_booklist = bs.find("div", class_="bookList")
    # Guard clause: a missing list container means a bad/blocked page.
    if not div_booklist:
        raise Exception("没有图书,请检查该页")
    for bookBox_tag in div_booklist.find_all("div", class_="bookBox"):
        img_string = bookBox_tag.find("img")["src"]
        bookMeta = bookBox_tag.find("div", class_="bookMeta")
        # Hoist the anchor lookup -- the original queried find("a") twice.
        anchor = bookMeta.find("a")
        href = anchor["href"]
        title = anchor.get_text().strip()
        vol = ""
        date = ""
        author = ""
        for div in bookMeta.find("div", class_="meta1").find_all("div"):
            # Hoist get_text(): it walks the whole subtree, so call it once
            # per div instead of up to three times.
            text = div.get_text()
            if text.find("Vol") != -1:
                vol = text.strip()
            if text.find("Publication Date") != -1:
                date = text.strip()
            if text.find("Editor") != -1:
                author = text.strip()
        isbn13 = eisbn = doi = ""
        for div in bookMeta.find("div", class_="meta2").find_all("div"):
            strings = div.get_text().strip()
            if strings.find("ISBN13") != -1:
                isbn13 = strings
            if strings.find("eISBN") != -1:
                eisbn = strings
            if strings.find("DOI") != -1:
                doi = strings

        listinfo.append({"img": img_string, "href": href, "title": title, "author": author,
                         "vol": vol, "date": date, "isbn13": isbn13, "eisbn": eisbn,
                         "doi": doi})
    return listinfo


def down_page(result_queue, url, row, list_proxy):
    """Download one listing page, queue an upsert SQL statement per book,
    then queue a progress update for the (series, year) row.

    :param result_queue: queue consumed later by InsertIntoDbFromList.
    :param url: listing-page URL built from bookBaseUrl.
    :param row: (series, year, allpages, pages, issn, eissn) tuple from `year`.
    :param list_proxy: proxy ring passed through to MProxyRequest.
    """
    # escape_string was removed from the pymysql top level in 1.0;
    # converters.escape_string exists on both old and new versions.
    esc = pymysql.converters.escape_string
    while True:
        BoolResult, errString, r = MProxyRequest(url,
                                                 Feature="bookListHead",
                                                 HEADERS=HEADERS,
                                                 proxyRingList=list_proxy,
                                                 verify=False,
                                                 timeout=(30, 60))
        if not BoolResult:
            # Log and retry with another proxy; the loop exits only on success.
            logger.error(errString)
            continue
        bookInfo = getBookInfo(r.text)
        issn = row[4]
        eissn = row[5]
        for dicts in bookInfo:
            jsonmsg = json.dumps({"author": dicts["author"], "vol": dicts["vol"],
                                  "date": dicts["date"], "isbn13": dicts["isbn13"],
                                  "eisbn": dicts["eisbn"], "issn": issn,
                                  "eissn": eissn})
            # Escape EVERY interpolated value -- the original escaped only the
            # title, so a quote or backslash in href/img/doi/jsonmsg (all
            # scraped from HTML) would break the statement and is an
            # SQL-injection vector.
            sql = "insert into `ebook` (`title`,`url`,`coverurl`,`doi`,`jsonmsg`) values ('%s','%s','%s','%s','%s') " \
                  "on DUPLICATE key update `url`='%s',`coverurl`='%s',`jsonmsg`='%s'" % (
                      esc(dicts["title"]), esc(dicts["href"]), esc(dicts["img"]), esc(dicts["doi"]),
                      esc(jsonmsg), esc(dicts["href"]), esc(dicts["img"]), esc(jsonmsg))
            # Statements are queued here and executed by InsertIntoDbFromList.
            result_queue.put(sql)
        break
    # Advance the page counter; mark the (series, year) row finished when the
    # page just fetched was the last one (allpages).
    pages = row[3] + 1
    stat = 1 if pages == row[2] else 0
    sql = "update `year` set `stat`=%d,pages=%d where `series`='%s' and `year`='%s'" % (
        stat, pages, row[0], row[1])
    result_queue.put(sql)


def SelectListFromDB():
    """Return every unfinished (stat=0) row from the `year` work table."""
    query = ("select `series`,`year`,`allpages`,`pages`,`issn`,`eissn` "
             "from `year` where stat=0")
    return myutil.SelectFromDB(query)


def InsertIntoDbFromList(ListSqls):
    """Execute each non-empty SQL statement collected from the workers."""
    for statement in ListSqls:
        if not statement:
            continue
        myutil.ExeSqlToDB(statement, errExit=True)


class downThreadRun(MThreadingRun):
    """Thread-pool driver: pulls pending (series, year) rows from MySQL,
    schedules down_page jobs, and flushes the resulting SQL back to the DB."""

    def __init__(self, num, func):
        super(downThreadRun, self).__init__(num, func)

    def setTask(self, *args, **kwargs):
        """Refill the work queue from the `year` table when it runs dry."""
        if self.thread_pool.work_queue.empty():
            rows = SelectListFromDB()
            if rows:
                for row in rows:
                    # row = (series, year, allpages, pages, issn, eissn);
                    # resume from the stored page offset row[3].
                    url = bookBaseUrl.format(row[0], row[3], row[1])
                    self.add_job(self.func, url, row, self.list_proxy)
            else:
                time.sleep(10)  # nothing pending -- back off before re-polling
        else:
            time.sleep(10)  # workers still busy -- avoid hammering the DB

    def dealresult(self, *args, **kwargs):
        """Flush the SQL statements queued by the workers."""
        InsertIntoDbFromList(self.results)

    def setProxy(self, proxysList=None):
        """Load the proxy ring from Redis.

        `proxysList` is ignored; the parameter is kept for interface
        compatibility with MThreadingRun.
        """
        # list(...) replaces the original element-by-element copy loop.
        MThreadingRun.setProxy(self, list(readDBProxy()))

    def is_break(self):
        """Always allow the run loop to terminate once work is exhausted."""
        return True


def getTodayDate():
    """Return today's local date as a midnight timestamp string.

    e.g. '2018-08-10 00:00:00'
    :return: string
    """
    now = time.localtime()
    return time.strftime('%Y-%m-%d 00:00:00', now)


if __name__ == "__main__":
    func = down_page
    threadrun = downThreadRun(30, func)
    threadrun.setfunc(func)
    threadrun.run()
    sql = "update `ebook` set stat=0 where update_time>'{}'".format(getTodayDate())
    myutil.ExeSqlToDB(sql, errExit=True)
    myutil.close()
