# https://pubs.acs.org/series/symposium?seriesCode=symposium&sortBy=Volume&startPage=0&activeTab=Year&Year=2018
import json
import random
from queue import Queue

from bs4 import BeautifulSoup
from facade.loggerfacade import get_streamlogger
from facade.mysqlfacade import MysqlUtiles
from xjlibrary.mrequest.baserequest import USER_AGENTS, MProxyRequest
from xjlibrary.myredis.myredisclient import getDataFromRedis
from xjlibrary.our_file_dir import BaseDir
from xjlibrary.ringList import RingList

# Resolve paths relative to this file so the script works from any working directory.
curPath = BaseDir.get_file_dir_absolute(__file__)
configfile = BaseDir.get_new_path(curPath, "db.ini")
logger = get_streamlogger()
# URL template for an ACS series listing page:
# {0} = series code (used twice), {1} = start page, {2} = year filter.
bookBaseUrl = 'https://pubs.acs.org/series/{0}?seriesCode={0}&'
bookBaseUrl += 'sortBy=Volume&startPage={1}&activeTab=Year&Year={2}'
# Work queue of JSON-encoded {url, seriesCode, year} jobs,
# filled by getBookListInit() and drained by getBookList().
bookUrl = Queue()

# A single User-Agent is chosen at import time and reused for every request.
HEADERS = {'User-Agent': random.choice(USER_AGENTS)}

# Maps this script's config-key names onto the names MysqlUtiles expects.
# NOTE(review): "chartset" looks like a typo for "charset" — confirm it
# matches the key actually present in db.ini before renaming.
keytransformdicts = {"port": "db_port", "host": "db_host", "passwd": "db_pw", "user": "db_user",
                     "db": "db_name",
                     "chartset": "db_charset"}
myutil = MysqlUtiles(configfile, "db", keytransformdicts=keytransformdicts)


def get_proxy():
    """Fetch a fresh batch of proxy entries from Redis via the local db.ini config."""
    proxies = getDataFromRedis(curPath, 'db.ini')
    return proxies


# Seed the shared proxy ring once at import time;
# getBookList() tops it up whenever it drops below 20 entries.
list_proxy = RingList()
for proxy in get_proxy():
    list_proxy.append(proxy)


def getBookYear(html):
    """Extract every year value offered by the page's #bookYear drop-down.

    :param html: listing-page HTML containing the year <select> element
    :return: list of year strings, one per <option>'s ``value`` attribute
    :raises Exception: when the select tag is missing from the page
    """
    soup = BeautifulSoup(html, "lxml")
    year_select = soup.find('select', id='bookYear')
    if year_select is None:
        raise Exception("没有发现select标签 请检查")
    return [option["value"] for option in year_select.find_all("option")]


def getBookListInit(seriesCode, page, year):
    """Seed the bookUrl queue with one listing-page job per available year.

    Fetches a single listing page for *seriesCode*, reads the year drop-down
    from it, and enqueues a JSON job {url, seriesCode, year} for every year.

    :param seriesCode: ACS series code, e.g. 'advances' or 'symposium'
    :param page: start page value inserted into the URL template
    :param year: year used only for the initial probe request
    :raises Exception: when the probe request fails after proxy retries
    """
    print("*" * 50)
    url = bookBaseUrl.format(seriesCode, page, year)
    print(url)

    BoolResult, errString, r = MProxyRequest(url,
                                             Feature="bookListHead",
                                             HEADERS=HEADERS,
                                             proxyRingList=list_proxy,
                                             verify=False,
                                             timeout=(30, 60))
    if not BoolResult:
        logger.error(errString)
        raise Exception("请求页时失败,请检查")
    # Plain for-loop: the original used a list comprehension purely for its
    # queue-put side effect, building a throwaway list of None values.
    for _year in getBookYear(r.text):
        bookUrl.put(
            json.dumps(
                {
                    "url": bookBaseUrl.format(seriesCode, page, _year),
                    "seriesCode": seriesCode,
                    "year": _year
                }
            )
        )


def getBookIssn(html):
    """Parse the eISSN/ISSN pair and the result-count text from a listing page.

    The ISSN line reads either "eISSN: xxxx | ISSN: yyyy" or just "ISSN: yyyy";
    in the latter case eissn is returned as "".

    :param html: book listing page HTML
    :return: tuple (eissn, issn, pages) where pages is the raw
             "(Showing results ... of N)" text
    :raises Exception: when the expected ISSN or result-count markup is absent
    """
    bs = BeautifulSoup(html, "lxml")
    font_tag = bs.find("strong", text="ISSN:")
    if font_tag is None:
        # Fail with context instead of an opaque AttributeError on .parent.
        raise Exception("ISSN strong tag not found, please check page markup")
    Strings = font_tag.parent.get_text()
    liststring = Strings.split("|")
    if len(liststring) == 2:
        eissn = liststring[0].replace("eISSN:", "").strip()
        issn = liststring[1].replace("ISSN:", "").strip()
    else:
        issn = liststring[0].replace("ISSN:", "").strip()
        eissn = ""
    pages_div = bs.find("div", class_="centered smaller")
    if pages_div is None:
        raise Exception("result-count div (centered smaller) not found, please check page markup")
    return eissn, issn, pages_div.get_text()


def getBookList():
    """Drain the bookUrl queue: fetch each year page, parse its ISSNs and
    total page count, and insert one row per (series, year) into `year`.

    Bug fix: the original dequeued a NEW url on every retry iteration, so a
    failed url was silently dropped, and once the queue emptied mid-retry the
    blocking ``bookUrl.get()`` hung forever. The dequeue now happens once per
    outer iteration and the SAME url is retried until it succeeds.
    """
    while not bookUrl.empty():
        urlInfo = json.loads(bookUrl.get())
        url = urlInfo['url']
        seriesCode = urlInfo['seriesCode']
        year = urlInfo['year']
        while True:
            BoolResult, errString, r = MProxyRequest(url,
                                                     Feature="bookListHead",
                                                     HEADERS=HEADERS,
                                                     proxyRingList=list_proxy,
                                                     verify=False,
                                                     timeout=(30, 60))
            print("proxy length: "+str(list_proxy.length()))
            # Top the proxy ring back up when it runs low.
            if list_proxy.length() < 20:
                for proxy in get_proxy():
                    list_proxy.append(proxy)
            if not BoolResult:
                logger.error(errString)
                continue  # retry the same url with another proxy
            eissn, issn, pages = getBookIssn(r.text)
            # pages looks like "(Showing results 1 - 20 of N)"; keep only N
            # and convert it to a page count at 20 results per page.
            allpages = pages.replace("(Showing results", "").replace(")", "")
            allpages = allpages.split("of")[1].strip()
            allpages = (int(allpages) // 20)+1
            # NOTE(review): values are interpolated straight into the SQL
            # string and issn/eissn come from a scraped page — prefer a
            # parameterized query if MysqlUtiles exposes one.
            sql = "insert ignore into `year` (`series`,`year`,`pages`,`allpages`, `issn`,`eissn`) values ('%s','%s',%s,%s,'%s','%s')" % (
                seriesCode, year, 0, allpages, issn, eissn)
            logger.info(sql)
            myutil.ExeSqlToDB(sql, errExit=True)
            break


if __name__ == "__main__":
    #  先清空year表
    sql = "truncate table `year`"
    myutil.ExeSqlToDB(sql, errExit=True)
    getBookListInit('advances', '0', '1984')
    getBookListInit('symposium', '0', '2018')
    getBookList()
    myutil.close()
