import json
import sys

import facade
import requests
from parsel import Selector
from xjlibrary.our_file_dir import BaseDir

# Resolve db.ini relative to this script's own directory so the script works
# no matter what the current working directory is.
# NOTE(review): BaseDir is a project helper; presumably get_file_dir_absolute
# returns this file's absolute directory and get_new_path joins paths — confirm.
curPath = BaseDir.get_file_dir_absolute(__file__)
configfile = BaseDir.get_new_path(curPath, "db.ini")


class DownIssuePage(object):
    """Download a journal issue's table-of-contents page and queue its articles.

    Workflow (driven by main()): select_journal() yields pending issue rows,
    down_issuepage() fetches the TOC page for self.url, para_home_page()
    extracts per-article metadata and inserts it into the `article` table,
    and update_stat() marks the issue row as processed.
    """

    def __init__(self):
        self.url = ""
        # Browser-like headers to look less like a bot.
        self.header = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "accept-encoding": "gzip, deflate, br",
            "accept-language": "zh-CN,zh;q=0.9,en;q=0.8",
            "cache-control": "no-cache",
            "pragma": "no-cache",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36"
        }
        self.logger = facade.get_streamlogger()
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger)
        # One shared session for all requests: the note at the bottom of this
        # file records a ban for creating >25 sessions within 5 minutes.
        self.sn = requests.session()
        self.proxy = {
            "http": "192.168.30.176:8012",
            "https": "192.168.30.176:8012"
        }

    @staticmethod
    def _sql_quote(value):
        """Return *value* with single quotes doubled for a MySQL '...' literal.

        NOTE(review): this does not neutralise backslashes; a parameterised
        query via the facade layer would be safer if it supports one.
        """
        return ("" if value is None else value).replace("'", "''")

    def set_url(self, url):
        """Set the issue TOC URL that down_issuepage() will fetch next."""
        self.url = url

    def down_issuepage(self, row):
        """Download the issue page at self.url and parse it.

        row: DB row (url, issue, vol, journalurl) forwarded to the parser.
        Exits the process on download failure so the issue is not marked done.
        """
        BoolResult, errString, r = facade.BaseRequest(self.url,
                                                      sn=self.sn,
                                                      proxies=self.proxy,
                                                      mark="frmAbs",
                                                      headers=self.header,
                                                      timeout=(30, 60),
                                                      verify=False  # NOTE(review): TLS verification disabled, presumably for the proxy — confirm
                                                      )
        if BoolResult:
            self.para_home_page(r, row)
        else:
            # Dump whatever was received (helps diagnose bans/captchas) and
            # log the error detail that was previously discarded.
            if r is not None:
                BaseDir.single_write_file(r.text, "test.html")
            self.logger.info("下载出错 请检查 %s", errString)
            sys.exit(-1)

    def para_home_page(self, r, row):
        """Parse the issue TOC HTML in *r* and insert one `article` row per entry.

        Each entry is stored as (url=href, jsonmsg=JSON of title/authors/
        issueyear/page/doi plus issue/vol/journal-url from *row*).
        """
        print("开始解析")
        BaseDir.single_write_file(r.text, "./test.html")
        selector = Selector(text=r.text)
        listdiv = selector.xpath('//*[@id="top"]/form/div[2]/div')
        for div in listdiv:
            href = div.xpath('.//a[@class="ref nowrap"]/@href').get()
            if href is None:
                # Without an href there is nothing to key the row on; the old
                # code would have stored the literal string "None".
                self.logger.info("跳过无链接条目")
                continue
            # .get() returns None when the node is absent — default to ""
            # so the .replace()/.strip() calls below cannot raise.
            title = div.xpath('.//span[@class="hlFld-Title"]/text()').get() or ""
            authors = div.xpath('.//strong/span[@class="hlFld-ContribAuthor"]/a/text()').getall()
            issueyear = div.xpath('.//div[@class="citation tocCitation"]/text()').get() or ""
            page = div.xpath('.//div[@class="citation tocCitation"]/span/text()').get()
            doi = div.xpath('.//div[@class="citation tocCitation"]/a/@href').get()
            dicts = {
                "title": title,
                "authors": ";".join(authors),
                "issueyear": issueyear.replace("\"", "").strip(),
                "page": page,
                "doi": doi,
                "issue": row[1],
                "vol": row[2],
                "jurl": row[3],
            }
            jsonmsg = json.dumps(dicts, ensure_ascii=False)
            # Escape the whole JSON payload once instead of per-field: this
            # stores the same text for title/authors as before, and also
            # protects page/doi/href, which were previously unescaped.
            sql = "insert ignore into article (`url`,`jsonmsg`) values ('{}','{}')".format(
                self._sql_quote(href), self._sql_quote(jsonmsg))
            self.mysqlutils.ExeSqlToDB(sql)

    def select_journal(self):
        """Return all pending (url, issue, vol, journalurl) rows (stat=0)."""
        sql = "select url,issue,vol,journalurl from volissue where stat=0"
        rows = self.mysqlutils.SelectFromDB(sql)
        return rows

    def update_stat(self, url):
        """Mark the issue row identified by *url* as processed (stat=1)."""
        sql = "update volissue set stat=1 where `url`='{}'".format(self._sql_quote(url))
        self.mysqlutils.ExeSqlToDB(sql)


def main():
    """Process every pending issue: download, parse, then mark as done."""
    downloader = DownIssuePage()
    for record in downloader.select_journal():
        issue_url = record[0]
        downloader.set_url(issue_url)
        downloader.down_issuepage(record)
        downloader.update_stat(issue_url)


"""
被封一次 原因是5分钟内创建超过25个会话
尝试使用一个sn下载是否会被封
还有可能遇到验证码问题
"""
if __name__ == "__main__":
    main()
