import json
import re

import facade
import parsel
from xjlibrary.our_file_dir import BaseDir

# Directory containing this script; the DB config file is expected to sit next to it.
curPath = BaseDir.get_file_dir_absolute(__file__)
# Absolute path to the MySQL connection settings (section "db") used by DownSubject.
configfile = BaseDir.get_new_path(curPath, "db.ini")


class DownSubject(object):
    """Crawler for the SPIE Digital Library e-book catalogue.

    Workflow:
      1. ``get_home``  -- scrape the subject (technology) list from the
         e-books landing page into the ``subject`` table.
      2. ``down_books`` -- for every subject with ``stat=0``, page through
         the search results (100 items per page) and record
         (subject, book-id) pairs in ``bookssubject``, then mark the
         subject as done (``stat=1``).
    """

    # The results page embeds its data as a JSON literal inside a
    # ``DisplayResults([ {...} ][0]);`` call; group(1) is that JSON object.
    # Compiled once; raw string avoids invalid-escape warnings.
    _RESULTS_RE = re.compile(r"DisplayResults\(\[(.*?)\]\[0\]\);")

    # Browser-like headers for ordinary HTML page requests.
    _HTML_HEADERS = {
        "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
        "accept-encoding": "gzip, deflate",
        "accept-language": "zh-CN,zh;q=0.9",
        "cache-control": "no-cache",
        "pragma": "no-cache",
        "upgrade-insecure-requests": "1",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36",
    }

    # Headers for the AJAX pagination endpoint (expects an XHR request).
    _AJAX_HEADERS = {
        "accept": "*/*",
        "accept-encoding": "gzip, deflate",
        "accept-language": "zh-CN,zh;q=0.9",
        "cache-control": "no-cache",
        "pragma": "no-cache",
        "referer": "https://www.spiedigitallibrary.org/eBooks?all=1&pageSize=100",
        "user-agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/72.0.3626.121 Safari/537.36",
        "x-requested-with": "XMLHttpRequest",
    }

    def __init__(self):
        # MySQL helper configured from the "db" section of db.ini.
        self.mysqlutils = facade.MysqlUtiles(configfile,
                                             "db",
                                             logger=facade.get_streamlogger())

    def get_home(self):
        """Scrape the subject list from the e-books landing page.

        Each subject arrives as an inline ``technologyChange('<name>')``
        onclick handler; the wrapper is stripped and the bare name upserted
        into the ``subject`` table.
        """
        url = "https://www.spiedigitallibrary.org/ebooks"
        BoolResult, errString, r = facade.BaseRequest(url,
                                                      headers=self._HTML_HEADERS,
                                                      timeout=30,
                                                      allow_redirects=True,
                                                      verify=False)
        if not BoolResult:
            return
        selector = parsel.Selector(r.text)
        listtext = selector.xpath(
            '//*[@id="SearchResultLandingContent"]/div[3]/div/div/div[2]/div[@class="col-xs-6"]/div[contains(@class,"SearchResultsContentTechnologyRowPadding")]/text/@onclick').getall()
        print(listtext)
        for subject in listtext:
            subject = subject.replace("technologyChange('", "").replace("')", "")
            # NOTE(review): subject text is interpolated straight into SQL.
            # Values are site-controlled, but switch to a parameterized call
            # if the facade API supports one.
            sql = "replace into `subject` (`subject`) values ('{}')".format(subject)
            self.mysqlutils.ExeSqlToDB(sql)

    def down_books(self):
        """Download every result page for each unprocessed subject.

        Page 1 comes back embedded in the HTML search page (parsed by
        ``para_html``); subsequent pages are fetched from the AJAX endpoint
        via ``down_page``.  The subject is marked done regardless of the
        download outcome (matches the original best-effort behaviour).
        """
        sql = "select `subject` from `subject` where stat=0"
        rows = self.mysqlutils.SelectFromDB(sql)
        for row in rows:
            subject = row[0]
            # Subject name as it appears in the site's query string.
            url1 = subject.replace(" ", "_").replace("&", "%26")
            url = "https://www.spiedigitallibrary.org/eBooks?SSO=1&technology={}&pageSize=100".format(url1)
            BoolResult, errString, r = facade.BaseRequest(url,
                                                          headers=self._HTML_HEADERS,
                                                          timeout=30,
                                                          allow_redirects=True,
                                                          verify=False)
            if BoolResult:
                searchObj = self._RESULTS_RE.search(r.text)
                if searchObj is None:
                    # Page layout changed or an error page came back; skip
                    # instead of crashing on searchObj.group(1).
                    print("unexpected result page format for {}".format(subject))
                else:
                    print("searchObj.group(1) : ", searchObj.group(1))
                    dicts = json.loads(searchObj.group(1))
                    all_books_num = int(dicts["ResultsCount"])
                    self.para_html(r.text, subject)
                    # Ceiling division: 100 items per page.  The old
                    # ``int(n/100)+1`` requested one page too many when the
                    # count was an exact multiple of 100.
                    pages = -(-all_books_num // 100)
                    print("总页数为{}".format(pages))
                    for i in range(2, pages + 1):
                        print(i)
                        self.down_page(subject, url1, i)
            # NOTE(review): subject interpolated into SQL; see get_home.
            sql = "update `subject` set stat=1 where `subject`='{}'".format(subject)
            self.mysqlutils.ExeSqlToDB(sql)

    def para_html(self, html, subject):
        """Extract book IDs from the embedded JSON of an HTML search page.

        Stores (subject, book-id) rows in ``bookssubject``.
        """
        # Debug snapshot of the last page fetched.
        BaseDir.single_write_file(html, "./test.html")
        searchObj = self._RESULTS_RE.search(html)
        if searchObj is None:
            print("unexpected result page format for {}".format(subject))
            return
        dicts = json.loads(searchObj.group(1))
        listobj = dicts["Items"]
        Lists = [(subject, obj["UniqueId"]) for obj in listobj]
        sql = "insert ignore into bookssubject (`subject`,books) values (%s,%s)"
        self.mysqlutils.ExeSqlMany(sql, Lists)

    def down_page(self, subject, url1, page):
        """Fetch one AJAX result page (page >= 2) and store its book IDs."""
        url = "https://www.spiedigitallibrary.org/searchajax/eBook/results?technology={}&pageSize=100&page={}".format(
            url1, page)
        BoolResult, errString, r = facade.BaseRequest(url,
                                                      mark="Items",
                                                      endstring="",
                                                      headers=self._AJAX_HEADERS,
                                                      timeout=60)
        if BoolResult:
            self.para_html2(subject, r.text)
        else:
            print("下载页失败")

    def para_html2(self, subject, jsonmsg):
        """Store (subject, book-id) pairs from an AJAX JSON response.

        BUGFIX: tuples were previously built as (rowid, subject), which
        swapped the `subject` and `books` columns on insert; the order now
        matches the SQL column list and ``para_html``.
        """
        dicts = json.loads(jsonmsg)
        items = dicts["Items"]
        Lists = [(subject, obj["UniqueId"]) for obj in items]
        sql = "insert ignore into bookssubject (`subject`,books) values (%s,%s)"
        self.mysqlutils.ExeSqlMany(sql, Lists)


if __name__ == "__main__":
    # Two-stage crawl: harvest the subject list first, then the books
    # belonging to each unprocessed subject.
    crawler = DownSubject()
    crawler.get_home()
    crawler.down_books()