import pickle
import sys
import time

import facade
import requests
from bs4 import BeautifulSoup
from xjlibrary.mdatetime.mtime import getTodayDate
from xjlibrary.our_file_dir import BaseDir

from cnipr.Step1_login import Login

# Directory containing this file; anchors the cookie store and DB config below.
curPath = BaseDir.get_file_dir_absolute(__file__)
# Session cookies are pickled under <this dir>/cookies (one file per day,
# named by today's date — see Search.__init__ / Search.write_file).
cookiedir = BaseDir.get_new_path(curPath, "cookies")
# MySQL connection settings, consumed by facade.MysqlUtiles in Search.__init__.
configfile = BaseDir.get_new_path(curPath, "db.ini")


class Search:
    """Downloads CNIPR overview-search result pages for one publication date.

    Workflow: ``pub_data`` is set by the caller, ``down_one_day`` POSTs the
    search form (re-using pickled session cookies), ``para_search`` extracts
    the total record count and inserts one row per result page into the
    ``search`` table, and ``update_db`` marks the date as done in ``pubdate``.
    """

    def __init__(self):
        # Overview-search endpoint; every query is POSTed here.
        self.url = "http://search.cnipr.com/search!doOverviewSearch.action"
        self.sn = requests.session()
        # One cookie file per calendar day, e.g. cookies/20181012.txt.
        self.cookiefile = BaseDir.get_new_path(cookiedir, str(getTodayDate()) + ".txt")
        self.logger = facade.get_streamlogger()
        self.headers = {
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/webp,image/apng,*/*;q=0.8",
            "Accept-Encoding": "gzip, deflate",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive",
            "Content-Type": "application/x-www-form-urlencoded",
            "Host": "search.cnipr.com",
            "Origin": "http://search.cnipr.com",
            "Pragma": "no-cache",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.110 Safari/537.36"
        }
        # Advanced-search form payload; "strWhere" and "txt_D" carry the
        # publication date (strWhere is rewritten per request in down_one_day).
        self.postData = {
            "strWhere": "公开（公告）日=(20180904)",
            "yuyijs": "",
            "start": "1",
            "saveFlag": "1",
            "limit": "10",
            "strSynonymous": "",
            "crossLanguage": "",
            "islogicsearch": "false",
            "saveExp": "",
            "showhint": "",
            "channelId": ["FMZL", "SYXX", "WGZL", "FMSQ"],
            "trsq": "1",
            "dan": "1",
            "txt_A": "",
            "txt_B": "",
            "txt_C": "",
            "txt_D": "20180904",
            "txt_E": "",
            "txt_F": "",
            "txt_Q": "",
            "txt_R": "",
            "txt_I": "",
            "txt_J": "",
            "txt_G": "",
            "txt_H": "",
            "txt_L": "",
            "txt_O": "",
            "text": "",
            "txt_K": "",
            "txt_M": "",
            "txt_N": "",
            "txt_U": "",
            "txt_T": "",
            "txt_V": "",
            "txt_X": "",
            "mpage": ["null", "advsch"]
        }
        # Publication date (yyyymmdd string) to download; must be set by the
        # caller before down_one_day() is invoked.
        self.pub_data = ""
        self.mysqlutils = facade.MysqlUtiles(configfile, "db", logger=self.logger)

    def set_cookiefile(self, cookie_file):
        """Override the cookie file path (defaults to today's file).

        :param cookie_file: path to a pickled requests cookie jar
        :return: None
        """
        self.cookiefile = cookie_file

    def set_cookie(self):
        """Load session cookies from the cookie file, if it exists.

        Missing file is not an error — the request is then sent without
        stored cookies and will fail the permission check instead.
        """
        self.logger.info(self.cookiefile)
        if BaseDir.is_file_exists(self.cookiefile):
            with open(self.cookiefile, "rb") as f:
                self.sn.cookies.update(pickle.load(f))

    def write_file(self, r):
        """Persist the response's cookies so later runs can reuse the login.

        :param r: a requests response whose .cookies jar is pickled
        """
        self.logger.info("将cookie写入文件")
        with open(self.cookiefile, "wb") as f:
            pickle.dump(r.cookies, f)

    def down_one_day(self, retrynum=0, relogin=0):
        """Download the overview page for self.pub_data, with retry/re-login.

        After 3 consecutive failures a fresh login is attempted once; if the
        retries fail again after that, the process exits.

        :param retrynum: consecutive failed attempts so far
        :param relogin: 1 if a fresh login has already been tried
        :raises Exception: if self.pub_data was never set
        """
        if retrynum > 3:
            if relogin > 0:
                self.logger.info("重新登陆无效 结束程序查找原因")
                sys.exit(-1)
            self.logger.info("已经连续下载3次 现在怀疑是登陆原因 调用重新登陆")
            Login().login()
            # Fix: return after the fresh-credentials retry. The original fell
            # through and re-ran the download with retrynum still > 3.
            self.down_one_day(0, 1)
            return
        if self.pub_data == "":
            raise Exception("没有设置请求日期")
        self.postData["strWhere"] = "公开（公告）日=({})".format(self.pub_data)
        self.set_cookie()
        self.logger.info(self.postData["strWhere"])
        BoolResult, errString, r = facade.BaseRequestPost(self.url,
                                                          sn=self.sn,
                                                          data=self.postData,
                                                          mark="g_filter",
                                                          headers=self.headers,
                                                          timeout=60)
        if BoolResult:
            self.write_file(r)
            self.logger.info("搜索成功 现在解析数据并存入数据库")
            self.para_search(r)
            self.update_db()
        else:
            if errString == "Feature err":
                if r.text.find("你没有访问该页面的权限") > -1:
                    Login().login()
                    # Fix: return after the re-login retry instead of falling
                    # through to the sleep-and-retry below (double download).
                    self.down_one_day()
                    return
            self.logger.info("搜索页面失败 现在睡眠1分钟后重新下载...")
            time.sleep(60)
            # Fix: carry the relogin flag through retries; the original
            # dropped it, so a post-relogin failure looped back into
            # Login().login() forever instead of exiting.
            self.down_one_day(retrynum + 1, relogin)

    def update_db(self):
        """Mark self.pub_data as downloaded (stat=1) in the pubdate table."""
        # NOTE(review): SQL built by string formatting. pub_data comes from our
        # own pubdate table, but this should be parameterized if ExeSqlToDB
        # supports bound parameters — confirm the facade API.
        sql = "update pubdate set stat=1 where pub_date='{}'".format(self.pub_data)
        self.mysqlutils.ExeSqlToDB(sql)

    def para_search(self, r):
        """Parse the downloaded search page and record its result pages.

        Reads the total record count from the hidden input #allRecordCnt and
        inserts one (pub_date, allpage, page) row per result page into the
        ``search`` table. If the input is missing, the page is dumped to
        ./test.html for inspection.

        :param r: requests response for the overview-search POST
        """
        ListPara = []
        soup = BeautifulSoup(r.text, "lxml")
        input_tag = soup.find("input", id="allRecordCnt")
        if input_tag:
            onclickstring = input_tag["value"]
            self.logger.info("pages is :" + onclickstring)
            if onclickstring == "0":
                return
            pagesnum = onclickstring.replace("goPage4Click(this,", "").replace(");", "").strip()
            # Fix: ceiling division at 10 records per page ("limit": "10").
            # The original (n // 10) + 1 produced a trailing empty page
            # whenever the count was an exact multiple of 10.
            allpage = -(-int(pagesnum) // 10)
            for page in range(1, allpage + 1):
                ListPara.append((self.pub_data, allpage, page))
        else:
            self.logger.info("没有 value")
            BaseDir.single_write_file(r.text, "./test.html")
        sql = "insert into search (pub_date,allpage,page) values (%s,%s,%s)"
        if len(ListPara) > 0:
            self.mysqlutils.ExeSqlMany(sql, ListPara)

    def select(self):
        """Return up to 100 pending publication dates (stat=0) from pubdate.

        :return: rows of (pub_date,) tuples as produced by SelectFromDB
        """
        sql = "select pub_date from pubdate where stat=0 limit 100"
        rows = self.mysqlutils.SelectFromDB(sql)
        return rows

    def test(self):
        """Manual smoke test: insert two fixed rows into the search table."""
        ListPara = [('20181012', 5109, 495), ('20181012', 5109, 4955)]
        sql = "insert ignore into search (pub_date,allpage,page) values (%s,%s,%s)"
        if len(ListPara) > 0:
            print(ListPara)
            self.mysqlutils.ExeSqlMany(sql, ListPara)


if __name__ == "__main__":
    searcher = Search()
    # Drain the pending publication dates in batches of up to 100,
    # downloading each date's overview page, until none are left.
    pending = searcher.select()
    while pending:
        for record in pending:
            searcher.pub_data = record[0]
            searcher.down_one_day()
        pending = searcher.select()
    # searcher.test()
