import json
import re
from time import time, sleep
from requests.auth import HTTPBasicAuth
import requests
from lxml import etree
import traceback
from Redis import RedisClient
from remoteDB import ArticleDB
from adsl import ADSL


class VerifyAndToEs(object):
    """De-duplicates periodical articles against Elasticsearch and, for new
    ones, scrapes the full text from ixueshu and posts it to the ingest API.

    Shared clients are class attributes (one per process).
    """

    db = ArticleDB()      # article status bookkeeping (remote DB)
    r = RedisClient()     # work queue, cookies and ADSL proxy pool
    adsl = ADSL()         # ADSL dial-up proxy management

    def verify(self, title, author):
        """Check whether an article with this exact title+author is already indexed.

        Returns a dict whose 'option' key is one of:
          'insert' - not indexed, or only copies with short (<300 chars) content exist
          'pass'   - a copy with substantial content is already indexed
          'error'  - HTTP failure, bad status code, or unexpected response shape
        """
        url = 'http://129.211.169.177:6350/periodical/_search'
        headers = {
            "Content-Type": "application/json"
        }
        data = {
            "query": {
                "bool": {
                    "must": [
                        {"match_phrase": {"title": title}},
                        {"match_phrase": {"author": author}}
                    ]
                }
            }
        }
        try:
            r = requests.get(url, data=json.dumps(data), headers=headers, timeout=10,
                             auth=HTTPBasicAuth('xiaohengheng', '950218'))
            if 300 > r.status_code >= 200:
                result = json.loads(r.text)
                if result['hits']['total']['value'] == 0:
                    return {'option': 'insert'}
                # At least one hit: only 'pass' if some copy already has real content.
                for item in result['hits']['hits']:
                    if len(item['_source']['content']) >= 300:
                        return {'option': 'pass'}
                return {'option': 'insert'}
            return {'option': 'error'}
        except Exception:
            # Was a bare `except:` — it also swallowed KeyboardInterrupt, making
            # the driver loop impossible to stop with Ctrl-C during a request.
            return {'option': 'error'}

    def PeriodicalData(self, id, url, article_id, author, title):
        """Scrape one article page, extract its docid signature and journal name,
        fetch the paginated XML body and POST the plain text to the ingest API.

        Outcome is recorded via db.UpdateOnePeriodicalStatus(id, ...); on proxy
        bans (403/503) the current ADSL line is dropped and redialed.
        """
        headers = {
            "sec-ch-ua-platform": "macOS",
            "Upgrade-Insecure-Requests": "1",
            "User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/94.0.4606.81 Safari/537.36",
            "Accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9",
            "Sec-Fetch-Site": "same-origin",
            "Sec-Fetch-Mode": "navigate",
            "Sec-Fetch-User": "?1",
            "Sec-Fetch-Dest": "document",
            "Accept-Encoding": "gzip, deflate, br",
            "Accept-Language": "zh-CN,zh;q=0.9,en;q=0.8",
            "x-dsa-host": "ixueshu.com",
            "x-dsa-originalip": "47.0.0.0",
            "x-dsa-secret": "DSA20130311X"
        }

        # Cookies are stored in Redis as a JSON object; json.loads already yields
        # the plain dict requests expects (the old key-by-key copy was redundant).
        cookieDict = json.loads(self.r.get_ixueshu_cookies())
        proxies = {}
        try:
            key, adsl = self.r.getADSL()
        except Exception:
            # No proxy available yet: dial a fresh ADSL line, then retry once.
            self.adsl.init(1)
            key, adsl = self.r.getADSL()
        if adsl:
            proxies = {
                'http': 'http://dujhre:4l9axcjk@' + adsl,
                'https': 'http://dujhre:4l9axcjk@' + adsl,
            }
        try:
            r = requests.get(url, headers=headers, timeout=30, proxies=proxies, cookies=cookieDict)
            if 300 > r.status_code >= 200:
                # Raw string: '\w' in a plain literal is an invalid escape
                # (SyntaxWarning on Python 3.12+, error in future versions).
                pattern = re.compile(r"var docid_sign = '\w{4}'")
                tree = etree.HTML(r.content)
                try:
                    # Pull the 4-char signature embedded in an inline script.
                    found = pattern.search(r.content.decode('utf-8'))
                    docid_sign = found.group().split('var docid_sign = ')[1].replace("\'", '')
                except Exception:
                    # No match (AttributeError) or decode failure: probably a
                    # block/captcha page — dump it for inspection and bail out.
                    print(r.text)
                    self.db.UpdateOnePeriodicalStatus(id, 'error', '')
                    return
                try:
                    periodical = tree.xpath('//*[@id="preview"]/div[2]/div[1]/div[1]/span[2]/text()')[0]
                except Exception:
                    periodical = '未知'
                self.db.UpdateOnePeriodical(id, periodical, article_id + '##' + docid_sign)
                try:
                    xmlResponse = requests.get(
                        f'https://www.huadiyingyuan.com//api/v2/preview/pdf/content/pc/{article_id}/p_1_10.xml?sign={docid_sign}',
                        proxies=proxies, timeout=30)
                    if 300 > xmlResponse.status_code >= 200:
                        # Strip every XML tag plus all newlines/spaces to get plain text.
                        exp = re.compile(r'<.*?>')
                        periodical_content = exp.sub('', xmlResponse.text).strip().replace('\n', '').replace(' ', '')
                        if len(periodical_content) > 100:
                            data = {
                                "title": title,
                                "author": author,
                                "periodical": periodical,
                                "content": periodical_content,
                                "local_url": article_id + '##' + docid_sign
                            }
                            insertResponse = requests.post('http://fiora.samereport.top/v1/pdf/parse/periodical',
                                                           verify=False, json=data)
                            print(insertResponse.status_code)
                            self.db.UpdateOnePeriodicalStatus(id, 'success', '')
                        # NOTE(review): content <= 100 chars leaves the row's status
                        # untouched — looks intentional (retry later); confirm.
                    elif xmlResponse.status_code == 403:
                        # Proxy banned: mark error, drop this line and redial.
                        print(403)
                        self.db.UpdateOnePeriodicalStatus(id, 'error', '')
                        self.adsl.DelOneAdsl(key)
                        self.adsl.reflash()
                    else:
                        print(xmlResponse.status_code)
                        self.db.UpdateOnePeriodicalStatus(id, 'error', '')
                except Exception as e:
                    print(e)
                    self.db.UpdateOnePeriodicalStatus(id, 'error', '')
            elif r.status_code == 403:
                print(403)
                self.adsl.DelOneAdsl(key)
                self.adsl.reflash()
                self.db.UpdateOnePeriodicalStatus(id, 'error', '')
            else:
                print(r.status_code)
                self.db.UpdateOnePeriodicalStatus(id, 'error', '')
        except Exception as e:
            print(e)
            self.db.UpdateOnePeriodicalStatus(id, 'error', '')
            if '503' in str(e):
                # Proxy itself is dead: drop it and dial a replacement.
                self.adsl.DelOneAdsl(key)
                self.adsl.reflash()
            if 'timeout' in str(e):
                self.adsl.inscTimeout()
            print(adsl)


def run_test():
    """Ad-hoc smoke test for VerifyAndToEs.verify().

    Bug fix: verify() takes (title, author); the old call passed only the
    title, so this raised TypeError before any request was made.
    """
    bot = VerifyAndToEs()
    option = bot.verify('序贯应用中药辅助治疗激素依赖性溃疡性结肠炎的临床分析', '')
    print(option)


if __name__ == '__main__':
    # run_test()
    bot = VerifyAndToEs()
    r = RedisClient()
    while True:
        if r.checkStop() == 'stop':
            print('stop ,sleep 5...')
            sleep(5)
            continue
        # Queue entries are '##'-joined: id##title##author##url##article_id.
        # NOTE(review): assumes none of the fields contains '##' — verify producer.
        article = r.get_one_periodical().split('##')
        row_id = article[0]     # renamed from `id` (shadowed the builtin)
        title = article[1]
        author = article[2]
        url = article[3]
        article_id = article[4]
        option = bot.verify(title, author)
        print(title)
        if option['option'] == 'insert':
            print('insert')
            bot.PeriodicalData(row_id, 'https://www.ixueshu.com' + url, article_id, author, title)
        elif option['option'] == 'update':
            # verify() never returns 'update' today; branch kept for compatibility.
            print('update')
            bot.PeriodicalData(row_id, 'https://www.ixueshu.com' + url, article_id, author, title)
        elif option['option'] == 'error':
            bot.db.UpdateOnePeriodicalStatus(row_id, 'error', '')
        elif option['option'] == 'pass':
            bot.db.UpdateOnePeriodicalStatus(row_id, 'success', '')
            print('pass')
        print('sleep 2...')
        # Bug fix: the message was printed but the pause never happened, so the
        # loop hit Redis/the site at full speed.
        sleep(2)
