import requests, re

from save_mongo import MongoServer
import threading, redis, time, zlib, random
from tyc_login_yanzheng import tyc_yanzheng_deal
from setting import *
from send_email import EmailModel
from replace_acw_sc__v2 import replace_cookie


def write_log(content):
    """Append *content* to ``tyc_log.txt`` prefixed with a local timestamp.

    :param content: message to record; a newline is added automatically.
    """
    stamp = time.strftime("%Y-%m-%d %H:%M:%S", time.localtime())
    with open('tyc_log.txt', 'a', encoding='utf-8') as log_file:
        log_file.write(f'{stamp} --> {content}\n')


class Tyc(threading.Thread):
    """Worker thread: pops company detail URLs from the shared redis queue,
    downloads each page (optionally through a proxy), and stores the
    zlib-compressed HTML into MongoDB.

    Relies on module-level globals defined in the ``__main__`` block:
    ``redis_server``, ``count``, ``verify`` and ``cookie_isAlive_acw_sc__v2``.
    """

    def __init__(self, proxy, mongo):
        """
        :param proxy: "host:port" proxy string; a falsy value disables the proxy.
        :param mongo: MongoServer instance used to persist scraped pages.
        """
        super().__init__()
        self.proxy = proxy
        self.mongo_db = mongo
        self.event = threading.Event()
        # The login cookie lives in an external file so it can be refreshed
        # (e.g. by replace_cookie) without restarting the crawler.
        with open('./tyc_login_yanzheng/tyc_cookie', 'r', encoding='utf8') as fp:
            self.cookie = fp.read()

    def get_response(self, url):
        """Fetch *url* with browser-like headers (and the configured proxy, if
        any) and return the raw ``requests.Response``.

        NOTE(review): no ``timeout=`` is set, so a stalled connection can hang
        this thread indefinitely — consider adding one.
        """
        refer_list = ['advance/search/e-pc_homeicon', '', 'search?base=sh', 'search?base=bj']
        headers = {
            # 'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/70.0.3538.25 Safari/537.36 Core/1.70.3756.400 QQBrowser/10.5.4039.400',
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/90.0.4430.212 Safari/537.36',
            'cookie': self.cookie,
            'Accept': 'text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8,application/signed-exchange;v=b3;q=0.9',
            'sec-ch-ua': '" Not A;Brand";v="99", "Chromium";v="90", "Google Chrome";v="90"',
            'Referer': 'https://www.tianyancha.com' # + random.choice(refer_list),
        }
        proxies = {
            'http': 'http://' + self.proxy,
            'https': 'https://' + self.proxy
        }
        if self.proxy:
            res = requests.get(url, headers=headers, proxies=proxies)
            # print(res.text)
        else:
            res = requests.get(url, headers=headers)
            # print(res.text)
        return res

    def get_holding_info(self, url):
        """Fetch the shareholder-holding-analysis endpoint for the company in
        *url* and return the parsed result.

        The ``_=`` query parameter is the current time as a millisecond
        timestamp (seconds + first 3 fractional digits), mimicking the
        cache-buster the site's own JS sends.

        NOTE(review): ``get_holding`` is not defined in this file — presumably
        it comes in via ``from setting import *``; verify.
        """
        tmp = str(time.time())
        com_id = url.replace('/', '').split('company')[1]
        holding_url = f'https://www.tianyancha.com/company/holder_holding_analysis.xhtml?id={com_id}&_={tmp.split(".")[0] + tmp.split(".")[1][:3]}'
        res = self.get_response(holding_url)
        return get_holding(res)

    def run(self):
        """Main worker loop: consume URLs from redis until the queue is empty
        or a captcha / dead-cookie condition forces a shutdown (signalled to
        the main thread via the shared ``verify`` /
        ``cookie_isAlive_acw_sc__v2`` globals)."""
        global count, verify, cookie_isAlive_acw_sc__v2
        while True:
            # Pop the next work item ("detail_url-*-name") from the redis list.
            queue_members = redis_server.rpop('tyc_detail_url')
            queue_members = queue_members.decode('utf-8') if isinstance(queue_members, bytes) else queue_members
            # An empty pop means the queue has been fully drained.
            if not queue_members:
                print('redis库中的数据已经全部爬取完成')
                break
            detail_url = queue_members.split('-*-')[0]
            name = queue_members.split('-*-')[1]
            print(f'{self.name} --> {detail_url}')
            try:
                # Dedup check: sadd returns 1 only if the URL was NOT already
                # in the set.  It is removed again right away so the URL is
                # only permanently recorded after a successful save below.
                if redis_server.sadd('tyc_repeat', detail_url):
                    redis_server.srem('tyc_repeat', detail_url)
                    # res = self.get_response('https://www.tianyancha.com/company/3252159164')
                    # Up to 5 attempts; connection errors back off 2s between tries.
                    for i in range(5):
                        # Another thread may have tripped the captcha / cookie
                        # flags while we were waiting — bail out early.
                        if verify or (cookie_isAlive_acw_sc__v2 != 1):
                            raise Exception('验证码出现')
                        else:
                            try:
                                res = self.get_response(detail_url)
                                # print(res.cookies)
                                # print(res.text)
                                break
                            except requests.exceptions.ConnectionError as e:
                                if verify:
                                    raise Exception('验证码出现')
                                print(self.name, e)
                                print('休息两秒！重试次数：', i)
                                if i == 4:
                                    raise Exception('重试过多！')
                                time.sleep(2)

                    # A redirect to a 'verify?' URL means we hit the captcha wall.
                    if 'verify?' not in res.url:
                        # Heuristic page-validity check: either the marker text is
                        # present, or the page is neither masked ('***') nor an
                        # acw_sc__v2 anti-bot challenge.
                        if '哈皮一二' in res.text or ('***' not in res.text and 'setCookie("acw_sc__v2", x)' not in res.text):
                            # Store the compressed raw HTML keyed by company id.
                            # all_infos = get_info(res)
                            # holding_info = self.get_holding_info(detail_url)
                            # all_item = {**all_infos, **holding_info}
                            data = zlib.compress(res.content)
                            item = {"id": str(res.url).split('/company/')[1], 'name': name, 'text': data}
                            self.mongo_db.process_item(item, self.name)
                            redis_server.sadd('tyc_repeat', detail_url)
                            count += 1
                        else:
                            # Cookie is dead (or anti-bot challenge served):
                            # requeue the item and stop this worker.
                            print(f'当前数量{count}  当前cookie失效！！！！！！！！！')
                            write_log(f'当前数量{count}   当前cookie失效！！！！！！！！！')
                            print('请尽快更新cookie')
                            print(self.cookie)
                            # with open('html_test.html', 'w', encoding='utf8') as fp:
                            #     fp.write(res.text)
                            if 'setCookie("acw_sc__v2", x)' in res.text:
                                # Capture the challenge's arg1 so the main thread
                                # can compute a fresh acw_sc__v2 cookie.
                                cookie_isAlive_acw_sc__v2 = re.search("var arg1='(.*?)';", res.text).group(1)
                            redis_server.rpush('tyc_detail_url', queue_members)
                            # email_cilent.send_email(info='当前cookie失效！！！！！！！！！\n请尽快更新cookie', title='COOKIE')
                            break
                    else:
                        # Captcha page: requeue, raise the shared flag, stop.
                        print(f'当前数量{count}  验证码出现！！！！！！！！！')
                        redis_server.rpush('tyc_detail_url', queue_members)
                        verify = 1
                        break
                else:
                    print(f'{detail_url}这个网站已经爬取过了，跳过此网页的爬取')
                    continue
            except Exception as e:
                # Any failure requeues the item and stops the worker; the
                # main-thread loop decides whether to restart the crawl.
                print(detail_url, e, e.__traceback__.tb_frame.f_globals["__file__"], e.__traceback__.tb_lineno)
                redis_server.rpush('tyc_detail_url', queue_members)
                # except_num += 1
                # if except_num > 1:
                break


def main(threading_num=REQUESTS_COUNT):
    """Launch ``threading_num`` Tyc worker threads against today's Mongo
    collection, wait for all of them to finish, then report throughput.

    :param threading_num: number of concurrent worker threads.
    """
    # One Mongo collection per day, named by the current local date.
    mongo_db.set_collection(time.strftime("%Y-%m-%d", time.localtime(time.time())))
    workers = [Tyc(proxy=PROXY_IP, mongo=mongo_db) for _ in range(threading_num)]

    # Start every worker before joining any of them so they run concurrently.
    for worker in workers:
        worker.start()

    for worker in workers:
        worker.join()
        # print(t.is_alive())

    elapsed_minutes = (time.time() - start) / 60
    print(f'本次爬取共：{count}，速度：{count / elapsed_minutes} item/min')
    write_log(f'本次爬取共：{count}，速度：{count / elapsed_minutes} item/min')
    print(threading.enumerate())
    print(threading.enumerate())


if __name__ == '__main__':
    # Total pages successfully saved across all workers (shared counter).
    count = 0
    # Set to 1 by a worker when a captcha page is encountered.
    verify = 0
    # 1 while the acw_sc__v2 cookie is valid; otherwise holds the challenge's
    # arg1 string captured by a worker, used to compute a replacement cookie.
    cookie_isAlive_acw_sc__v2 = 1
    redis_server = redis.Redis(host=REDIS_HOST, port=REDIS_PORT, decode_responses=True, charset='UTF-8',
                               encoding='UTF-8')
    start = time.time()
    mongo_db = MongoServer()
    # email_cilent = EmailModel()
    write_log('爬虫开始启动！！！')
    # Supervisor loop: rerun the crawl after handling whichever condition
    # stopped it (captcha solved, or acw_sc__v2 cookie replaced); exit once a
    # run completes with neither flag raised.
    while True:
        main()
        if verify:
            tyc_yanzheng_deal.main()
            verify = 0
        elif cookie_isAlive_acw_sc__v2 != 1:
            print('更换新acw_sc__v2')
            print(cookie_isAlive_acw_sc__v2)
            write_log(cookie_isAlive_acw_sc__v2)
            replace_cookie(cookie_isAlive_acw_sc__v2)
            write_log('更换新acw_sc__v2')
            cookie_isAlive_acw_sc__v2 = 1
        else:
            break
    print('主线程结束！', threading.current_thread().name)
    # NOTE(review): calling __del__ directly is unusual — presumably it closes
    # the Mongo connection; an explicit close() method would be clearer.
    mongo_db.__del__()

    # detail_url = 'https://www.tianyancha.com/company/22822/'
    #
    # res = get_response(detail_url)
    # # print(res.text)
    # if res:
    #     # extract the info
    #     all_infos = get_info(res)
    #     holding_info = get_holding_info(detail_url)
    #     all_item = {**all_infos, **holding_info}
    #     save_mongoDB(all_item, 'guangzhou')
