import asyncio
import json
import random
import re
import threading
import time
import hashlib
import os
import sys
sys.path.append(os.path.join(os.path.dirname(__file__), '/root/work/arcfox-crawler/'))
from com.arcfox.middleware import async_mysql_middleware as db
import requests
from loguru import logger
from aqc_parser import parse_data
import os
import sys

from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.util.muilty_coroutine_util import concurrency


class Spider:
    """Crawler for company details on aiqicha.baidu.com.

    Workflow: pull company-name tasks from a Redis sorted set, search the
    site for the company's pid, fetch and parse the detail page, then
    upsert the result into MySQL (tbl_company_task). Requests go through a
    rotating HTTP proxy; when the site's risk control triggers, the proxy
    is replaced and the task is parked in the fail queue.
    """

    def __init__(self):
        # Shared requests session so site cookies persist across calls.
        self.session = requests.session()
        self.task_manager = RedisTaskManager("company_list_sort_set")
        self.headers = {
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36',
            'Host': 'aiqicha.baidu.com',
            'Accept-Encoding': 'gzip, deflate, br',
            'Accept': '*/*',
            'Connection': 'keep-alive'
        }
        # Blocks until a proxy that can initialize cookies is obtained.
        self.proxy = self.get_proxy()

    def parse_id(self, response, company_name):
        """Extract the company pid from a search-result page.

        :param response: raw text of the search page.
        :param company_name: company name, used only for logging.
        :return: pid string on success; 0 when the page rendered but held
                 no result; -1 when the page looks like a risk-control block.
        """
        content = re.findall(r'{"pid":(.*?),"entName"', response)
        if content:
            company_pid = content[0].replace("\"", "")
            logger.info(f"[{company_name}]获取company_pid: {company_pid}")
            return company_pid
        if "resultList" in response:
            # Result container rendered but empty: genuinely no data.
            return 0
        # Result container missing entirely: blocked by risk control.
        return -1

    def get_md5(self, data_str):
        """Return the hex MD5 digest of *data_str*.

        Tries "unicode_escape" encoding first, falling back to UTF-8.
        NOTE(review): encoding a str with "unicode_escape" should not
        raise, so the fallback is likely dead code -- kept for safety.
        """
        m = hashlib.md5()
        try:
            m.update(data_str.encode("unicode_escape"))
        except Exception:
            m.update(data_str.encode("utf-8"))
        return m.hexdigest()

    def get_proxy(self):
        """Fetch a fresh HTTP proxy and verify it against the target site.

        Resets the session, then loops until a proxy from the pool yields
        cookies from the site's home page. Intentionally blocking: called
        from sync context (``__init__``) and on risk-control rotation.

        :return: a ``proxies`` dict usable with requests.
        """
        self.session = requests.session()
        while True:
            url = "http://webapi.http.zhimacangku.com/getip?num=1&type=1&pro=&city=0&yys=0&port=11&pack=29238&ts=0&ys=0&cs=0&lb=1&sb=0&pb=4&mr=1&regions="
            try:
                response = requests.get(url, timeout=3)
                proxy_str = response.text.replace("\r\n", "")
                logger.info(f"proxy: {proxy_str}")
                proxy = {"http": f"http://{proxy_str}"}
                # Probe the home page; a working proxy must yield cookies.
                resp = self.session.get("https://aiqicha.baidu.com/", headers=self.headers, timeout=3, proxies=proxy)
                if not resp.cookies:
                    logger.warning("cookie初始化失败!")
                    time.sleep(3)
                    continue
                logger.info(resp.cookies)
                return proxy
            except Exception as e:
                logger.error(e)
                time.sleep(3)

    def search(self, company_name, cookies):
        """Search the site for *company_name* and return its pid.

        :return: pid string, 0 (no result) or -1 (blocked / request failed).
        """
        params = {
            'q': company_name,
            't': '0',
        }
        url = 'https://aiqicha.baidu.com/s'
        try:
            response = self.session.get(url, params=params, headers=self.headers, timeout=5,
                                        proxies=self.proxy)
            if response.status_code == 200:
                return self.parse_id(response.text, company_name)
        except Exception as e:
            # Timeout / proxy failure is treated like a block so the caller
            # rotates the proxy. Include the exception for diagnostics.
            logger.warning(f"获取id超时换代理: {e}")
        return -1

    async def get_company_detail(self, company_name, cookies):
        """Fetch, parse and persist the detail record for *company_name*.

        :return: 1 on success; 0 when the search found nothing; -1 on
                 risk-control block, parse failure or request timeout.
        """
        company_id = self.search(company_name, cookies)
        if company_id != 0 and company_id != -1:
            try:
                response = self.session.get('https://aiqicha.baidu.com/detail/basicAllDataAjax',
                                            params={"pid": company_id},
                                            headers=self.headers, timeout=5, proxies=self.proxy)
                if response.status_code == 200:
                    result = parse_data(response)
                    if result:
                        logger.info(result)
                        await self.save_data(result)
                        return 1
                    return -1
                # NOTE(review): a non-200 status falls through and returns
                # the pid, which run() logs as success -- confirm intent.
            except Exception as e:
                logger.warning(f"获取详情超时换代理: {e}")
                # BUGFIX: previously fell through and returned the pid, so
                # run() logged success and never rotated the proxy.
                return -1
        return company_id

    async def save_data(self, result):
        """Build a task row from a parsed record and queue it for saving.

        NOTE(review): __save_db reads ``params['task']``; the @concurrency
        decorator presumably fans the ``tasks`` list out into per-item
        ``task`` calls -- confirm against muilty_coroutine_util.
        """
        task = {
            "company_name": result['company_name'],
            "company_info": json.dumps(result, ensure_ascii=False),
            "task_status": 1,
            "company_unicode": result['credit_no']
        }
        await self.__save_db(tasks=[task])

    @concurrency(5)
    async def __save_db(self, **params):
        """Upsert one task row into tbl_company_task (update if the credit
        code already exists, insert otherwise)."""
        task = params['task']
        mapping = db.MysqlMapping("tbl_company_task")
        if await self.check_exist(mapping, task):
            logger.info("数据已存在, 更新!")
            await mapping.update({"company_info": task["company_info"], "task_status": 1, "last_update_time": "now()"},
                                 {"company_name": task['company_name'], "company_unicode": task['company_unicode']})
        else:
            logger.info("数据不存在, 新增!")
            await mapping.insert(task)

    async def check_exist(self, mapping, data):
        """Return the row id of an existing record with the same
        company_unicode, or None when absent."""
        condition = {
            "company_unicode": data['company_unicode']
        }
        result = await mapping.query(condition, ["id"])
        return result[0][0] if result else None

    def get_cookie(self):
        """Pick a cookie line from cookies.txt.

        NOTE(review): the randomly chosen line is currently discarded and a
        hard-coded BAIDUID_BFESS cookie is returned instead -- confirm
        whether this is intentional.
        """
        file_name = "cookies.txt"
        with open(file_name, 'r') as f:
            cookie_list = f.readlines()
        if not cookie_list:
            logger.info("暂无可用cookie")
            return None
        idx = random.choice(range(len(cookie_list)))
        cookie = cookie_list[idx].replace("\n", "")
        cookies = {
            'BAIDUID_BFESS': "02CF3D4FD363AB9D68DDBC94D084E822:FG=1",
        }
        logger.info(cookies)
        return cookies

    async def get_redis_cookie(self):
        """Pull one cookie from the ``aqc_cookies`` Redis queue, or None."""
        task_manager = RedisTaskManager("aqc_cookies")
        cookies = await task_manager.pull_tasks(1)
        if cookies:
            logger.info(f"读取cookie: {cookies}")
            return cookies[0]

    async def get_tasks(self):
        """Pull one batch of company tasks from the Redis sorted set."""
        tasks = await self.task_manager.pull_tasks(1)
        logger.info(tasks)
        return tasks

    async def run(self):
        """Main loop: pull tasks and process each until interrupted."""
        cookies = None  # cookie rotation currently disabled
        while True:
            tasks = await self.get_tasks()
            if not tasks:
                # BUGFIX: use asyncio.sleep, not time.sleep -- a blocking
                # sleep inside a coroutine stalls the whole event loop.
                await asyncio.sleep(10)
                logger.info("暂无任务, 休眠10秒重试")
                continue
            for task in tasks:
                result = await self.get_company_detail(task['companyName'], cookies)
                if result == -1:
                    # Risk control: park the task and rotate the proxy.
                    await self.task_manager.add_fail_tasks([task])
                    logger.warning(f"风控了!!![{self.proxy}]")
                    self.proxy = self.get_proxy()
                elif result == 0:
                    # No data for this company: still mark the task done.
                    mapping = db.MysqlMapping("tbl_company_task")
                    await mapping.update({"task_status": 1, "last_update_time": "now()"},
                                         {"company_name": task['companyName'],
                                          "company_unicode": task['companyUnicode']})
                    logger.info(f"未查询到数据[{self.proxy}]")
                else:
                    logger.info(f"[{task['companyName']}]数据更新成功[{self.proxy}]")
                # Throttle between companies without blocking the loop.
                await asyncio.sleep(2)


if __name__ == "__main__":
    # Script entry point: build the spider and drive its async main loop.
    spider = Spider()
    asyncio.run(spider.run())
