import asyncio
import re

import aiohttp
from loguru import logger

from com.arcfox.aqc.processor.aqc_processor import AqcProcessor
from com.arcfox.base.base_spider import ExecuteType, BaseSpider
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.middleware.async_redis_middleware import open_redis
from com.arcfox.middleware.proxy_zhima_middleware import get_proxy, format_proxy, get_abyun_proxy
from com.arcfox.source.cookie_jar import MyCookieJar
from com.arcfox.util import async_request as requests
from com.arcfox.util.redis_key_manager import HAND_UNIVERSITY_SCHOOL_LIST_KEY
from com.arcfox.util.util import random_sleep


class CompanyDetailSpider(BaseSpider):
    """Spider for company detail pages on aiqicha.baidu.com.

    Workflow: read company names (one per line) from ``../company.txt``,
    resolve each name to a ``pid`` via the site search, fetch the
    ``basicAllDataAjax`` detail payload, and hand it to :class:`AqcProcessor`
    for parsing and persistence.
    """

    def __init__(self, proxy=None):
        """Create the spider.

        :param proxy: optional initial proxy; ``change_proxy`` may replace it.
        """
        super().__init__()
        self.base_url = "https://aiqicha.baidu.com/"
        # Redis-backed task queue; currently unused by _pull_task but kept so
        # the crawl can be switched back to Redis-driven task pulling.
        self.task_manager = RedisTaskManager(HAND_UNIVERSITY_SCHOOL_LIST_KEY)
        self.processor = AqcProcessor()
        self.proxy = proxy

    @open_redis
    async def init_data_version(self, client):
        # Delegate to BaseSpider; @open_redis supplies the redis client.
        return await super().init_data_version(client)

    async def init_session(self):
        """(Re)create the aiohttp session, closing any previous one first.

        A fresh MyCookieJar is installed each time, so re-initialising also
        discards any cookies the site may have used to flag the session.
        """
        if self.session:
            await self.close_session()
        con = aiohttp.TCPConnector(ssl=False)
        self.session = aiohttp.ClientSession(
            connector=con,
            headers=self.get_headers(),
            cookies=self.get_cookies(),
            cookie_jar=MyCookieJar(),
        )

    def get_headers(self):
        """Return the default request headers used for every session."""
        headers = {
            "Accept-Encoding": "gzip, deflate, br",
            'User-Agent': 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/116.0.0.0 Safari/537.36',
        }
        logger.info(f"Headers: {headers}")
        return headers

    def get_cookies(self):
        """Return the seed cookies installed into each new session."""
        cookies = {"BAIDUID_BFESS": '02CF3D4FD363AB9D68DDBC94D084E822:FG=1'}
        logger.info(f"Cookies: {cookies}")
        return cookies

    def get_tasks(self):
        """Read company names from ``../company.txt``, one per line.

        Lines are stripped of surrounding whitespace and blank lines are
        skipped, so a trailing newline at end-of-file (or Windows "\\r\\n"
        endings) no longer yields empty/garbage task names.
        """
        with open("../company.txt", "r", encoding="utf-8") as f:
            return [line.strip() for line in f if line.strip()]

    async def _pull_task(self):
        """Return ``(tasks, execute_type)`` for the base crawl loop.

        Currently file-driven; the Redis-driven alternative is
        ``await self.task_manager.pull_tasks(1)``.
        """
        return self.get_tasks(), ExecuteType.FINISH

    async def change_proxy(self):
        """Acquire a fresh proxy, falling back to the zhima source on failure.

        Tries the abuyun source once, then retries the zhima source up to
        nine times with a short random sleep between attempts.
        """
        self.proxy = await get_abyun_proxy()
        retry_times = 1
        while retry_times < 10 and not self.proxy:
            await random_sleep(3)
            self.proxy = format_proxy(await get_proxy())
            retry_times += 1
        if self.proxy:
            logger.info(f"切换ip成功->{self.proxy}")
        else:
            # Bug fix: previously a success message was logged even when
            # every attempt came back empty.
            logger.warning("failed to obtain a proxy after retries")

    async def search(self, company_name):
        """Search the site for *company_name* and return its pid, or None."""
        url = f'{self.base_url}s?q={company_name}&t=0'
        response = await requests.get(self.session, url, timeout=5)
        if response.code == 200:
            logger.info(response.response)
            return self.parse_id(response.response, company_name)
        return None

    def parse_id(self, response, company_name):
        """Extract the first ``pid`` from a search-result page body.

        :param response: raw HTML/JS text of the search response.
        :param company_name: used only for logging.
        :return: pid string with quotes removed, or None if not found.
        """
        content = re.findall(r'{"pid":(.*?),"entName"', response)
        if content:
            company_pid = content[0].replace("\"", "")
            logger.info(f"[{company_name}]获取company_pid: {company_pid}")
            return company_pid
        return None

    async def get_company_detail(self, company_name):
        """Resolve *company_name* to a pid and fetch its detail payload.

        :return: raw detail response body, or None on any failure.
        """
        company_id = await self.search(company_name)
        if company_id:
            url = f'{self.base_url}detail/basicAllDataAjax?pid={company_id}'
            response = await requests.get(self.session, url, timeout=5)
            if response.code == 200:
                return response.response
        return None

    async def _crawl_by_task(self, tasks):
        """Fetch and persist the detail payload for each company name."""
        await self.init_session()
        for task in tasks:
            result = await self.get_company_detail(task)
            if result:
                await self.processor.parse_and_save_data(result)
            else:
                # A failure usually means the session was flagged/stale;
                # rebuild it (fresh cookie jar) before the next task.
                await self.init_session()
                # await self.task_manager.add_fail_tasks(task)


if __name__ == "__main__":
    # Entry point: build the spider and drive its async crawl loop.
    spider = CompanyDetailSpider()
    asyncio.run(spider.start_crawl())
