import asyncio

from loguru import logger

from com.arcfox.base.base_spider import ExecuteType
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.middleware.proxy_zhima_middleware import get_proxy, format_proxy
from com.arcfox.school.processor.yzy.yzy_school_major_processor import YZYSchoolMajorProcessor
from com.arcfox.school.spider.yzy.yzy_base_spider import YZYBaseSpider
from com.arcfox.util import async_request as requests
from com.arcfox.util.redis_key_manager import YZY_MAJOR_INFO_TASK_KEY
from com.arcfox.util.util import random_sleep


class YZYSchoolMajorSpider(YZYBaseSpider):
    """Spider that crawls the majors offered by each college from youzy.cn.

    Tasks (one per college, pulled from Redis) drive three JSON POST
    endpoints: all majors, featured majors, and key disciplines.  Parsed
    results are persisted via ``YZYSchoolMajorProcessor``; failed tasks are
    re-queued through the task manager.
    """

    def __init__(self):
        super().__init__()
        # Featured majors ("特色专业") endpoint
        self.url_profession = "https://uwf7de983aad7a717eb.youzy.cn/youzy.dms.basiclib.api.college.profession.bycollege.get"
        # All majors ("全部专业") endpoint
        self.url_all = "https://uwf7de983aad7a717eb.youzy.cn/youzy.dms.basiclib.api.college.department.bycollege.get"
        # Key disciplines ("重点学科") endpoint
        self.url_main_major = "https://uwf7de983aad7a717eb.youzy.cn/youzy.dms.basiclib.api.college.subjectgroup.query"
        self.task_manager = RedisTaskManager(YZY_MAJOR_INFO_TASK_KEY)
        self.processor = YZYSchoolMajorProcessor()
        # Current outbound proxy; None until change_proxy() succeeds.
        self.proxy = None

    def get_params(self, college_code):
        """Build the POST body shared by all three endpoints."""
        return {
            'collegeCode': college_code,
        }

    def get_headers(self, college_code):
        """Build request headers, including the per-request u-sign signature."""
        # NOTE: get_u_sgin is inherited from YZYBaseSpider (name kept as-is).
        return {
            "Host": "uwf7de983aad7a717eb.youzy.cn",
            "u-sign": self.get_u_sgin(f"collegeCode={college_code}"),
            "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/99.0.4844.51 Safari/537.36",
            "Referer": "https://pv4y-pc.youzy.cn/",
        }

    async def _pull_task(self):
        """Pull a batch of up to 5 tasks from Redis."""
        return await self.task_manager.pull_tasks(5), ExecuteType.FINISH

    async def _crawl_by_task(self, tasks):
        """Crawl all/featured majors for each task and persist the pair.

        Both responses must succeed before parsing; a failed crawl returns
        None and the task has already been re-queued by the crawl helper.
        """
        for task in tasks:
            all_resp = await self._crawl_all_major(task)
            # BUGFIX: guard before .json() — a failed crawl returns None,
            # and calling .json() on it would raise AttributeError.
            if all_resp:
                logger.info(all_resp.json())
            profession_resp = await self._crawl_profession_major(task)
            if profession_resp:
                logger.info(profession_resp.json())
            if all_resp and profession_resp:
                await self.processor.parse_and_save_major_info(task, all_resp.json(), profession_resp.json())
            # main_resp = await self._crawl_main_subject(task)
            # if main_resp:
            #     await self.processor.parse_and_save_main_subject(main_resp.json())

    async def change_proxy(self):
        """Acquire a fresh proxy, retrying up to 9 more times with a pause."""
        self.proxy = format_proxy(await get_proxy())
        retry_times = 1
        while retry_times < 10 and not self.proxy:
            await random_sleep(3)
            self.proxy = format_proxy(await get_proxy())
            retry_times += 1
        logger.info(f"切换ip成功->{self.proxy}")

    async def _request(self, url, params, headers, timeout=5):
        """POST through the current proxy; on a non-200, rotate the proxy and retry once."""
        resp = await requests.post_with_session(url=url, data=params, headers=headers, timeout=timeout,
                                                proxy=self.proxy)
        if resp.code != 200:
            await self.change_proxy()
            resp = await requests.post_with_session(url=url, data=params, headers=headers, timeout=timeout,
                                                    proxy=self.proxy)
        return resp

    async def _crawl(self, url, task):
        """Shared crawl step for one endpoint.

        :param url: endpoint to POST to
        :param task: task dict; its 'yzy_code' is the college code
        :return: the response on HTTP 200, otherwise None (task re-queued
                 via add_fail_tasks).
        """
        params = self.get_params(task['yzy_code'])
        resp = await self._request(url, params, self.get_headers(task['yzy_code']))
        if resp.code == 200:
            return resp
        await self.task_manager.add_fail_tasks(task)
        return None

    async def _crawl_all_major(self, task):
        """Crawl the college's full major list ("全部专业")."""
        return await self._crawl(self.url_all, task)

    async def _crawl_profession_major(self, task):
        """Crawl the college's featured majors ("特色专业")."""
        return await self._crawl(self.url_profession, task)

    async def _crawl_main_subject(self, task):
        """Crawl the college's key disciplines ("重点学科")."""
        return await self._crawl(self.url_main_major, task)


if __name__ == "__main__":
    # Script entry point: build the spider and run its crawl loop to completion.
    spider = YZYSchoolMajorSpider()
    asyncio.run(spider.start_crawl())
