import asyncio

from loguru import logger

from com.arcfox.base.base_spider import ExecuteType
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.school.processor.jzy.jzy_major_score_processor import MajorScoreProcessor
from com.arcfox.school.spider.jzy.jzy_base_spider import JZYBaseSpider
from com.arcfox.util import async_request as requests
from com.arcfox.util.util import random_sleep

'''
    JuZhiYuan (聚志愿) major admission-score line spider
'''


class MajorScoreSpider(JZYBaseSpider):
    """JuZhiYuan major admission-score spider.

    Pulls school tasks from a Redis queue and fetches per-province major
    score data from the ``ajax_major_scores`` endpoint, delegating storage
    to :class:`MajorScoreProcessor`.
    """

    def __init__(self):
        super().__init__()
        self.url = f"{self.host}/index/school/ajax_major_scores"
        self.processor = MajorScoreProcessor()
        self.task_manager = RedisTaskManager(self.TASK_KEY_MAJOR_SCORE)

    async def _pull_task(self):
        """Pull up to 5 tasks from Redis; FINISH signals the batch mode."""
        return await self.task_manager.pull_tasks(5), ExecuteType.FINISH

    async def _crawl_by_task(self, tasks):
        """Fetch major score data for each task across all provinces.

        Failed requests push the task onto the fail queue; a -200/-100
        response code is treated as IP rate-limiting (risk control) and
        triggers a 10-minute back-off before continuing.
        """
        await self.init_session()
        # The province map is constant for the whole run; build it once
        # instead of rebuilding it for every task (hoisted out of the loop).
        province_map = self.get_province_map()
        for task in tasks:
            try:
                for key in province_map:
                    # Omitting "year" returns data for all years;
                    # kemu=0 means query all subjects.
                    params = {"jzy_code": task["id"], "province": province_map[key], "kemu": 0}
                    response = await requests.get(self.session, self.url, params=params)
                    if response.code == 200:
                        await self.processor.store_major_score(task, response.json())
                        await random_sleep(0.2)
                    else:
                        await self.task_manager.add_fail_tasks(task)
                        if response.code in (-200, -100):
                            logger.info("ip被风控, 10分钟后重试!")
                            await random_sleep(60 * 10)
            except Exception as e:
                # Best-effort crawl: log and requeue the task rather than
                # aborting the whole batch.
                logger.exception(e)
                await self.task_manager.add_fail_tasks(task)


if __name__ == "__main__":
    # Script entry point: build the spider and drive the crawl loop.
    spider = MajorScoreSpider()
    asyncio.run(spider.start_crawl())
