import asyncio

from loguru import logger

from com.arcfox.base.base_spider import ExecuteType
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.school.parse.jzy_school_major_parse import MajorSchoolParser
from com.arcfox.school.processor.jzy.jzy_school_major_processor import SchoolMajorProcessor
from com.arcfox.school.spider.jzy.jzy_base_spider import JZYBaseSpider
from com.arcfox.util import async_request as requests
from com.arcfox.util.util import random_sleep

'''
    JuZhiYuan (聚志愿) school database: crawls the per-school major list.
'''


class SchoolMajorListSpider(JZYBaseSpider):
    """Crawls the per-school major (专业) list pages from the JuZhiYuan site.

    Workflow: pull school tasks from a Redis-backed queue, fetch each
    school's major page, parse it and persist the result. Tasks whose
    request fails are pushed onto the fail queue for later retry.
    """

    def __init__(self):
        super().__init__()
        # Persists parsed major records.
        self.processor = SchoolMajorProcessor()
        # Parses the school-major HTML page into records.
        self.parser = MajorSchoolParser()
        # Redis queue keyed by the school-major-list task name
        # (constant presumably inherited from JZYBaseSpider — not visible here).
        self.task_manager = RedisTaskManager(self.TASK_SCHOOL_MAJOR_LIST_JZY)

    async def _pull_task(self):
        """Pull a batch of up to 100 tasks; ExecuteType.FINISH ends the run
        once the queue is drained (semantics assumed from base class — verify)."""
        return await self.task_manager.pull_tasks(100), ExecuteType.FINISH

    async def _crawl_by_task(self, tasks):
        """Fetch, parse and store the major list for each school task.

        Non-200 responses and exceptions both re-queue the task via
        ``add_fail_tasks``; a short random sleep between requests
        throttles the crawl.
        """
        await self.init_session()
        for task in tasks:
            url = f"{self.host}/school/zhuanye/{task['id']}.html"
            try:
                resp = await requests.get(self.session, url=url, headers=self.get_default_header())
                if resp.code == 200:
                    major_list = self.parser.parse_school_major_list(resp.response, task["school_name"])
                    await self.processor.store_school_major(major_list, self.data_version)
                else:
                    await self.task_manager.add_fail_tasks(task)
            except Exception as e:
                # Broad catch is deliberate: one bad task must not abort the batch.
                logger.exception(e)
                await self.task_manager.add_fail_tasks(task)
            await random_sleep(1)


if __name__ == "__main__":
    # Script entry point: build the spider and drive its crawl loop to completion.
    spider = SchoolMajorListSpider()
    asyncio.run(spider.start_crawl())
