import asyncio

from com.arcfox.base.base_spider import ExecuteType
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.school.parse.jzy_major_parse import MajorParser
from com.arcfox.school.processor.jzy.jzy_major_processor import MajorProcessor
from com.arcfox.school.spider.jzy.jzy_base_spider import JZYBaseSpider
from com.arcfox.util import async_request as requests

'''
    Juzhiyuan (jzy) major detail crawler.
    NOTE(review): the original comment said "聚志愿院校库列表抓取" (school-list
    crawl), but this spider pulls major-detail pages — likely a copy-paste
    leftover from the school-list module; confirm intent with the author.
'''


class MajorDetailSpider(JZYBaseSpider):
    """Spider that fetches Juzhiyuan major-detail pages for queued tasks.

    Tasks are pulled from Redis (queue ``TASK_MAJOR_LIST_JZY``); each task
    carries an ``href`` and a display ``text``. For every task two pages are
    requested — the detail page and the graduate-destination ("zybyqx") page —
    parsed together, and stored. Tasks whose requests fail are re-queued as
    failed tasks.
    """

    def __init__(self):
        super().__init__()
        # Collaborators: persistence, HTML parsing, and the Redis task queue.
        self.processor = MajorProcessor()
        self.parser = MajorParser()
        self.manager = RedisTaskManager(self.TASK_MAJOR_LIST_JZY)

    async def _pull_task(self):
        # Pull up to 10 tasks per batch; FINISH tells the base class how to
        # treat an exhausted queue.
        batch = await self.manager.pull_tasks(10)
        return batch, ExecuteType.FINISH

    async def _crawl_by_task(self, tasks):
        """Fetch, parse and store the detail pages for each queued task.

        A task whose HTTP requests do not both return 200 is pushed back to
        the fail queue instead of being stored.
        """
        for item in tasks:
            href = item["href"]
            # Major code is the file stem of the href, e.g. ".../1234.html" -> "1234".
            code = href.split("/")[-1].split(".")[0]
            await self.init_session()
            detail_resp = await requests.get(self.session, url=self.host + "/majors" + href,
                                             headers=self.get_default_header())
            destiny_resp = await requests.get(self.session, url=self.host + "/majors/zybyqx" + href,
                                              headers=self.get_default_header())
            if detail_resp.code != 200 or destiny_resp.code != 200:
                # Either request failed — re-queue the task and move on.
                await self.manager.add_fail_tasks(item)
                continue
            parsed = self.parser.parse_major_detal(detail_resp.response, destiny_resp.response,
                                                   item["text"], code)
            await self.processor.store_major_detail(parsed, self.data_version)


if __name__ == "__main__":
    # Script entry point: build the spider and drive the async crawl loop
    # to completion on a fresh event loop.
    spider = MajorDetailSpider()
    asyncio.run(spider.start_crawl())
