import asyncio

from loguru import logger

from com.arcfox.base.base_spider import ExecuteType
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.school.processor.jzy.jzy_school_detail_processor import SchoolDetailProcessor
from com.arcfox.school.processor.jzy.jzy_school_list_processor import SchoolListProcessor
from com.arcfox.school.spider.jzy.jzy_base_spider import JZYBaseSpider
from com.arcfox.util import async_request as requests
from com.arcfox.util.util import random_sleep

'''
    Juzhiyuan (jzy) school-database detail crawler: fetches each school's
    detail page and logo, then stores the parsed result.
'''


class SchoolDetailSpider(JZYBaseSpider):
    """Crawl school detail pages from the jzy school database.

    Pulls previously-queued school tasks from Redis, fetches each school's
    detail page and its logo image, and hands both responses to
    ``SchoolDetailProcessor`` for parsing and storage. Failed tasks are
    pushed back onto the fail queue for later retry.
    """

    def __init__(self):
        super().__init__()
        # NOTE(review): these two look like leftovers from the list spider;
        # kept in case the base class reads them — confirm before removing.
        self.start_url = f"{self.host}/school/list_1.html"
        self.page_size = 20
        # Tasks originate from the school-list queue filled by the list spider.
        self.task_manager = RedisTaskManager(self.TASK_KEY_SCHOOL_LIST)
        # Fix: the original assigned SchoolListProcessor() first and then
        # immediately overwrote it — the dead construction is removed.
        self.processor = SchoolDetailProcessor()

    async def _pull_task(self):
        """Pull up to 100 pending tasks from Redis.

        Returns:
            tuple: (tasks, ExecuteType.FINISH) — FINISH signals the base
            crawl loop to stop once the queue is exhausted.
        """
        return await self.task_manager.pull_tasks(100), ExecuteType.FINISH

    async def _crawl_by_task(self, tasks):
        """Fetch the detail page and logo for each task and store the result.

        Any task whose page request returns a non-200 code, or raises,
        is re-queued via ``add_fail_tasks``.
        """
        await self.init_session()
        for task in tasks:
            url = f"{self.host}/school/{task['id']}.html"
            try:
                resp = await requests.get(self.session, url=url, headers=self.get_default_header())
                if resp.code == 200:
                    # The logo is fetched separately; its status is not
                    # checked here — the processor receives it as-is.
                    logo_resp = await requests.get(self.session, url=task['logo_url'], headers=self.get_default_header())
                    await self.processor.parse_and_store_school_detail(task, resp.response, logo_resp.response,
                                                                       self.data_version)
                else:
                    await self.task_manager.add_fail_tasks(task)
            except Exception as e:
                # Best-effort: log and re-queue so one bad task doesn't
                # abort the whole batch.
                logger.exception(e)
                await self.task_manager.add_fail_tasks(task)
            # Small jitter between requests to avoid hammering the site.
            await random_sleep(0.1)


if __name__ == "__main__":
    # Script entry point: build the spider and drive its crawl loop.
    spider = SchoolDetailSpider()
    asyncio.run(spider.start_crawl())
