import asyncio

from com.arcfox.base.base_spider import BaseSpider, ExecuteType
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.middleware.async_redis_middleware import open_redis
from com.arcfox.util import async_request as requests
from com.arcfox.school.processor.universityimg.university_img_processor import UniversityImgProcessor
from com.arcfox.util.redis_key_manager import UNIVERSITY_URLS_KEY


class UniversityUrlSpider(BaseSpider):
    """Spider that pulls university URL tasks from Redis and crawls each one.

    Tasks are fetched in batches from the Redis list/queue identified by
    ``UNIVERSITY_URLS_KEY``. Successful (HTTP 200) responses are handed to
    ``UniversityImgProcessor`` for parsing and persistence; failed requests
    are pushed back via the task manager as failure tasks.
    """

    def __init__(self):
        # Initialize base-spider state first (was missing).
        # NOTE(review): assumes BaseSpider.__init__ takes no arguments — confirm.
        super().__init__()
        self.processor = UniversityImgProcessor()
        self.task_manager = RedisTaskManager(UNIVERSITY_URLS_KEY)

    @open_redis
    async def init_data_version(self, client):
        """Delegate to the base implementation; @open_redis injects `client`."""
        return await super().init_data_version(client)

    async def _pull_task(self):
        """Pull up to 10 tasks from Redis.

        Returns:
            tuple: (tasks, ExecuteType.FINISH) — the batch of task dicts and
            the execute-type flag consumed by the BaseSpider crawl loop.
        """
        return await self.task_manager.pull_tasks(10), ExecuteType.FINISH

    async def _crawl_by_task(self, tasks):
        """Fetch each task's URL and dispatch the response.

        Tasks with a missing or empty 'url' value are skipped. Non-200
        responses are recorded as failed tasks for later retry.

        Args:
            tasks: iterable of task dicts (each expected to carry a 'url' key),
                or None/empty when the queue is drained.
        """
        # `tasks or ()` guards against a None batch from _pull_task.
        for task in tasks or ():
            # .get() avoids a KeyError on malformed tasks; the falsy check
            # below already handles the missing/empty case.
            url = task.get('url')
            if not url:
                continue
            resp = await requests.get_with_session(url, self.get_default_header())
            if resp.code == 200:
                await self.processor.parse_and_save_url(task, resp.response)
            else:
                await self.task_manager.add_fail_tasks(task)

if __name__ == "__main__":
    asyncio.run(UniversityUrlSpider().start_crawl())
