import asyncio

from loguru import logger

from com.arcfox.base.base_spider import BaseSpider, ExecuteType
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.middleware.async_redis_middleware import open_redis
from com.arcfox.school.processor.universityimg.university_img_processor import UniversityImgProcessor
from com.arcfox.util.redis_key_manager import UNIVERSITY_SUB_URLS_KEY
from com.arcfox.util import async_request as requests


class UniversityImgSpider(BaseSpider):
    """Spider that downloads images for universities.

    Pulls university sub-page URL tasks from Redis, parses each page for
    image URLs, and downloads at most ``max_img_per_university`` images per
    university. Per-university download counts are tracked in
    ``img_count_map`` (keyed by the task's ``uid``), which the processor
    updates as images are saved.
    """

    def __init__(self):
        # NOTE(review): super().__init__() is deliberately left uncalled, as
        # in the original — confirm BaseSpider requires no base initialization.
        self.task_manager = RedisTaskManager(UNIVERSITY_SUB_URLS_KEY)
        self.processor = UniversityImgProcessor()
        # uid -> number of images saved so far for that university
        # (populated by processor.save_imgs, read by img_reach_max_size).
        self.img_count_map = {}
        self.max_img_per_university = 50

    @open_redis
    async def init_data_version(self, client):
        """Initialize the data version, delegating to BaseSpider with an
        open Redis client supplied by the ``open_redis`` decorator."""
        return await super().init_data_version(client)

    async def _pull_task(self):
        """Pull up to 10 tasks from the Redis queue.

        Returns:
            A ``(tasks, ExecuteType.FINISH)`` tuple, marking tasks as
            finish-on-completion.
        """
        return await self.task_manager.pull_tasks(10), ExecuteType.FINISH

    async def _crawl_by_task(self, tasks):
        """Fetch each task page, parse its image URLs, and download them.

        Stops downloading for a university once its image cap is reached.
        Non-200 page or image responses are skipped silently (best effort),
        matching the original behavior.
        """
        for task in tasks:
            resp = await requests.get_with_session(task['url'], self.get_default_header(), timeout=5)
            # Guard clause: skip unfetchable pages instead of nesting.
            if resp.code != 200:
                continue
            for img_url in self.processor.parse_all_imgs(task, resp.response):
                if self.img_reach_max_size(task):
                    break
                img_resp = await requests.get_with_session(img_url, self.get_default_header(), timeout=3)
                if img_resp.code == 200:
                    self.processor.save_imgs(task, img_url, img_resp.response, self.img_count_map)

    def img_reach_max_size(self, task):
        """Return True (and log) when the task's university hit the image cap."""
        # Single lookup with a default instead of `in` + index (two lookups);
        # a missing uid counts as 0, preserving the original semantics.
        if self.img_count_map.get(task['uid'], 0) >= self.max_img_per_university:
            logger.info(f"{task['uid']}图片已达到上限!")
            return True
        return False


if __name__ == "__main__":
    # Script entry point: build the spider and drive its crawl loop.
    spider = UniversityImgSpider()
    asyncio.run(spider.start_crawl())
