import asyncio

from com.arcfox.base.base_spider import BaseSpider, ExecuteType
from com.arcfox.middleware.async_redis_middleware import open_redis
from com.arcfox.school.processor.eol.eol_school_title_processor import EolSchoolTitleProcessor
from com.arcfox.util import async_request as requests


class EolSchoolTitleSpider(BaseSpider):
    """Spider for EOL (daxue.eol.cn) school-title list pages.

    Crawls a fixed set of list pages (985, 211, double-first-class, etc.)
    and delegates each fetched page to ``EolSchoolTitleProcessor`` for
    parsing and persistence.
    """

    # Per-request timeout in seconds for the list-page fetches
    # (hoisted from the hard-coded literal in _crawl_by_task).
    REQUEST_TIMEOUT = 5

    def __init__(self):
        # Fix: run BaseSpider's own initialization before attaching state.
        # The original override silently skipped it.
        # NOTE(review): assumes BaseSpider.__init__ takes no required
        # arguments — confirm against the base class.
        super().__init__()
        self.processor = EolSchoolTitleProcessor()

    @open_redis
    async def init_data_version(self, client):
        """No data-version bookkeeping is needed for this spider.

        ``client`` is the redis client injected by the ``@open_redis``
        decorator; it is intentionally unused here.
        """
        pass

    async def _pull_task(self):
        """Return the static list of title pages to crawl.

        Returns:
            tuple: ``(tasks, ExecuteType.ONCE)`` where each task is a dict
            with the list-page ``url`` and the school ``title`` tag the
            page represents. ``ONCE`` marks this as a one-shot crawl.
        """
        tasks = [
            {"url": "https://daxue.eol.cn/985.shtml", "title": "985"},
            {"url": "https://daxue.eol.cn/211.shtml", "title": "211"},
            {"url": "https://daxue.eol.cn/syl.shtml", "title": "双一流"},
            {"url": "https://daxue.eol.cn/bushu.shtml", "title": "中央部委直属"},
            {"url": "https://daxue.eol.cn/jybzs.shtml", "title": "教育部直属"},
            {"url": "https://daxue.eol.cn/sfgz.shtml", "title": "国家示范性高职"},
            {"url": "https://daxue.eol.cn/gjgz.shtml", "title": "国家骨干高职"},
        ]
        return tasks, ExecuteType.ONCE

    async def _crawl_by_task(self, tasks):
        """Fetch each task's page and hand the body to the processor.

        Non-200 responses are skipped silently (best-effort crawl, no
        retry/logging) — preserved from the original behavior.

        Args:
            tasks: iterable of dicts with ``url`` and ``title`` keys, as
                produced by :meth:`_pull_task`.
        """
        for task in tasks:
            resp = await requests.get_with_session(
                task['url'],
                headers=self.get_default_header(),
                timeout=self.REQUEST_TIMEOUT,
            )
            if resp.code == 200:
                await self.processor.parse_and_save_data(task, resp.response)


if __name__ == "__main__":
    # Script entry point: build the spider and drive its crawl to completion.
    spider = EolSchoolTitleSpider()
    asyncio.run(spider.start_crawl())
