import asyncio

from loguru import logger

from com.arcfox.base.base_spider import ExecuteType
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.middleware.proxy_zhima_middleware import get_proxy, format_proxy, get_abyun_proxy
from com.arcfox.school.processor.handcollege.hand_school_list_processor import HandSchoolListProcessor
from com.arcfox.school.spider.handcollege.hand_base_spider import HandBaseSpider
from com.arcfox.util import async_request as requests
from com.arcfox.util.redis_key_manager import HAND_UNIVERSITY_SCHOOL_LIST_KEY
from com.arcfox.util.util import random_sleep


class HandSchoolListSpider(HandBaseSpider):
    def __init__(self):
        """
        Spider for the "Hand-held Gaokao" (掌上高考) university list.

        Crawls the paginated school-list API and feeds each page to
        HandSchoolListProcessor (which also updates school address,
        campus area and school introduction downstream).
        """
        super().__init__()
        self.url = "https://api.eol.cn/web/api/"
        self.processor = HandSchoolListProcessor()
        self.task_manager = RedisTaskManager(HAND_UNIVERSITY_SCHOOL_LIST_KEY)
        # Current outbound proxy; None until change_proxy() first runs.
        self.proxy = None

    async def _pull_task(self):
        # No per-item task queue for the list crawl: run once per invocation.
        return None, ExecuteType.ONCE

    def get_params(self, page_num=1):
        """Build the POST form for one page (20 schools) of the list API.

        :param page_num: 1-based page index to request.
        :return: dict of form fields expected by the eol.cn gateway.
        """
        return {
            'keyword': '',
            'page': page_num,
            'signsafe': self.get_sign_safe(self.url),
            'size': 20,
            'sort': 'view_total',
            'uri': 'apidata/api/gk/school/lists',
        }

    async def change_proxy(self):
        """Rotate to a fresh proxy (called after a blocked/failed request)."""
        self.proxy = await get_abyun_proxy()
        logger.info(f"切换ip成功->{self.proxy}")

    async def _post_school_list(self, page_num):
        # Single POST against the list endpoint with the current proxy.
        return await requests.post_with_session(
            self.url, data=self.get_params(page_num), proxy=self.proxy,
            headers=self.get_default_header(), timeout=3)

    async def request_school_list(self, page_num):
        """Fetch one page of the school list, retrying once behind a new proxy.

        A retry is triggered either by a transport failure (HTTP code != 200)
        or by an API-level rejection (business code != '0000', typically an
        IP that got rate-limited). Short-circuit ordering guarantees json()
        is never called on a failed response.

        :param page_num: 1-based page index.
        :return: the (possibly retried) response object.
        """
        resp = await self._post_school_list(page_num)
        if resp.code != 200 or resp.json()['code'] != '0000':
            await self.change_proxy()
            resp = await self._post_school_list(page_num)
        return resp

    async def _crawl_by_task(self, task):
        """Crawl the full school list.

        Fetches page 1 to learn the total record count, then walks every
        page and hands each payload to the processor, pausing briefly
        between pages to stay polite.
        """
        resp = await self.request_school_list(1)
        if resp.code != 200:
            return
        total_size = resp.json()['data']['numFound']
        # Exact ceiling division: 20 records per page (avoids the float
        # division + int() correction the old code relied on).
        total_pages = (total_size + 19) // 20
        for page in range(1, total_pages + 1):
            page_resp = await self.request_school_list(page)
            if page_resp.code == 200:
                payload = page_resp.json()
                logger.info(payload)
                await self.processor.parse_and_add_task(payload)
            await random_sleep(0.2)
        # TODO: the per-school detail pass (crawl_school_address /
        # crawl_school_summary driven off task_manager.pull_tasks) is
        # currently disabled; re-enable once the task flow is finalized.

    async def crawl_school_address(self, task):
        """Fetch and persist one school's basic info (address, area, ...).

        On a non-200 response the task is pushed to the fail queue for retry.

        :param task: dict carrying at least 'school_id'.
        """
        url = f"https://static-data.gaokao.cn/www/2.0/school/{task['school_id']}/info.json"
        resp = await requests.get_with_session(url, headers=self.get_default_header(), timeout=3)
        if resp.code == 200:
            data = resp.json()
            logger.info(data)
            await self.processor.parse_and_save_school_address(task, data)
            await random_sleep(0.2)
        else:
            await self.task_manager.add_fail_tasks(task)

    async def crawl_school_summary(self, task):
        """Fetch and persist one school's introduction (detail/69000.json).

        On a non-200 response the task is pushed to the fail queue for retry.

        :param task: dict carrying at least 'school_id'.
        """
        url = f"https://static-data.gaokao.cn/www/2.0/school/{task['school_id']}/detail/69000.json"
        resp = await requests.get_with_session(url, headers=self.get_default_header(), timeout=3)
        if resp.code == 200:
            data = resp.json()
            logger.info(data)
            # Some schools have no summary; the endpoint then returns an
            # empty body, which we skip without marking the task failed.
            if data:
                await self.processor.parse_and_save_school_summary(task, data)
                await random_sleep(0.2)
        else:
            await self.task_manager.add_fail_tasks(task)


if __name__ == "__main__":
    # Entry point: run the one-shot school-list crawl to completion.
    spider = HandSchoolListSpider()
    asyncio.run(spider.start_crawl())
