import asyncio

from loguru import logger

from com.arcfox.base.base_spider import ExecuteType
from com.arcfox.manager.redis_task_manager import RedisTaskManager
from com.arcfox.middleware.proxy_zhima_middleware import get_proxy, format_proxy, get_abyun_proxy
from com.arcfox.school.processor.handcollege.hand_major_score_processor import HandMajorScoreProcessor
from com.arcfox.school.spider.handcollege.hand_base_spider import HandBaseSpider
from com.arcfox.util import async_request as requests
from com.arcfox.util.redis_key_manager import HAND_UNIVERSITY_SCHOOL_LIST_KEY
from com.arcfox.util.util import random_sleep


class HandMajorScoreSpider(HandBaseSpider):
    """掌上高考 (gaokao.cn) major admission-score spider.

    Pulls school tasks from Redis, fetches each school's search
    conditions (province / year / type / batch combinations), then pages
    through the 2021 special-index score JSON and hands every page to
    ``HandMajorScoreProcessor`` for parsing and persistence.
    """

    def __init__(self, proxy=None):
        """
        :param proxy: optional pre-formatted proxy dict/URL; when None a
            proxy is acquired lazily via :meth:`change_proxy`.
        """
        super().__init__()
        # Endpoint shape: /{year}/{school_id}/{province}/{type}/{batch}/{page}.json
        self.base_url = "https://static-data.gaokao.cn/www/2.0/schoolspecialindex"
        self.processor = HandMajorScoreProcessor()
        self.task_manager = RedisTaskManager(HAND_UNIVERSITY_SCHOOL_LIST_KEY)
        self.proxy = proxy

    async def _pull_task(self):
        """Pull one task from Redis.

        :return: ``(tasks, ExecuteType.FINISH)`` — FINISH tells the base
            crawl loop this batch is complete after the tasks run.
        """
        tasks = await self.task_manager.pull_tasks(1)
        return tasks, ExecuteType.FINISH

    async def change_proxy(self):
        """Rotate to a fresh proxy.

        Tries the abyun pool first, then falls back to the zhima pool,
        sleeping ~3s between attempts, for at most 9 retries.
        """
        self.proxy = await get_abyun_proxy()
        retry_times = 1
        while retry_times < 10 and not self.proxy:
            await random_sleep(3)
            self.proxy = format_proxy(await get_proxy())
            retry_times += 1
        if self.proxy:
            logger.info(f"切换ip成功->{self.proxy}")
        else:
            # BUG FIX: previously logged "切换ip成功" unconditionally, even
            # when every retry failed and self.proxy was still empty.
            logger.warning("切换ip失败: no proxy obtained after retries")

    async def _crawl_by_task(self, tasks):
        """Crawl 2021 major-score pages for each task's school.

        For every (province_year_type, batch) combination in the school's
        search conditions, probes page 1 to learn the total count, then
        iterates all pages and delegates parsing to ``self.processor``.
        Successful school uids are appended to ``success_uid.txt``.
        """
        for task in tasks:
            conditions = await self.crawl_search_conditions(task)
            if conditions:
                batch_list = conditions.get('batch')
                if batch_list:
                    for key, batch_value in batch_list.items():
                        # key encodes "<province>_<year>_<type>".
                        # Hoisted out of the per-batch loop: split and the
                        # year filter are invariant for a given key.
                        data_list = key.split("_")
                        province = data_list[0]
                        year = data_list[1]
                        if year != '2021':
                            logger.info("跳过非2021年数据")
                            continue
                        # province map lookup is also invariant per key.
                        province_name = self.get_province_map()[str(province)]
                        for value in batch_value:
                            url = f"{self.base_url}/{year}/{task['school_id']}/{province}/{data_list[2]}/{value}/1.json"
                            resp = await requests.get_with_session(url, headers=self.get_default_header(),
                                                                   proxy=self.proxy, timeout=5)
                            # NOTE(review): a non-200 probe is silently skipped
                            # (no fail-task) — preserved from the original.
                            if resp.code == 200:
                                result_json = resp.json()
                                logger.info(result_json)
                                if result_json['code'] == '0000':
                                    total_size = result_json['data']['numFound']
                                    # BUG FIX: was float division
                                    # (total_size / 10 [+ 1]) truncated via
                                    # int(); ceiling integer division is
                                    # exact for any size.
                                    total_pages = (total_size + 9) // 10
                                    for i in range(1, total_pages + 1):
                                        page_url = f"{self.base_url}/{year}/{task['school_id']}/{province}/{data_list[2]}/{value}/{i}.json"
                                        page_resp = await requests.get_with_session(page_url,
                                                                                    headers=self.get_default_header(),
                                                                                    proxy=self.proxy, timeout=5)
                                        if page_resp.code == 200:
                                            # Parse once (was parsed twice per page).
                                            page_json = page_resp.json()
                                            if page_json['code'] == '0000':
                                                await self.processor.parse_and_save_data(task, province_name, year,
                                                                                         page_json)
                                            else:
                                                # Non-0000 code usually means the
                                                # proxy got blocked — rotate it.
                                                await self.change_proxy()
                                        else:
                                            await self.task_manager.add_fail_tasks(task)
                                        await random_sleep(0.3)
                                else:
                                    await self.change_proxy()
                with open("success_uid.txt", "a") as fa:
                    fa.write(task["uid"] + "\n")

    async def crawl_search_conditions(self, task):
        """Fetch the school's dic/specialscore.json search conditions.

        :param task: task dict; must contain ``school_id``.
        :return: the ``data.newsdata`` section of the response, or None on
            HTTP failure (task re-queued as failed) or parse error.
        """
        url = f"https://static-data.gaokao.cn/www/2.0/school/{task['school_id']}/dic/specialscore.json"
        resp = await requests.get_with_session(url, headers=self.get_default_header(), proxy=self.proxy, timeout=5)
        try:
            if resp.code == 200:
                logger.info(resp.response)
                result_data = resp.json()['data']
                return result_data['newsdata']
            await self.task_manager.add_fail_tasks(task)
            return None
        except Exception:
            # BUG FIX: was a bare `except:` that swallowed everything,
            # including asyncio.CancelledError, with no trace. Log the
            # error so failures are diagnosable; still return None.
            logger.exception(f"crawl_search_conditions failed, school_id={task.get('school_id')}")
            return None


if __name__ == "__main__":
    # Script entry point: build the spider and drive its crawl loop.
    spider = HandMajorScoreSpider()
    asyncio.run(spider.start_crawl())
