from loguru import logger

from com.arcfox.middleware import async_mysql_middleware as db
from com.arcfox.school.processor.base_processor import BaseProcessor
from com.arcfox.util.muilty_coroutine_util import concurrency


class YiDuSchoolProcessor(BaseProcessor):
    """Processor for the "易度" (YiDu) university-ranking data source.

    Parses ranking entries out of a YiDu response payload and persists each
    one into ``tb_university_ranking``, skipping rows that already exist.
    """

    def __init__(self):
        # Source label stamped onto every saved row and used in the
        # duplicate-detection query.
        self.SOURCE = "易度"

    async def parse_and_save_data(self, result_json):
        """Extract ranking records from *result_json* and persist them.

        Parameters:
            result_json: decoded YiDu response; expected to hold a list of
                ranking entries under ``result.items``, each with ``college``
                (``cnName``, ``categories``), ``year``, ``rank`` and
                ``diffScore`` fields.
        """
        records = [
            {
                "university_uid": self.generate_school_uid(entry['college']['cnName']),
                "university_name": entry['college']['cnName'],
                "ranking_year": entry['year'],
                "ranking_sort": entry['rank'],
                "ranking_score": entry['diffScore'],
                "university_type": " ".join(entry['college']['categories']),
                "ranking_source": self.SOURCE,
            }
            for entry in result_json['result']['items']
        ]
        await self.__save_data(tasks=records)

    @concurrency(5)
    async def __save_data(self, **params):
        """Insert one ranking row unless a matching row already exists.

        Receives a single record as ``params['task']`` — presumably the
        ``concurrency`` decorator fans the ``tasks`` list out into per-item
        calls (verify against the decorator implementation). Duplicates are
        detected by (university_uid, ranking_year, ranking_source).
        """
        row = params['task']
        mapping = db.MysqlMapping("tb_university_ranking")
        duplicate = await mapping.query(
            {
                "university_uid": row["university_uid"],
                "ranking_year": row['ranking_year'],
                "ranking_source": self.SOURCE,
            },
            ["id"],
        )
        if not duplicate:
            await mapping.insert(row)
        else:
            logger.info("数据已存在!")
