from loguru import logger

from com.arcfox.middleware import async_mysql_middleware as db
from com.arcfox.school.processor.base_processor import BaseProcessor
from com.arcfox.util.muilty_coroutine_util import concurrency


class YZYMajorRankingProcessor(BaseProcessor):
    """Parser for Youzhiyuan (YiDu) major-ranking API responses.

    Flattens the ``result.items`` list of a response into row dicts for the
    ``tb_major_ranking`` table and persists each row, skipping rows that are
    already stored for the same university / major / year / source.
    """

    def __init__(self):
        # Label stamped onto every persisted row to mark the data source.
        self.SOURCE = "易度"

    async def parse_and_save_data(self, task, result_json):
        """Build ranking rows from *result_json* and hand them off for saving.

        Args:
            task: crawl-task dict; supplies ``major_name``, ``major_code``
                and ``year`` for every produced row.
            result_json: decoded API payload with a ``result.items`` list;
                each item carries ``college.cnName``, ``rank``, ``hits``
                and ``diffScore``.
        """
        rows = [
            {
                "major_name": task['major_name'],
                "major_code": task['major_code'],
                "university_name": entry['college']['cnName'],
                "university_uid": self.generate_school_uid(entry['college']['cnName']),
                "ranking_sort": entry['rank'],
                "hots": entry['hits'],
                "ranking_score": entry['diffScore'],
                "ranking_year": task['year'],
                "ranking_source": self.SOURCE,
            }
            for entry in result_json['result']['items']
        ]
        # NOTE(review): the concurrency decorator presumably fans the
        # ``tasks`` list out into per-item ``task`` keyword calls on
        # __save_data — confirm against muilty_coroutine_util.concurrency.
        await self.__save_data(tasks=rows)

    @concurrency(5)
    async def __save_data(self, **params):
        """Insert a single ranking row unless a matching one already exists."""
        row = params['task']
        mapping = db.MysqlMapping("tb_major_ranking")
        # Dedup key: one row per (university, major, year, source).
        existing = await mapping.query({
            "university_uid": row['university_uid'],
            "major_code": row['major_code'],
            "ranking_year": row['ranking_year'],
            "ranking_source": row['ranking_source'],
        })
        if existing:
            logger.info("数据已存在!")
        else:
            await mapping.insert(row)
