from loguru import logger

from com.arcfox.school.processor.base_processor import BaseProcessor
from com.arcfox.util.muilty_coroutine_util import concurrency
from com.arcfox.middleware import async_mysql_middleware as db


class YZYScoreSegmentProcessor(BaseProcessor):
    """Parse YZY "one score, one segment" score data and persist it to MySQL.

    NOTE(review): ``__save`` is decorated with ``@concurrency(1)`` and reads
    ``params['task']`` while the caller passes ``tasks=records`` — this
    presumes the decorator fans the ``tasks`` list out into one call per
    item, each receiving ``task=<item>``. Confirm against
    ``muilty_coroutine_util.concurrency``.
    """

    def __init__(self):
        # NOTE(review): BaseProcessor.__init__ is not called here — confirm
        # the base class requires no initialization.
        pass

    async def parse_and_save_data(self, task, result_json):
        """Flatten the crawled score-segment payload into row dicts and save them.

        Args:
            task: crawl-task dict supplying ``province_name``, ``year`` and
                ``subject``.
            result_json: crawler response; rows are expected under
                ``result_json['result']['yfyds']``.
        """
        segments = result_json['result']['yfyds'] or []
        # Columns shared by every row of this task.
        common = {
            "province": task['province_name'],
            "score_year": task['year'],
            "score_category": task['subject'],
        }
        records = [
            {
                **common,
                "score": seg['minScore'],
                # Rank span rendered as "<highestRank>-<lowestRank>".
                "rank_segment": f"{seg['highestRank']}-{seg['lowestRank']}",
                "suggest_rank": seg['lowestRank'],
                "same_score_person": seg['sameCount'],
            }
            for seg in segments
        ]
        await self.__save(tasks=records)

    @concurrency(1)
    async def __save(self, **params):
        """Insert one record into ``tb_score_segment`` unless it already exists.

        Expects ``params['task']`` to hold a single row dict (see class note
        on the ``@concurrency`` fan-out assumption).
        """
        record = params['task']
        mapping = db.MysqlMapping("tb_score_segment")
        # De-duplicate on (province, score_year, score_category, score).
        existing = await mapping.query(
            {"province": record["province"], "score_year": record["score_year"],
             "score_category": record['score_category'], "score": record['score']},
            ["id"],
        )
        if existing:
            logger.info("数据已存在!")
        else:
            await mapping.insert(record)
