import datetime
import json
import os
import time
from shutil import copyfile


class MsgQueueCount:
    """Persist and update produce/consume throughput counters in a JSON count file.

    Every write mirrors the count file to a ``.bak`` copy so a corrupted
    primary file can be recovered from the backup on the next read.
    """

    @classmethod
    def _copy_file(cls, src_file: str = "", dst_file: str = "", force=True):
        """Copy ``src_file`` to ``dst_file``, optionally overwriting.

        :param src_file: path of the file to copy; must exist.
        :param dst_file: destination path.
        :param force: when True, remove an existing destination first.
        :raises FileNotFoundError: if ``src_file`` does not exist.
        :return: None.
        """
        if not os.path.exists(src_file):
            # BUGFIX: this raised SyntaxError, which is reserved for parser
            # errors; a missing source file is a FileNotFoundError.
            raise FileNotFoundError(f"源文件不存在, 源路径：{src_file}")
        if os.path.exists(dst_file) and force:
            os.remove(dst_file)
        copyfile(src_file, dst_file)

    @classmethod
    def get_count_content(cls, count_file, logger):
        """Load the JSON counter state from ``count_file``.

        Recovery order: primary file -> ``<count_file>.bak`` backup -> a fresh
        empty structure (which is also written back to ``count_file``).

        :param count_file: path of the JSON counter file.
        :param logger: logger used to report load/recovery failures.
        :return: dict with top-level ``produce`` and ``consume`` sections.
        """
        data = {"produce": {}, "consume": {}}
        if os.path.exists(count_file):
            with open(count_file, "r") as f:
                try:
                    data = json.load(f)
                except Exception as e:
                    logger.error(f"load count_file:{count_file} error, try to load bak file, err msg: {e}, "
                                 f"error line:{e.__traceback__.tb_lineno}")
                    try:
                        with open(count_file + ".bak", "r") as f_bak:
                            data = json.load(f_bak)
                        # Backup was readable: restore it over the corrupted primary.
                        cls._copy_file(count_file + ".bak", count_file)
                    except Exception as e:
                        logger.error(f"load bak count_file:{count_file}.bak still error, rebuild a new count file, "
                                     f"err msg: {e}, error line:{e.__traceback__.tb_lineno}")
                        # Both copies unreadable: rebuild an empty counter file.
                        with open(count_file, "w") as f_write:
                            data = {"produce": {}, "consume": {}}
                            json.dump(data, f_write, indent=4)

        return data

    @classmethod
    def assembly_count_info(cls, last_count_info, count_unit: int):
        """Fold ``count_unit`` newly processed messages into a counter record.

        Mutates ``last_count_info`` in place: total, record timestamps,
        average/latest/highest/lowest TPS, and the per-million milestone
        fields.

        :param last_count_info: counter dict shaped like ``get_init_count_info``.
        :param count_unit: number of messages processed since the last record.
        :return: the (mutated) ``last_count_info`` dict; on any unexpected
                 error the dict is returned as-is (best effort).
        """
        try:
            last_count_info["total"] += count_unit
            last_record_time = last_count_info["last_record_time"]
            cur_time = time.time()
            cur_str_time = timestamp_2_str(cur_time)
            time_period_str = f"From {timestamp_2_str(last_record_time)} to {cur_str_time}"
            last_count_info["last_record_time"] = cur_time
            last_count_info["last_record_str_time"] = cur_str_time
            last_count_info["last_record_period"] = time_period_str

            average_tps = last_count_info.get("average_tps", None)
            if not average_tps:
                # First record, or a stored average of 0: derive the average
                # from the total elapsed time since start.
                # BUGFIX: the old code divided by average_tps when it was 0,
                # raising ZeroDivisionError that the outer except swallowed,
                # leaving the record half-updated.
                elapsed_since_start = cur_time - last_count_info["start_time"]
                average_tps = int(last_count_info["total"] / elapsed_since_start) if elapsed_since_start else 0
            else:
                # Reconstruct the prior running time from the previous average,
                # extend it by this interval, then recompute the average.
                total_time = (last_count_info["total"] - count_unit) / average_tps + cur_time - last_record_time
                average_tps = int(last_count_info["total"] / total_time) if total_time else 0
            last_count_info["average_tps"] = average_tps

            # BUGFIX: guard a zero interval (coarse clock / back-to-back calls)
            # which previously raised ZeroDivisionError.
            interval = cur_time - last_record_time
            lasted_count_unit_tps = int(count_unit / interval) if interval else 0
            last_count_info["lasted_count_unit_tps"] = lasted_count_unit_tps
            if lasted_count_unit_tps > last_count_info["highest_count_unit_tps"]:
                last_count_info["highest_count_unit_tps"] = lasted_count_unit_tps
                last_count_info["highest_count_unit_tps_period"] = time_period_str
            if lasted_count_unit_tps < last_count_info["lowest_count_unit_tps"]:
                last_count_info["lowest_count_unit_tps"] = lasted_count_unit_tps
                last_count_info["lowest_count_unit_tps_period"] = time_period_str

            # NOTE: this milestone only fires when total lands exactly on a
            # multiple of one million, i.e. when count_unit divides it evenly.
            if last_count_info["total"] % (1000 * 1000) == 0:
                last_million_time = last_count_info["last_million_time"]
                last_count_info["last_million_time"] = cur_time
                last_count_info["last_million_str_time"] = cur_str_time
                million_time_period_str = f"From {timestamp_2_str(last_million_time)} to {cur_str_time}"
                last_count_info["last_million_period"] = million_time_period_str
                million_interval = cur_time - last_million_time
                # BUGFIX: same zero-interval guard as above.
                lasted_million_tps = int(1000 * 1000 / million_interval) if million_interval else 0
                last_count_info["lasted_million_tps"] = lasted_million_tps
            return last_count_info
        except Exception:
            # Deliberate best effort: statistics bookkeeping must never break
            # the message path, so any unexpected error returns the record
            # in whatever state it reached.
            return last_count_info

    @classmethod
    def get_init_count_info(cls):
        """Build a fresh counter record anchored at the current time.

        Note: ``average_tps`` is intentionally absent; ``assembly_count_info``
        detects its absence to select the first-record computation.

        :return: dict of counter fields, all TPS values zeroed (lowest seeded
                 high so the first real sample replaces it).
        """
        init_count_info = dict()
        init_count_info["total"] = 0
        cur_time = time.time()
        cur_str_time = timestamp_2_str(cur_time)
        time_period_str = f"From {cur_str_time} to {cur_str_time}"
        init_count_info["start_time"] = cur_time
        init_count_info["start_str_time"] = cur_str_time
        init_count_info["last_record_time"] = cur_time
        init_count_info["last_record_str_time"] = cur_str_time
        init_count_info["last_record_period"] = time_period_str
        init_count_info["lasted_count_unit_tps"] = 0
        init_count_info["highest_count_unit_tps"] = 0
        init_count_info["highest_count_unit_tps_period"] = time_period_str
        init_count_info["lowest_count_unit_tps"] = 99999999
        init_count_info["lowest_count_unit_tps_period"] = time_period_str
        init_count_info["last_million_time"] = cur_time
        init_count_info["last_million_str_time"] = cur_str_time
        init_count_info["last_million_period"] = time_period_str
        init_count_info["lasted_million_tps"] = 0
        return init_count_info

    @classmethod
    def write_count(cls, count_file, data):
        """Write ``data`` to ``count_file`` as JSON and refresh the ``.bak`` mirror.

        :param count_file: path of the JSON counter file.
        :param data: JSON-serializable counter structure.
        :return: None.
        """
        with open(count_file, "w") as f_write:
            json.dump(data, f_write, indent=4)
        # Keep a known-good copy for get_count_content's recovery path.
        cls._copy_file(count_file, count_file + ".bak")


def timestamp_2_str(_timestamp):
    """Format a Unix timestamp as a ``YYYY-MM-DD HH:MM:SS`` string (local time).

    :param _timestamp: seconds since the epoch; anything coercible to float.
    """
    as_dt = datetime.datetime.fromtimestamp(float(_timestamp))
    return as_dt.strftime("%Y-%m-%d %H:%M:%S")


# NOTE(review): despite the .yaml extension, these files are written as JSON
# by MsgQueueCount.write_count — presumably the extension is historical;
# confirm before renaming, as existing deployments may reference these paths.
COUNT_FILE = "queue_count.yaml"
KAFKA_COUNT_FILE = "kafka_count.yaml"
