#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
#

import json
import os

from dws.commons.common import log
from dws.commons.const import DwsRetEnum, RpcParamKey
from common.common import execute_cmd
from common.util.exec_utils import su_exec_rm_cmd, exec_overwrite_file


def write_file(path: str, file_context: str):
    """
    Overwrite the file at ``path`` with ``file_context``.

    :param path: target file path (must not be a symbolic link)
    :param file_context: text content to write
    :raises Exception: if ``path`` is a symlink or the write fails
    """
    # Refuse to follow symlinks so a crafted link cannot redirect the write.
    if os.path.islink(path):
        log.error(f"Link file {path}.")
        raise Exception(f'{path} is link file.')
    real_path = os.path.realpath(path)
    success = exec_overwrite_file(real_path, file_context, json_flag=False)
    if not success:
        raise Exception("Write file failed.")
    log.debug(f"Write progress end, path:{real_path}, result: {file_context}")


def write_lines(file_path: str, lines):
    """
    Overwrite the file at ``file_path`` with ``lines``.

    :raises Exception: if the overwrite helper reports failure
    """
    real_path = os.path.realpath(file_path)
    success = exec_overwrite_file(real_path, lines, json_flag=False)
    if not success:
        raise Exception("Write file failed.")
    log.debug(f"Write progress end, path:{real_path}, result: {lines}")


def read_file(path):
    """
    Parse the file at ``path`` as JSON and return the resulting object.

    :raises OSError: if the file cannot be opened
    :raises json.JSONDecodeError: if the content is not valid JSON
    """
    with open(path, "r", encoding='utf-8') as handle:
        content = json.load(handle)
    log.debug(f"Read progress end, path: {path}, result: {content}")
    return content


def get_progress_path(copy_id, sub_job_id, cache_path):
    """Build the path of the progress-notification file for a sub job."""
    file_name = f'resultNotify_{sub_job_id}.txt'
    result_path = os.path.join(cache_path, 'tmp', copy_id, file_name)
    log.debug(f"Result path:{result_path}")
    return result_path


def get_metadata_path(copy_id, meta_path):
    """Build the path under which a copy's metadata is persisted."""
    metadata_path = os.path.join(meta_path, 'meta', copy_id, 'copyInfo.txt')
    log.debug(f"Path: {metadata_path}, copy_id: {copy_id}")
    return metadata_path


def invoke_rpc_tool_interface(unique_id: str, interface_name: str, param_dict: dict):
    """
    Invoke an interface of the external RPC tool script and return its JSON result.

    The parameters are serialized to an input file, the tool is run via shell,
    and its JSON output file is read back. Both temporary files are removed
    before returning or raising.

    :param unique_id: id used to derive unique input/output file names
    :param interface_name: RPC tool interface to call
    :param param_dict: parameters passed to the interface (JSON-serializable)
    :raises Exception: if the tool cannot be executed, reports a non-success
        return code, or its result file cannot be read
    """
    def clear_file(path):
        # Best-effort removal; a leftover file is only worth a warning.
        path = os.path.realpath(path)
        if os.path.isfile(path):
            if not su_exec_rm_cmd(path):
                log.warn(f"Fail to remove {path}.")

    input_file_path = os.path.join(RpcParamKey.PARAM_FILE_PATH, RpcParamKey.INPUT_FILE_PREFFIX + unique_id)
    output_file_path = os.path.join(RpcParamKey.RESULT_PATH, RpcParamKey.OUTPUT_FILE_PREFFIX + unique_id)
    write_file(input_file_path, json.dumps(param_dict))

    cmd = f"sh {RpcParamKey.RPC_TOOL} {interface_name} {input_file_path} {output_file_path}"
    try:
        ret, std_out, std_err = execute_cmd(cmd)
    except Exception as err:
        log.error(f"Invoke rpc_tool interface [{interface_name}] err: {err}.")
        raise err
    finally:
        # The input file is consumed by the tool; always remove it.
        clear_file(input_file_path)

    if ret != DwsRetEnum.SUCCESS:
        err_info = f"Invoke rpc_tool script failed, std_err: {std_err}."
        log.error(err_info)
        # Fix: the tool may have (partially) written the output file even on
        # failure — remove it here too so failed calls do not leak files.
        clear_file(output_file_path)
        raise Exception(err_info)

    # Remove the result file once it has been read (or failed to read).
    try:
        result = read_file(output_file_path)
    except Exception as err:
        log.error(f'Read result file err: {err}.')
        raise err
    finally:
        clear_file(output_file_path)

    return result


def report_job_details_comm(job_id, sub_job_details: dict):
    """
    Report sub-job details through the RPC tool.

    :return: True when the tool was invoked and returned code 0, else False
    """
    try:
        result_info = invoke_rpc_tool_interface(job_id, RpcParamKey.REPORT_JOB_DETAILS, sub_job_details)
    except Exception as err:
        log.error(f"Invoke rpc_tool interface exception, err: {err}.")
        return False
    ret_code = result_info.get("code")
    if ret_code == 0:
        return True
    log.error(f"Invoke rpc_tool interface failed, result code: {ret_code}.")
    return False


def get_total_data_size_from_speed_file(cache_path, copy_id, job_id):
    """
    Sum the reported data size of every host directory under the speed dir.

    :return: total data size in MB (0 when the speed directory is missing)
    """
    speed_dir_path = os.path.join(cache_path, "tmp", copy_id, "speed")
    if not os.path.isdir(speed_dir_path):
        log.error(f"Speed dir path [{speed_dir_path}] not exists, job_id: {job_id}.")
        return 0
    total_data_size = 0
    for host_key in os.listdir(speed_dir_path):
        host_key_path = os.path.join(speed_dir_path, host_key)
        if not os.path.isdir(host_key_path):
            continue
        if not os.listdir(host_key_path):
            log.info(f"{host_key} is empty, skip.")
            continue
        current_size = get_speed_file_data_size(host_key_path, job_id)
        previous_size = get_last_speed_file_data_size(host_key_path, job_id)
        # A read/write race can make the current speed file read back as 0,
        # so compare against the previously recorded value.
        if current_size > previous_size:
            write_last_speed_file(host_key_path, current_size, job_id)
            total_data_size += current_size
        elif current_size < previous_size:
            log.warn(f"{host_key} data size is smaller than last data size, use last data size.")
            total_data_size += previous_size
        else:
            total_data_size += current_size
    return total_data_size


def get_speed_file_data_size(speed_file_dir, job_id):
    """
    Return the data size (MB) reported by this host's speed files.

    xbsa reports the data size in two places: once when the xbsa session ends
    ("xbsa_speed*" files) and once every 15 seconds from a scheduler thread
    ("schedule_thread_xbsa_speed*" files). Each source is accumulated into its
    own slot and the larger total wins — the larger one is necessarily newer.

    :return: max of the two accumulated totals, in MB
    """
    total_size = [0, 0]
    for speed_item in os.listdir(speed_file_dir):
        speed_txt_file = os.path.join(speed_file_dir, speed_item)
        file_name = os.path.basename(speed_txt_file)
        # Route the file to its accumulator slot. Check the longer prefix
        # first; "schedule_thread_xbsa_speed" does not start with "xbsa_speed",
        # so this matches the original two-branch dispatch exactly.
        if file_name.startswith("schedule_thread_xbsa_speed"):
            slot = 1
        elif file_name.startswith("xbsa_speed"):
            slot = 0
        else:
            continue
        try:
            speed_obj = read_file(speed_txt_file)
        except Exception as err:
            log.exception(f"Get speed obj err: {err}, job_id: {job_id}.")
            continue
        if not speed_obj:
            log.warn(f"Get json obj from [{speed_txt_file}] failed, job_id: {job_id}.")
            continue
        total_size[slot] += speed_obj.get("totalSizeInMB", 0)
    host_key = os.path.basename(speed_file_dir)
    log.info(f"{host_key} speed file data size: {total_size}")
    return max(total_size)


def get_last_speed_file_data_size(speed_file_dir, job_id):
    """
    Return the data size (MB) recorded in this host's last_xbsa_speed.txt.

    :return: recorded size in MB, or 0 when the file is missing, unreadable,
        or contains no usable JSON object
    """
    last_speed_file_path = os.path.join(speed_file_dir, "last_xbsa_speed.txt")
    host_key = os.path.basename(speed_file_dir)
    if not os.path.exists(last_speed_file_path):
        log.info(f"{host_key} last speed file has not been created yet.")
        return 0
    try:
        speed_obj = read_file(last_speed_file_path)
    except Exception as err:
        log.exception(f"Get speed obj err: {err}, job_id: {job_id}.")
        return 0
    if not speed_obj:
        log.warn(f"Get json obj from [{last_speed_file_path}] failed, job_id: {job_id}.")
        # Bug fix: previously control fell through to speed_obj.get(), which
        # raises AttributeError when read_file returned None (JSON "null").
        return 0
    last_data_size = speed_obj.get("totalSizeInMB", 0)
    log.info(f"{host_key} last speed file data size: {last_data_size}")
    return last_data_size


def write_last_speed_file(speed_file_dir, data_size, job_id):
    """
    Persist the latest observed data size to last_xbsa_speed.txt (best effort).

    A failed write is logged and swallowed — the caller treats the recorded
    value as a hint only, so this must not raise.
    """
    last_speed_file_path = os.path.join(speed_file_dir, "last_xbsa_speed.txt")
    json_str = json.dumps({"totalSizeInMB": data_size})
    try:
        with open(last_speed_file_path, 'w', encoding='utf-8') as file:
            file.write(json_str)
    except Exception as err:
        log.exception(f"Write last xbsa speed file err: {err}, job_id: {job_id}.")
    else:
        # Bug fix: the success message was previously logged unconditionally,
        # even when the write above failed.
        log.info("Succeed to write last xbsa speed file.")
