#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE.
#

import json
import os
from dataclasses import dataclass
from typing import Optional

from common.common import check_del_dir, output_execution_result_ex, output_result_file, \
    check_path_legal, check_del_file
from common.common_models import ActionResult, SubJobDetails
from common.const import SubJobStatusEnum, ExecuteResultEnum, CopyDataTypeEnum
from common.logger import Logger
from common.number_const import NumberConst
from common.parse_parafile import CopyParamParseUtil

# Module-level logger; all DB2 delete-copy messages go to db2.log.
LOGGER = Logger().get_logger(filename="db2.log")


@dataclass
class CopyInfo:
    """
    Information about one backup copy that is to be deleted.
    """
    # Unique identifier of the copy.
    copy_id: str
    # Cache repository path of the copy, if any.
    cache_path: Optional[str] = None
    # Meta repository path of the copy, if any.
    meta_path: Optional[str] = None
    # Data repositories of the copy; each entry is expected to be a dict
    # carrying a "path" list (see DeleteCopy.delete_copies) — TODO confirm schema.
    data_reps: Optional[list] = None

class DeleteCopy:
    """
    Deletes DB2 backup copies and reports the deletion progress.

    The deletion status is persisted as a JSON file named
    "<job_id>_progress.json" inside the cache repository so that
    delete_progress() can later report it to the framework.
    """

    def __init__(self, task_name, pid, job_id, param_dict, sub_job_id=None):
        """
        :param task_name: name of the current task, kept for context
        :param pid: request id used when writing the action result file
        :param job_id: id of the delete-copy job
        :param param_dict: parsed job parameter dictionary
        :param sub_job_id: optional sub-job id used in progress reports
        """
        self._task_name = task_name
        self._pid = pid
        self._job_id = job_id
        self._sub_job_id = sub_job_id
        self._param_dict = param_dict

    def delete_copies(self):
        """
        Delete every copy described by the job parameters.

        :return: ActionResult whose code is SUCCESS when all data
                 repositories were cleared, INTERNAL_ERROR otherwise.
        """
        LOGGER.info(f"Start exec delete copy. job_id :{self._job_id}.")
        # 1. Collect all copies that must be deleted.
        all_del_copies = self.get_copies()
        action_result = ActionResult(code=ExecuteResultEnum.INTERNAL_ERROR, message="The delete copy failed")
        if not all_del_copies:
            LOGGER.error(f"Copy dir not found. job_id: {self._job_id}.")
            self.save_progress(SubJobStatusEnum.FAILED)
            return action_result

        # 2. Clear every data repository of every copy.
        self.save_progress(SubJobStatusEnum.RUNNING)
        sub_job_status = SubJobStatusEnum.COMPLETED
        for copy_info in all_del_copies:
            # data_reps may legitimately be None (CopyInfo default); treat as empty.
            for data_rep in copy_info.data_reps or []:
                # A repository entry without a usable path cannot be cleared;
                # mark the job failed instead of raising TypeError/IndexError.
                rep_paths = data_rep.get("path") or []
                if not rep_paths:
                    LOGGER.error(f"No path found in data repository. job_id: {self._job_id}, "
                                 f"copy_id: {copy_info.copy_id}.")
                    sub_job_status = SubJobStatusEnum.FAILED
                    continue
                data_path = rep_paths[0]
                if not self.clear_copy_dir(data_path):
                    LOGGER.error(f"Failed to delete the copy path:{data_path}. job_id: {self._job_id}, "
                                 f"copy_id: {copy_info.copy_id}.")
                    sub_job_status = SubJobStatusEnum.FAILED

        # 3. Persist the final status to the progress file.
        self.save_progress(sub_job_status)
        if sub_job_status == SubJobStatusEnum.COMPLETED:
            LOGGER.info("Copy deleted successfully.")
            action_result.code = ExecuteResultEnum.SUCCESS
        else:
            LOGGER.error(f"Failed to delete the copy. job_id: {self._job_id}.")
        return action_result

    def delete_progress(self):
        """
        Query the deletion progress from the progress file in the cache
        repository.

        :return: SubJobDetails; 50% while the job is running, 100% when it
                 completed, FAILED status at 0% when the file is unreadable.
        """
        LOGGER.info(f"Start exec delete copy. job_id: {self._job_id}, sub_job_id: {self._sub_job_id}.")
        # Default result: FAILED at 0%; only overwritten when progress is readable.
        action_result = SubJobDetails(taskId=self._job_id, subTaskId=self._sub_job_id,
                                      taskStatus=SubJobStatusEnum.FAILED.value, progress=int(0))
        # 1. Locate the progress file inside the cache repository.
        progress_file_path = self.get_progress_path()
        if not progress_file_path:
            LOGGER.error(f"Failed to get the progress file. job_id: {self._job_id}.")
            return action_result

        json_dict = self.read_progress_file(progress_file_path)
        if not json_dict:
            LOGGER.error(f"Failed to read the progress information. job_id: {self._job_id}.")
            return action_result

        # 2. A progress file without a 'status' key is treated as failure.
        task_status = json_dict.get('status', -1)
        if task_status == -1:
            LOGGER.error(f"Failed to get progress parameters(status). job_id: {self._job_id}.")
            return action_result

        # 3. Report 50% while running, 100% once completed; the progress
        #    file is deleted after the final (100%) report.
        delete_progress = NumberConst.FIFTY
        if task_status == SubJobStatusEnum.COMPLETED.value:
            LOGGER.info(f"The delete job:{self._job_id} is complete.")
            delete_progress = NumberConst.HUNDRED
            check_del_file(progress_file_path)
        action_result.task_status = task_status
        action_result.progress = delete_progress
        return action_result

    def output_action_result(self, action_ret):
        """Write the given action result to the result file keyed by the pid."""
        output_result_file(self._pid, action_ret.dict(by_alias=True))

    def clear_copy_dir(self, copy_dir):
        """
        Remove every file and sub-directory inside copy_dir.

        :param copy_dir: directory whose content must be removed
        :return: True when everything was removed (or nothing existed),
                 False when at least one entry could not be deleted.
        """
        delete_status = True
        # 1. A missing directory means there is nothing left to delete; treat
        #    it as success so the delete job stays idempotent (the original
        #    os.listdir would raise FileNotFoundError here).
        if not os.path.isdir(copy_dir):
            LOGGER.info(f"No copy file exists in this path:{copy_dir}.")
            return delete_status
        copy_path_file_list = os.listdir(copy_dir)
        if not copy_path_file_list:
            LOGGER.info(f"No copy file exists in this path:{copy_dir}.")
            return delete_status

        # 2. Remove every entry; keep going after an error so that as much
        #    as possible is cleaned up, but remember the failure.
        for copy_file_path in copy_path_file_list:
            del_path = os.path.join(copy_dir, copy_file_path)
            if os.path.isfile(del_path):
                try:
                    check_del_file(del_path)
                except Exception as exception_str:
                    LOGGER.error(f"Failed to delete copy file:{del_path}. {exception_str}")
                    delete_status = False
            else:
                try:
                    check_del_dir(del_path)
                except Exception as exception_str:
                    LOGGER.error(f"Failed to delete copy dir:{del_path}. {exception_str}")
                    delete_status = False
        LOGGER.info(f"The clear_copy_dir:{self._job_id} is complete.")
        return delete_status

    def get_copies(self):
        """
        Build the list of CopyInfo objects to delete from the job
        parameters. Log copies use the log repositories as data paths,
        all other copy types use the data repositories.
        """
        all_del_copies = []
        for copy in CopyParamParseUtil.get_copies(self._param_dict):
            copy_type = CopyParamParseUtil.get_copy_type(copy)
            if copy_type == CopyDataTypeEnum.LOG_COPY:
                copy_data_path = CopyParamParseUtil.get_log_reps(copy)
            else:
                copy_data_path = CopyParamParseUtil.get_data_reps(copy)
            all_del_copies.append(
                CopyInfo(
                    copy_id=CopyParamParseUtil.get_copy_id(copy),
                    cache_path=CopyParamParseUtil.get_cache_path(copy),
                    meta_path=CopyParamParseUtil.get_meta_path(copy),
                    data_reps=copy_data_path
                ))
        return all_del_copies

    def get_progress_path(self):
        """
        Build the path of the progress file inside the cache repository.

        :return: the progress file path, or '' when no cache repository is
                 available or the path fails the legality check.
        """
        # 1. Take the first non-empty cache path among the copies.
        cache_path = ""
        for copy in CopyParamParseUtil.get_copies(self._param_dict):
            cache_path = CopyParamParseUtil.get_cache_path(copy)
            if cache_path:
                break

        if not cache_path:
            LOGGER.error(f"Failed to get the cache path. job_id: {self._job_id}.")
            return ''

        # 2. The progress file must resolve below the backup mount root.
        progress_file_path = os.path.join(cache_path, f"{self._job_id}_progress.json")
        if not check_path_legal(progress_file_path, "/mnt/databackup/"):
            LOGGER.error(f"Failed to verify the progress file path:{progress_file_path}. job_id: {self._job_id}.")
            return ''

        return progress_file_path

    def save_progress(self, sub_job_status: SubJobStatusEnum):
        """
        Persist the given status to the progress file.

        :param sub_job_status: status stored under the 'status' key
        :return: True on success, False when the progress path is unknown
        """
        # 1. Locate the progress file.
        progress_file_path = self.get_progress_path()
        if not progress_file_path:
            LOGGER.error(f"Failed to get the progress file. job_id: {self._job_id}.")
            return False

        # 2. Write the status to the progress file.
        output_execution_result_ex(progress_file_path, {'status': sub_job_status.value})
        return True

    def read_progress_file(self, file_path):
        """
        Read and parse the JSON progress file.

        :param file_path: path of the progress file
        :return: parsed dict, or {} when the file is missing or invalid
        """
        if not os.path.exists(file_path):
            LOGGER.error(f"The progress file does not exist. job_id: {self._job_id}.")
            return {}

        # Parse the file; log the actual exception so a corrupted file
        # can be diagnosed (the original dropped the error detail).
        try:
            with open(file_path, 'r') as json_file:
                return json.load(json_file)
        except Exception as exception_str:
            LOGGER.error(f"Failed to parse the parameter file. job_id: {self._job_id}. {exception_str}")
            return {}
