#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
from copy import deepcopy

from common.const import JobData, RepositoryDataTypeEnum, BackupTypeEnum
from common.logger import Logger
from informix.comm.comm import get_last_copy_info, exec_echo_env_var, get_log_time_info, exec_rc_tool_cmd, \
    get_informix_dir, identify_env_user
from informix.comm.const import FileConstant, RpcParamKey, InformixCustomParmConstant, JsonConstant, InformixInfo
from informix.comm.informix_cmd import InformixCommand

# Module-level logger; all messages from this module go to "informix.log"
# via the project's Logger wrapper.
log = Logger().get_logger("informix.log")


class InformixBackup:
    """Backup helper for a single Informix backup job.

    Bundles the per-job state (instance/onconfig/sqlhosts settings,
    repository paths, job ids) and exposes the operations the backup flow
    needs: registering the backup device, toggling automatic logical-log
    backup, running the backup command and reporting copy information back
    to the framework through the rc tool.
    """

    def __init__(self, param, job_manager):
        """
        :param param: job parameter accessor (project type) providing copy id,
            server num, backup type, repositories, paths, etc.
        :param job_manager: object carrying job_id and sub_job_id
        """
        # Fix: call identify_env_user() once instead of twice.
        env_user = identify_env_user()
        self.db_user = env_user if env_user else InformixInfo.INFORMIX_USER
        self.param = param
        self.custom_dict_curr = {}
        self.instance_name_curr = ""
        self.onconfig_file_curr = ""
        self.sqlhosts_file_curr = ''
        self.backup_type = ''
        self.informixdir = get_informix_dir(self.db_user)
        self.bar_bsalib_path = ""
        self.backup_dir = ""
        self.servernum = ""
        self.bar_ixbar_path = ""
        self.job_id = job_manager.job_id
        self.sub_job_id = job_manager.sub_job_id
        self.copy_id = ''
        self.metadata_repository_path = ""
        self.backup_record_file = ""
        self.data_area_path = ''
        # NOTE(review): attribute name has a typo ("roodbs" vs "rootdbs");
        # kept as-is so external readers of the attribute keep working.
        self.roodbs_path_curr = ''
        self.ixbar_sub_path = None
        self.init_patam()

    @staticmethod
    def get_informixdir():
        """Return the value of ${INFORMIXDIR} from the environment.

        :return: stripped INFORMIXDIR value, or "" on failure
        """
        log.info("Enter get_informixdir.")
        ret, res = exec_echo_env_var("INFORMIXDIR")
        if not ret:
            log.error("Failed to obtain INFORMIXDIR")
            return ""
        return res.strip()

    @staticmethod
    def _get_logical_id(res):
        """Parse the logical-log id out of status-command output.

        Takes the first line containing "-C-" and returns its 4th
        whitespace-separated field; returns "" when no such line exists or
        the line has fewer than 8 fields.

        :param res: raw multi-line command output
        :return: logical id string, or "" on failure
        """
        logical_lines = [line.strip() for line in res.split("\n") if "-C-" in line.strip()]
        if not logical_lines:
            log.warning("Failed to obtain logical line.")
            return ""
        fields = [tok.strip() for tok in logical_lines[0].split(" ") if tok.strip()]
        if len(fields) < 8:
            log.warning("Failed to obtain correct logical line.")
            return ""
        return fields[3]

    def init_patam(self):
        """Populate per-job attributes from the job parameters.

        (Method name typo "patam" kept for interface compatibility.)
        """
        self.copy_id = self.param.get_copy_id()
        self.servernum = self.param.get_server_num()
        self.backup_type = self.param.get_backup_type()
        self.sqlhosts_file_curr = self.param.get_sqlhosts_file_curr()
        self.onconfig_file_curr = self.param.get_onconfig_file_curr()
        self.instance_name_curr = self.param.get_instance_name_curr()
        self.roodbs_path_curr = self.param.get_rootdbs_path_curr()
        # Common environment handed to every InformixCommand invocation.
        self.custom_dict_curr = {
            InformixCustomParmConstant.INSTANCE_NAME: self.instance_name_curr,
            InformixCustomParmConstant.SQLHOSTS_PATH: self.sqlhosts_file_curr,
            InformixCustomParmConstant.CONFIG_PATH: self.onconfig_file_curr
        }
        self.data_area_path = self.param.get_data_path()
        self.backup_dir = os.path.join(self.data_area_path, self.copy_id)
        self.bar_ixbar_path = os.path.join(self.backup_dir, "ixbar." + self.servernum)
        self.metadata_repository_path = self.param.get_meta_path()
        self.backup_record_file = os.path.join(self.metadata_repository_path, FileConstant.BACKUP_JSON_FILE)
        # The ixbar file lives under etc/ for the informix user, backups/ otherwise.
        self.ixbar_sub_path = 'etc' if self.db_user == InformixInfo.INFORMIX_USER else 'backups'

    def add_backup_file_device(self):
        """Register this job's ixbar file as the backup device.

        :return: True on success, False otherwise
        """
        ret = InformixCommand(self.custom_dict_curr).add_backup_file_device(self.bar_ixbar_path)
        if not ret:
            log.error("Failed to add backup device.")
            return False
        return True

    def open_log_auto_backup(self, log_backup_tool_path):
        """Enable automatic logical-log backup with the given tool path.

        :return: truthy result from the command on success, False otherwise
        """
        ret = InformixCommand(self.custom_dict_curr).open_log_auto_backup(log_backup_tool_path)
        if not ret:
            log.error("Failed to set log auto log backup operation.")
            return False
        return ret

    def close_log_auto_backup(self):
        """Disable automatic logical-log backup.

        :return: truthy result from the command on success, False otherwise
        """
        ret = InformixCommand(self.custom_dict_curr).close_log_auto_backup()
        if not ret:
            log.error("Failed to close auto log backup operation.")
            return False
        return ret

    def exec_backup(self, backup_type, log_backup_staus, file_path):
        """Run the backup command.

        :param backup_type: backup level forwarded to the command
        :param log_backup_staus: log-backup switch (parameter name typo kept
            for interface compatibility)
        :param file_path: target path for the backup
        :return: (ret, res) from the underlying command
        """
        ret, res = InformixCommand(self.custom_dict_curr).backup(backup_type, log_backup_staus, file_path)
        if not ret:
            # Log and fall through: (ret, res) is returned either way.
            log.error("Failed to exec %s backup operation.%s", backup_type, res)
        return ret, res

    def get_logical_id(self):
        """Return the current logical-log id, or '' on failure."""
        ret, res = InformixCommand(self.custom_dict_curr).show_status_and_version()
        if not ret:
            log.error("Failed to exec 'onstat -l' command.")
            return ''
        if not res:
            log.error("Failed to obtain logical id from host.")
            return ''
        return self._get_logical_id(res)

    def get_log_common(self):
        """Return the common log prefix (pid / job id / sub-job id)."""
        return f"Pid: {JobData.PID} jobId: {self.job_id} subjobId: {self.sub_job_id}."

    def _fetch_copy_id(self, copy_type):
        """Query the latest copy of the given type and extract its copy id.

        :param copy_type: copy-type code forwarded to get_last_copy_info
        :return: (ok, copy_id); ok is False when the query raised, and
            copy_id may be '' when the copy carries no id.
        """
        try:
            copy_info = get_last_copy_info(copy_type, self.job_id, self.sub_job_id,
                                           self.param.get_job_protectobject())
        except Exception as e_obj:
            log.exception(e_obj)
            return False, ''
        return True, copy_info.get(JsonConstant.EXTEND_INFO, {}).get(JsonConstant.COPY_ID, '')

    def _report_copy_info(self, copy_info):
        """Send copy info to the framework through the rc tool.

        :return: True on success, False when the rc tool raised
        """
        try:
            exec_rc_tool_cmd(self.job_id + self.sub_job_id, RpcParamKey.REPORT_COPY_INFO, copy_info)
        except Exception as err_info:
            log.error("Report copy info fail.%s", err_info)
            return False
        return True

    def support_info(self, backup_time):
        """Report the copy information of a data backup to the framework.

        Builds the copy data model (backup time, copy id, instance name,
        rootdbs path, servernum, repositories), attaches the base backup-set
        chain for incremental/differential backups, then reports it through
        the rc tool.

        :param backup_time: backup timestamp (seconds)
        :return: True on success, False otherwise
        """
        json_copy = {
            "extendInfo": {
                "backupTime": int(backup_time),
                "copyId": self.copy_id,
                "instance_name": self.instance_name_curr,
                "rootdbsPath": self.roodbs_path_curr,
                "prim_name": self.instance_name_curr,
                "servernum": self.servernum
            },
            "timestamp": int(backup_time)
        }
        json_copy["repositories"] = self.set_repositories()
        if self.backup_type == BackupTypeEnum.INCRE_BACKUP:
            # Copy-type codes 1/3 mirror the original constants — presumably
            # "last differential" and "last full"; confirm against
            # get_last_copy_info before changing.
            ok, diff_copy_id = self._fetch_copy_id(1)
            if not ok:
                return False
            ok, full_copy_id = self._fetch_copy_id(3)
            if not ok:
                return False
            if not all((full_copy_id, diff_copy_id)):
                return False
            json_copy.get(JsonConstant.EXTEND_INFO, {})[JsonConstant.BASE_BACKUPSET.value] = \
                [full_copy_id, diff_copy_id]
        if self.backup_type == BackupTypeEnum.DIFF_BACKUP:
            ok, full_copy_id = self._fetch_copy_id(3)
            if not ok:
                return False
            json_copy.get(JsonConstant.EXTEND_INFO, {})[JsonConstant.BASE_BACKUPSET.value] = [full_copy_id]
        copy_info = {"copy": json_copy, "jobId": self.job_id}
        log.info("copy_info: %s.", copy_info)
        return self._report_copy_info(copy_info)

    def set_repositories(self):
        """Build the repository list reported with a data copy.

        Data repositories get the copy id appended to their remote path;
        meta repositories (excluding log-repository paths) get
        "meta/<copy_id>" appended. Operates on a deep copy so the job
        parameters stay untouched.

        :return: list of adjusted repository dicts
        """
        out_repositories = []
        for repository in deepcopy(self.param.get_job_repositories()):
            repository_type = repository.get(JsonConstant.REPORITORYTYPE, '')
            old_remote_path = repository.get(JsonConstant.REMOTE_PATH)
            if repository_type == RepositoryDataTypeEnum.DATA_REPOSITORY:
                repository[JsonConstant.REMOTE_PATH] = os.path.join(old_remote_path, self.copy_id)
                out_repositories.append(repository)
            elif repository_type == RepositoryDataTypeEnum.META_REPOSITORY:
                if "LogRepository" not in old_remote_path:
                    repository[JsonConstant.REMOTE_PATH] = os.path.join(old_remote_path, "meta", self.copy_id)
                    out_repositories.append(repository)
        return out_repositories

    def set_log_repositories(self):
        """Build the repository list reported with a log copy.

        Meta repositories get "meta/<copy_id>" appended to their remote
        path; log repositories get the copy id appended. Operates on a deep
        copy so the job parameters stay untouched.

        :return: list of adjusted repository dicts
        """
        out_repositories = []
        for repository in deepcopy(self.param.get_job_repositories()):
            repository_type = repository.get(JsonConstant.REPORITORYTYPE, '')
            old_remote_path = repository.get(JsonConstant.REMOTE_PATH)
            if repository_type == RepositoryDataTypeEnum.META_REPOSITORY:
                repository[JsonConstant.REMOTE_PATH] = os.path.join(old_remote_path, "meta", self.copy_id)
                out_repositories.append(repository)
            elif repository_type == RepositoryDataTypeEnum.LOG_REPOSITORY:
                repository[JsonConstant.REMOTE_PATH] = os.path.join(old_remote_path, self.copy_id)
                out_repositories.append(repository)
        return out_repositories

    def support_log_info(self, start_id, end_id):
        """Report the copy information of a log backup.

        Reads begin/end times for the [start_id, end_id] log range from the
        ixbar file, validates continuity with the latest data copy, then
        reports the copy model through the rc tool.

        :param start_id: first logical-log id of the range
        :param end_id: last logical-log id of the range
        :return: True on success, False otherwise
        """
        json_copy = {
            "extendInfo": {
                "beginTime": None,
                "endTime": None,
                "backupTime": None
            }
        }
        json_copy["repositories"] = self.set_log_repositories()
        ixbar_path = os.path.join(self.informixdir, self.ixbar_sub_path, f'ixbar.{self.servernum}')
        # Fix: lazy %-style args were combined with an f-string prefix.
        log.info("Support log info, informix dir: %s, start id: %s, end id: %s",
                 self.informixdir, start_id, end_id)
        try:
            ret, start_time, _ = get_log_time_info(ixbar_path, start_id)
        except Exception as e:
            log.exception(e)
            return False
        if not ret:
            return False
        if not self.check_log_time(start_time):
            return False
        # Fix: guard the second lookup too (the original let it raise).
        try:
            ret, _, end_time = get_log_time_info(ixbar_path, end_id)
        except Exception as e:
            log.exception(e)
            return False
        log.info("Log backup start time: %s, end time: %s", start_time, end_time)
        if not ret:
            return False
        json_copy["extendInfo"]["beginTime"] = start_time
        json_copy["extendInfo"]["endTime"] = end_time
        json_copy["extendInfo"]["backupTime"] = end_time
        json_copy["timestamp"] = end_time
        copy_info = {
            "copy": json_copy,
            "jobId": self.job_id
        }
        log.info("copy_info: %s.", copy_info)
        return self._report_copy_info(copy_info)

    def check_log_time(self, start_time):
        """Check that the log range is continuous with the latest data copy.

        :param start_time: begin time of the log range
        :return: True when a data copy exists and its backup time is not
            later than start_time, False otherwise
        """
        if not start_time:
            log.error("The log start time is incorrect.")
            return False
        get_all_type_copy = 5  # copy-type code: latest copy of any type
        # Fix: guard the lookup like every other get_last_copy_info call site.
        try:
            last_copy_info = get_last_copy_info(get_all_type_copy, self.job_id, self.sub_job_id,
                                                self.param.get_job_protectobject())
        except Exception as e_obj:
            log.exception(e_obj)
            return False
        end_time = last_copy_info.get("extendInfo", {}).get("backupTime")
        if not end_time:
            log.error("Failed to obtain the backup time of the data copy.")
            return False
        if end_time < start_time:
            log.error("The log time is not continuous with the data copy time.log start_time:%s, data end_time:%s",
                      start_time, end_time)
            return False
        return True
