#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
import json
import stat

from common.parse_parafile import ParamFileUtil
from common.cleaner import clear
from common.const import RepositoryDataTypeEnum, RestoreType

from ndmp.comm.const import CopyFormat, NdmpRestoreType
from ndmp.comm.log import log
from ndmp.comm.param_base import ParamBase
from ndmp.comm.utils import get_env_in_variable, is_dst_reachable


class RestoreParam(ParamBase):
    """Accessor over the parsed NDMP restore job parameter file.

    The parameter file identified by ``req_id`` is parsed once at
    construction time; every getter reads from the cached ``self._param``
    dict afterwards.
    """

    def __init__(self, req_id):
        super().__init__(req_id)
        self._application: dict = dict()
        self.parse_param()

    @staticmethod
    def get_data_path(repositories, repository_type):
        """Return the ``remotePath`` of the first repository whose
        ``repositoryType`` equals ``repository_type``; "" when none matches.

        :param repositories: list of repository dicts from the job param.
        :param repository_type: RepositoryDataTypeEnum value to match.
        """
        for repository in repositories:
            # .get() so a malformed repository entry cannot raise KeyError
            # while scanning.
            if repository.get('repositoryType') == repository_type:
                return repository.get("remotePath", "")
        return ""

    def parse_param(self):
        """Parse the restore job parameter file into ``self._param``.

        :raise Exception: when the file cannot be parsed or yields no data.
        """
        try:
            self._param = ParamFileUtil.parse_param_file(self.req_id)
        except Exception as err:
            raise Exception(f"Failed to parse job param file for {err}") from err
        if not self._param:
            raise Exception("Failed to parse job param file is none")

    def clear(self):
        """Scrub sensitive credentials (auth keys and passwords) from memory."""
        clear(self.get_src_auth())
        clear(self.get_dst_auth())
        clear(self.get_src_pwd())
        clear(self.get_dst_pwd())

    def get_param(self):
        """Return the raw parsed parameter dict."""
        return self._param

    def get_src_path(self):
        """Return the protected object name of the first copy (source path)."""
        copies = self.get_copies()
        return copies[0].get("protectObject", {}).get("name")

    def get_dst_path(self):
        """Return the restore target object name.

        Bug fix: the fallback for "targetObject" was a list, which has no
        ``.get`` and raised AttributeError whenever the key was absent.
        """
        return self._param.get("job", {}).get("targetObject", {}).get("name")

    def get_sub_src_path(self, copy_path):
        """Join the sub-job relative path onto ``copy_path``."""
        sub_path = self.get_sub_job_info()
        # lstrip("/") so an absolute sub path does not make os.path.join
        # discard copy_path.
        return os.path.join(copy_path, sub_path.lstrip("/"))

    def get_src_ip(self):
        """Return the target environment endpoint (source-side IP)."""
        return self._param.get("job", {}).get("targetEnv", {}).get("endpoint")

    def get_dst_ip(self):
        """Return the first remote service IP reachable from the source, or "".

        Bug fix: the fallback for "serviceIp" was a list, which has no
        ``.split`` and raised AttributeError whenever the key was absent.
        """
        service_ip = self._param.get("job", {}).get("targetEnv", {}).get("extendInfo", {}).get("serviceIp", "")
        dst_ips = service_ip.split(",")
        log.info(f"dst_ips:{dst_ips}")
        for ip in dst_ips:
            if is_dst_reachable(self.get_src_ip(), ip, self.get_port()):
                return ip
        return ""

    def get_src_auth(self):
        """Return the local-side user name from the environment variables."""
        return get_env_in_variable(f"job_targetEnv_auth_extendInfo_authKey_{self.req_id}")

    def get_dst_auth(self):
        """Return the remote-side user name from the environment variables."""
        return get_env_in_variable(f"job_targetEnv_auth_authKey_{self.req_id}")

    def get_src_pwd(self):
        """Return the local-side user password from the environment variables."""
        return get_env_in_variable(f"job_targetEnv_auth_extendInfo_authPwd_{self.req_id}")

    def get_dst_pwd(self):
        """Return the remote-side user password from the environment variables."""
        return get_env_in_variable(f"job_targetEnv_auth_authPwd_{self.req_id}")

    def get_server_gap(self):
        """Return the configured NDMP server gap, "0" when not set."""
        return self._param.get("job", {}).get("targetEnv", {}).get("extendInfo", {}).get("ndmpServerGap", "0")

    def get_copies(self):
        """Return the non-empty copies list.

        :raise Exception: when the param file holds no copies.
        """
        copies = self._param.get("job", {}).get("copies", [])
        if not copies:
            raise Exception("The copies value in the param file is empty or does not exist")
        return copies

    def get_protect_object(self):
        """Return the protectObject of the first copy.

        :raise Exception: when the first copy has no protectObject.
        """
        protect = self.get_copies()[0].get("protectObject", {})
        if not protect:
            raise Exception("The protectObject does not exist")
        return protect

    def get_port(self):
        """Return the target environment port."""
        s_port = self._param.get("job", {}).get("targetEnv", {}).get("port")
        log.info(f"get_s_port: {s_port}")
        return s_port

    def get_copy_id(self):
        """Return the copy_id of the latest (last) copy."""
        return self._param.get("job", {}).get("copies", [])[-1].get("extendInfo", {}).get("copy_id")

    def get_sub_dir(self):
        """Return the names of all restoreSubObjects entries (may be empty)."""
        sub_objs = self._param.get("job", {}).get("restoreSubObjects", [])
        return [obj.get("name", "") for obj in sub_objs if obj.get("name", "")]

    def get_sub_job_info(self):
        """Return the sub job's jobInfo string ("" when absent)."""
        return self._param.get("subJob", {}).get("jobInfo", "")

    def get_sub_job_name(self):
        """Return the sub job's jobName ("" when absent)."""
        return self._param.get("subJob", {}).get("jobName", "")

    def _get_repo_path_list(self, repository_type):
        """Return the "path" list of the first repository of
        ``repository_type`` in the latest copy; [] when absent.

        Shared by the *_path getters below, which previously each carried
        an identical scan loop.
        """
        repositories = self.get_copies()[-1].get("repositories", [])
        for repository in repositories:
            if repository.get('repositoryType') == repository_type:
                return repository.get("path", [])
        return []

    def get_cache_path(self):
        """Return the first cache repository path, "" when absent."""
        cache_path = self._get_repo_path_list(RepositoryDataTypeEnum.CACHE_REPOSITORY.value)
        return cache_path[0] if cache_path else ""

    def get_back_file_path(self):
        """Return "<data repository path>/<copy id>" for the backup file.

        :raise Exception: when no data repository path exists (previously
            this surfaced as a bare IndexError).
        """
        data_path = self._get_repo_path_list(RepositoryDataTypeEnum.DATA_REPOSITORY.value)
        if not data_path:
            raise Exception("The data repository path is empty or does not exist")
        return data_path[0] + "/" + self.get_copy_id()

    def get_meta_root_path(self):
        """Return the first meta repository path, "" when absent."""
        meta_path = self._get_repo_path_list(RepositoryDataTypeEnum.META_REPOSITORY.value)
        return meta_path[0] if meta_path else ""

    def get_data_root_path(self):
        """Return the first data repository path, "" when absent."""
        data_path = self._get_repo_path_list(RepositoryDataTypeEnum.DATA_REPOSITORY.value)
        return data_path[0] if data_path else ""

    def get_restore_level(self):
        """Return 1 for a fine-grained (file-level) restore, otherwise 0."""
        restore_type = self._param.get("job", {}).get("jobParam", {}).get("restoreType")
        return 1 if restore_type == RestoreType.FINE_GRAINED_RESTORE.value else 0

    def get_restore_dst_file_system(self):
        """Return the full name of the restore destination file system."""
        return self._param.get("job", {}).get("targetObject", {}).get("extendInfo", {}).get("fullName")

    def get_restore_files(self):
        """Build the temporary file that lists the files to restore.

        Returns ``NdmpRestoreType.FULL_LEVEL`` as soon as any requested
        path is "/" (whole file system restore). Otherwise writes
        "<count>\\n<src>|<dst>|<inode>|<fh>\\n..." into
        ``<data root>/<copy id>/restore_files.tmp`` and returns
        ``NdmpRestoreType.FILE_LEVEL``. Entries without a resolvable
        inode/file handle are skipped.
        """
        file_list = self._param.get("job", {}).get("restoreSubObjects", [])
        data_path = self.get_data_root_path()
        tmp_file_name = os.path.join(self.get_file_restore_copy_id(), "restore_files.tmp")
        tmp_file_path = os.path.join(data_path, tmp_file_name)
        # Collect lines in a list and join once instead of repeated `+=`.
        lines = [str(len(file_list)) + "\n"]
        log.debug(f"restore files:{file_list}")
        for file in file_list:
            file_path = file["name"]
            if file_path == "/":
                # Restoring the root means restoring the entire file system.
                return NdmpRestoreType.FULL_LEVEL
            origin_path = file_path[1:]
            dst_path = self.get_dst_path() + file_path
            node_id, fh_info = self.get_restore_file_node_fh_info(file_path)
            if node_id and fh_info:
                lines.append(origin_path + "|" + dst_path + "|" + node_id + "|" + fh_info + "\n")
        self.write_restore_file_tmp_info("".join(lines), tmp_file_path)
        return NdmpRestoreType.FILE_LEVEL

    def write_restore_file_tmp_info(self, str_line, tmp_path):
        """Write ``str_line`` to ``tmp_path`` with owner-only permissions.

        Errors are logged but not raised (best-effort write, as before).
        """
        log.info(f"Entering write restore tmp file:{tmp_path}, str:{str_line}")
        flags = os.O_WRONLY | os.O_CREAT
        modes = stat.S_IWUSR | stat.S_IRUSR | stat.S_IXUSR
        try:
            with os.fdopen(os.open(tmp_path, flags, modes), 'w') as restore_file:
                restore_file.write(str_line)
        except Exception as err:
            log.error(err, exc_info=True)

    def get_file_restore_copy_id(self):
        """Return the copy_id of the latest copy (alias kept for callers)."""
        # Previously a byte-for-byte duplicate of get_copy_id(); delegate.
        return self.get_copy_id()

    def get_restore_file_node_fh_info(self, name):
        """Look up (inode, file handle) for ``name`` in the copy index file.

        A trailing "/" marks a directory (type "d"); otherwise a regular
        file (type "f"). Returns ("", "") when the index cannot be read.
        """
        file_type = "f"
        if name.endswith("/"):
            file_type = "d"
            name = name[:-1]
        meta_file = os.path.join(self.get_meta_root_path(),
                                 "index_" + self.get_file_restore_copy_id() + ".txt")
        node_id = ""
        fh_info = ""
        try:
            node_id, fh_info = self.get_one_node_fh_info(meta_file, name, file_type)
        except Exception as err:
            log.error(err, exc_info=True)
        return node_id, fh_info

    def get_one_node_fh_info(self, meta_file, name, file_type):
        """Scan the JSON-lines index for the entry matching ``name`` and
        ``file_type``; return its ("inode", "id") or (None, None).

        The first line of the index is a header and is skipped — presumably
        metadata rather than a file record (TODO confirm with the backup
        index writer).
        """
        flags = os.O_RDONLY
        modes = stat.S_IRUSR
        with os.fdopen(os.open(meta_file, flags, modes), 'r') as rfi_file:
            rfi_file.readline()  # skip the header line
            for line in rfi_file:
                one_file = json.loads(line.strip())
                if one_file["path"] == name and one_file["type"] == file_type:
                    return one_file["inode"], one_file["id"]
        return None, None