#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
import pwd
import re

from common.util.cmd_utils import cmd_format
from gaussdbt.commons.const import Env, RoachConstant, FailCases, SUPPORT_VERSION, SUPPORT_B_VERSION, ProgressInfo, \
    IS_CLONE_FILE_SYSTEM
from gaussdbt.commons.database_common import check_if_path_in_cache
from gaussdbt.commons.roach_meta_info import umount_bind_backup_path, write_progress_file
from gaussdbt.resource.gaussdbt_resource import GaussCluster
from common.common import execute_cmd, check_command_injection_exclude_quote, clean_dir
from common.const import JobData, CMDResult
from common.logger import Logger
from common.parse_parafile import get_user_name
from common.util.exec_utils import su_exec_cat_cmd, exec_overwrite_file, check_path_valid

# Module-level logger shared by all restore helpers in this file.
log = Logger().get_logger("gaussdbt_plugin.log")


class ExecRestore:
    """Execute GaussDBT restore tasks.

    Responsibilities: assemble and run the roach restore command, restart
    the cluster afterwards, report restore progress parsed from the roach
    output file, and clean up job artifacts in the post step.
    """

    def __init__(self, inputs: dict):
        """
        :param inputs: job parameters parsed from the agent input
                       (cache_path, backup_key, meta_path, time_stamp, ...).
        """
        self.input_info = inputs

    @staticmethod
    def check_zengine_version(ver: str):
        """Check whether version *ver* is new enough to skip the scp step.

        The first three numeric fields of *ver* are folded into one integer
        (first*100 + second*10 + third) and compared against SUPPORT_VERSION.
        For an exact match, an "SPC" tag or a B-build number greater than
        SUPPORT_B_VERSION also allows skipping.

        :param ver: version string, e.g. "V300R001C00SPC100B200".
        :return: True when the scp step can be skipped, False otherwise.
        """
        log.info(f'Check version {ver} support or not skip param.')
        digits = re.findall(r'\d+', ver)
        if len(digits) < 3:
            # Malformed version string: the previous implementation raised
            # IndexError here; treat it as "do not skip" instead.
            log.error(f'Version {ver} does not contain three numeric fields.')
            return False
        now_version = sum(int(digits[index]) * 10 ** (2 - index) for index in range(3))
        if now_version > SUPPORT_VERSION:
            log.info(f'Gaussdbt {now_version} Will skip scp.')
            return True
        if now_version == SUPPORT_VERSION:
            if "SPC" in ver:
                log.info('Will skip scp.')
                return True
            b_tokens = re.findall(r'B\d+', ver)
            # Guard: a version without a "B<digits>" build tag used to raise
            # IndexError; such versions now simply do not skip scp.
            if b_tokens and int(re.findall(r'\d+', b_tokens[0])[0]) > SUPPORT_B_VERSION:
                log.info('Will skip scp.')
                return True
        log.info('Will not skip scp.')
        return False

    def restart_cluster(self):
        """Start the GaussDBT cluster via roach and record job progress.

        Writes FAILED/SUCCEED into this node's progress file under cache_path.

        :return: True when the cluster was started successfully.
        """
        log.info("Restarting cluster!")
        progress_file = os.path.join(self.input_info.get("cache_path"),
                                     f'{JobData.JOB_ID}{GaussCluster.get_hostname()}{"restore_main"}')
        user_name = get_user_name(f"{Env.USER_NAME}_{JobData.PID}")
        if not user_name:
            log.error("Get user name error!")
            write_progress_file(ProgressInfo.FAILED, progress_file)
            return False
        roach_home = GaussCluster.get_roach_home(user_name)
        if not roach_home:
            log.error("Failed when get roach_home")
            write_progress_file(ProgressInfo.FAILED, progress_file)
            return False
        # roach_home is interpolated into a shell command below, so reject
        # values that could be used for command injection.
        if check_command_injection_exclude_quote(roach_home):
            log.error("The roach_home parameter contains special characters.")
            return False
        cmd = f'su - {user_name} -c "{roach_home}{RoachConstant.ROACH_START}"'
        return_code, std_out, std_err = execute_cmd(cmd)
        # Compare against CMDResult.SUCCESS.value ("0") for consistency with
        # the other execute_cmd checks in this class.
        if return_code != CMDResult.SUCCESS.value:
            log.error("Failed to start the cluster!")
            write_progress_file(ProgressInfo.FAILED, progress_file)
            return False
        log.info("Succeed to start the cluster")
        write_progress_file(ProgressInfo.SUCCEED, progress_file)
        return True

    def do_restore_post(self):
        """Post-restore cleanup.

        Removes staged data/meta directories (non-clone file systems only),
        unmounts bind mounts, deletes the per-job roach output file and then
        verifies the cluster state.

        :return: True when the cluster state is "Normal" after cleanup.
        """
        if not self.input_info.get(IS_CLONE_FILE_SYSTEM, True):
            # Non-clone file systems stage data/meta under cache_path; wipe both.
            for sub_dir in ('data', 'meta'):
                cache_dir = os.path.join(self.input_info.get("cache_path"), sub_dir)
                if check_path_valid(cache_dir, is_check_white_list=False) and os.path.exists(cache_dir):
                    clean_dir(cache_dir)
        tmp_file = os.path.join(self.input_info.get("cache_path"), self.input_info.get("backup_key"))
        umount_bind_backup_path()
        # Only delete the file when it really lives inside the cache path,
        # so a crafted backup_key cannot remove files elsewhere.
        if os.path.exists(tmp_file) and check_if_path_in_cache(tmp_file, self.input_info.get("cache_path")):
            os.remove(tmp_file)
        else:
            log.error(f'Progress file {tmp_file} not exist! failed to delete while doing post job!')
        return GaussCluster.get_cluster_state() == "Normal"

    def prepare_restore_cmd(self):
        """Assemble the full roach restore shell command.

        :return: the command string, or '' when any precondition fails
                 (missing user/roach home/port, injection risk, help query
                 failure).
        """
        user_name = get_user_name(f"{Env.USER_NAME}_{JobData.PID}")
        if not user_name:
            log.error("Get user name error!")
            return ''
        roach_home = GaussCluster.get_roach_home(user_name)
        if not roach_home:
            log.error("Failed when get roach_home")
            return ''
        # roach_home ends up inside a shell command; reject injection attempts.
        if check_command_injection_exclude_quote(roach_home):
            log.error("The roach_home parameter contains special characters.")
            return ''
        restore_port = GaussCluster.check_port()
        if not restore_port:
            log.error("The restore port parameter is none.")
            return ''
        restore_word = f'--media-destination {os.path.join(RoachConstant.ROACH_DATA_FILE_PATH, "mediadata")} ' \
                       f'--metadata-destination {self.input_info.get("meta_path")} --clean'
        if self.input_info.get("new_cluster") == "new":
            restore_word += ' --restore-new-cluster'
        if self.input_info.get("time_stamp"):
            # Point-in-time restore: target time plus archive-log destination.
            restore_word += f' --restore-target-time \'{self.input_info.get("time_stamp")}\'' \
                            f' --arch-destination {RoachConstant.ROACH_LOG_FILE_PATH}'
        else:
            restore_word += f' --backup-key {self.input_info.get("backup_key")}'
        # Query roach's help text to see whether --skip-scp-backupset exists
        # in this roach version before appending it.
        gauss_roach_help_cmd = f'su - {user_name} -c "{roach_home}{RoachConstant.ROACH_HELP}"'
        return_code, std_out, std_err = execute_cmd(gauss_roach_help_cmd)
        if return_code != CMDResult.SUCCESS.value:
            log.error(f"get gauss roach help, error: {std_err}")
            return ''
        parallel_process = self.input_info.get("parallel_process")
        if parallel_process:
            cmd = cmd_format("--parallel-process {} --master-port {}", parallel_process, restore_port)
        else:
            cmd = cmd_format("--master-port {}", restore_port)
        cmd = f'{cmd} {restore_word}'
        if "--skip-scp-backupset" in std_out and not self.input_info.get("new_backup_copy"):
            log.info("Normal backup copy, using '--skip-scp-backupset' option.")
            cmd = f'{cmd} --skip-scp-backupset'
        return f'su - {user_name} -c "{roach_home}{RoachConstant.ROACH_RESTORE} {cmd}"'

    def do_restore_job(self):
        """Run the restore command, capture its output into the progress
        file and evaluate the result.

        :return: True when roach reported success, False otherwise.
        """
        log.info("Start to do restore task")
        progress_file = os.path.join(self.input_info.get("cache_path"),
                                     f'{JobData.JOB_ID}{GaussCluster.get_hostname()}{"restore_main"}')
        write_progress_file(ProgressInfo.START, progress_file)
        # Assemble the roach restore command.
        restore_cmd = self.prepare_restore_cmd()
        # The original check also included a constant os.path.join(...) value
        # which is always truthy; only meta_path and backup_key matter.
        if not all([self.input_info.get("meta_path"), self.input_info.get("backup_key")]):
            log.error(f'Command error, will not be executed, taskID: {JobData.JOB_ID}')
            write_progress_file(ProgressInfo.FAILED, progress_file)
            return False
        if not restore_cmd:
            log.error(f'Failed to get prepare restore cmd, taskID: {JobData.JOB_ID}')
            write_progress_file(ProgressInfo.FAILED, progress_file)
            return False
        ret, std_out, _ = execute_cmd(restore_cmd)
        tmp_file = os.path.join(self.input_info.get("cache_path"), self.input_info.get("backup_key"))
        exec_overwrite_file(tmp_file, std_out, json_flag=False)
        if not os.path.exists(tmp_file):
            # The previous implementation logged and then crashed on
            # os.stat() with FileNotFoundError; fail the job cleanly instead.
            log.error(f"out_file:{tmp_file} not exist")
            write_progress_file(ProgressInfo.FAILED, progress_file)
            return False
        # Read the output file back as its owner (may differ from the
        # plugin's effective user).
        stat_info = os.stat(tmp_file)
        owner_username = pwd.getpwuid(stat_info.st_uid).pw_name
        return_code, data = su_exec_cat_cmd(tmp_file, owner_username)
        if ret != CMDResult.SUCCESS.value or not return_code:
            log.error(f'Fail to exec restore, Gauss cmd output: {data}')
            write_progress_file(ProgressInfo.FAILED, progress_file)
            return False
        log.info("Succeed to exec restore")
        return True

    def query_progress(self):
        """Parse the restore progress from the roach output file.

        :return: (ok, progress) tuple. ok is False when the progress file is
                 missing or a known failure pattern is found in the output;
                 progress is a percentage in [0, 100].
        """
        log.info('Query restore progress!')
        progress = 0
        progress_file = os.path.join(self.input_info.get("cache_path"), self.input_info.get("backup_key"))
        log.info(f'{progress_file}')
        if not os.path.exists(progress_file):
            log.error(f"Progress file: {progress_file} not exist")
            return False, progress
        log.info('Path exist')
        with open(progress_file, "r", encoding='UTF-8') as f:
            data = f.read()
        for fail_case in FailCases:
            if fail_case in data:
                return False, 0
        if "Performing restore completed" in data:
            progress = 100
        elif "%" not in data:
            # Output exists but carries no percentage yet: report a token 5%.
            progress = 5
        else:
            # Scan from the newest line backwards for the latest "xx.yy%".
            for info in reversed(data.split("\n")):
                if "%" not in info:
                    continue
                matches = re.findall(r"\d+\.\d+", info)
                # Guard: a '%' line without a float value used to raise
                # IndexError; keep scanning older lines instead.
                if matches:
                    progress = int(matches[0].split('.')[0])
                    break
        log.info(f'Progress is {progress}')
        return True, progress
