#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
import time

from common.common import get_host_sn, execute_cmd_list_out_to_file
from common.const import CopyDataTypeEnum, RpcParamKey
from common.exception.common_exception import ErrCodeException
from common.file_common import delete_path
from common.util.exec_utils import exec_overwrite_file
from mysql import log
from mysql.src.common.constant import ExecCmdResult, MySQLJsonConstant
from mysql.src.common.error_code import MySQLErrorCode
from mysql.src.common.execute_cmd import mysql_backup_files
from mysql.src.common.parse_parafile import ReadFile
from mysql.src.service.backup.backup_func import get_last_copy_info, get_bin_log_sql_time
from mysql.src.service.backup.backup_param import BackupParam
from mysql.src.utils.common_func import find_log_bin_path_dir, get_binlog_filenames


class BackupLog:
    """Executes a MySQL binlog (log) backup.

    Copies the binlog files produced since the previous copy into the copy
    path and builds the copy metadata (begin/end time, name of the newest
    backed-up binlog).
    """

    def __init__(self, job_id, sub_job_id, param: BackupParam, my_cnf_path):
        self.job_id = job_id
        self.sub_job_id = sub_job_id
        self.param = param
        self.my_cnf_path = my_cnf_path
        # Name of the newest binlog file included in this backup; set by
        # backup() and recorded in the copy extendInfo ("backupBinlog").
        self.last_backup_binlog = ""
        # Start SQL time (as str) of the oldest binlog on disk; set by backup().
        self.start_binlog_time = ""

    def get_log_comm(self):
        """Return a short identification string used as a log suffix."""
        return f"pid:{self.param.pid} jobId:{self.job_id} sub_job_id:{self.sub_job_id}"

    def get_log_backup_extend_info(self, start_timestamp, end_timestamp):
        """Build the extendInfo dict recorded with a log copy.

        :param start_timestamp: backup time (int, epoch seconds) of the
            previous copy, used to validate log-chain continuity
        :param end_timestamp: end SQL time of the newest backed-up binlog
        :return: dict with beginTime/endTime/backupBinlog fields
        """
        log.info("start_timestamp: %s, end_timestamp: %s" % (start_timestamp, end_timestamp))
        # If the end time of the last log copy predates the previous copy's
        # backup time, the log chain is broken: fall back to the backup time
        # of the latest full/incremental/differential copy.
        param_application = self.param.protect_object
        out_info = get_last_copy_info(None, self.job_id, [RpcParamKey.LOG_COPY], application=param_application)
        if out_info:
            begin_time = int(out_info.get("extendInfo", {}).get("endTime", ""))
            log.info(f"begin_time: {begin_time}, start_timestamp:{start_timestamp}")
            if begin_time < start_timestamp:
                begin_time = self.get_start_full_time(param_application)
        else:
            begin_time = self.get_start_full_time(param_application)
        json_dict = {
            "logDirName": "",
            "associatedCopies": [],
            "beginTime": begin_time,
            "endTime": end_timestamp,
            "backupBinlog": self.last_backup_binlog
        }
        return json_dict

    def get_start_full_time(self, param_application):
        """Return the backupTime of the latest full/incr/diff copy.

        :raises ErrCodeException: if no such copy exists (a log backup
            requires a preceding data copy).
        """
        out_info = get_last_copy_info(None, self.job_id, [RpcParamKey.FULL_COPY, RpcParamKey.INCREMENT_COPY,
                                                          RpcParamKey.DIFF_COPY], application=param_application)
        log.info(f"full_copy_out_info:{out_info}")
        # Guard: previously a missing copy produced an opaque AttributeError
        # when calling .get on None; fail with an explicit error instead.
        if not out_info:
            log.error(f"No full/incr/diff copy found. {self.get_log_comm()}")
            raise ErrCodeException(MySQLErrorCode.SYSTEM_ERROR, message="no previous data copy for log backup")
        return int(out_info.get("extendInfo", {}).get("backupTime", ""))

    def get_copy_info_last_backup_time(self):
        """Return (ok, time) of the previous copy.

        Prefers the end time of the last log copy; falls back to the backup
        time of the last full copy. Returns (False, "") when neither exists.
        """
        out_info = get_last_copy_info(self.param, self.job_id, [CopyDataTypeEnum.LOG_COPY.value])
        if not out_info:
            out_info = get_last_copy_info(self.param, self.job_id, [CopyDataTypeEnum.FULL_COPY.value])
            if not out_info:
                log.error(f"Failed to get last copy info. {self.get_log_comm()}")
                return False, ""
            return True, int(out_info.get("extendInfo", {}).get("backupTime", ""))
        return True, int(out_info.get("extendInfo", {}).get("endTime", ""))

    def exec_backup_log(self):
        """Run the log backup end-to-end and write the copy info file.

        On any failure the partially-written copy path is deleted and the
        original exception is re-raised.
        """
        log.info(f"Log backup begin. {self.get_log_comm()}")
        copy_path = self.param.get_copy_path(self.job_id)
        try:
            start_timestamp, end_timestamp = self.backup(copy_path)
            log.info(f"log time:{start_timestamp}--{end_timestamp}.{self.get_log_comm()}")
            # Compare against the previous (full/incr/diff/log) copy time to
            # verify the binlog chain is continuous.
            ret, last_backup_time = self.get_copy_info_last_backup_time()
            # Previously `ret` was ignored; a False result left
            # last_backup_time as "" and later caused an int/str TypeError.
            # Fail fast with a clear error instead.
            if not ret:
                log.error(f"Failed to get last backup time. {self.get_log_comm()}")
                raise ErrCodeException(MySQLErrorCode.SYSTEM_ERROR, message="get last copy backup time failed")
            json_extend_info = self.get_log_backup_extend_info(last_backup_time, end_timestamp)
            self.write_copy_info_log_backup(end_timestamp, json_extend_info)
        except Exception as error:
            log.exception(error)
            log.error(f"exec_backup_log error:{error}")
            delete_path(copy_path)
            raise error
        log.info(f"Log backup success.  {self.get_log_comm()}")

    def write_copy_info_log_backup(self, date_time, extend_info):
        """Persist the copy info (timestamp + extendInfo) to the cache path."""
        copy_info_path = os.path.join(self.param.cache_path, f"copy_info_{self.job_id}")
        copy_json = self.param.get_copy()
        copy_json.update({
            "timestamp": date_time,
            "extendInfo": extend_info
        })
        exec_overwrite_file(copy_info_path, copy_json)
        return True

    def backup(self, copy_path):
        """Copy the new binlog files into copy_path.

        :param copy_path: destination directory of this copy
        :return: (start_timestamp, end_timestamp) SQL time range covered by
            the backed-up binlogs
        :raises ErrCodeException: when binlogs cannot be found, copied or parsed
        """
        binlog_dir = find_log_bin_path_dir(self.param.sql_param, self.my_cnf_path)
        binlog_filenames = get_binlog_filenames(self.param.sql_param)
        # The last file was just created by "flush logs" and is not backed up.
        binlog_filenames = binlog_filenames[:-1]
        if not binlog_dir or not binlog_filenames:
            log.info(f"dir_path or binlog_filenames query empty,{self.get_log_comm()}")
            raise ErrCodeException(MySQLErrorCode.SYSTEM_ERROR, message="dir_path or binlog_filenames query empty")
        filter_binlog_names = self.filter_backup_binlog_names(binlog_dir, binlog_filenames)
        backup_files = [os.path.join(binlog_dir, file) for file in filter_binlog_names]
        log.info(f"all file array size: {len(backup_files)}")
        log.info(f"target_path: {copy_path}")
        # The backup tool caps the file count at 1000 by default; pass the
        # actual count so backups of >1000 binlog files do not fail.
        ret = mysql_backup_files(self.job_id, backup_files, copy_path, len(backup_files))
        if not ret:
            log.error(f"Backup log file failed. {self.get_log_comm()}")
            raise ErrCodeException(MySQLErrorCode.SYSTEM_ERROR, message="backup binlog files error!")
        start_ret, start_timestamp, _ = get_bin_log_sql_time(os.path.join(binlog_dir, filter_binlog_names[0]))
        self.last_backup_binlog = filter_binlog_names[-1]
        end_ret, _, end_timestamp = get_bin_log_sql_time(os.path.join(binlog_dir, filter_binlog_names[-1]))
        if not start_ret or not end_ret:
            log.error(f"Get logCopy info failed. {self.get_log_comm()}")
            raise ErrCodeException(MySQLErrorCode.SYSTEM_ERROR, message="analyze binlog files error!")
        # Start time of the oldest binlog still on disk; result status is not
        # needed here, only the timestamp (kept as best-effort, like before).
        _, start_binlog_time, _ = get_bin_log_sql_time(os.path.join(binlog_dir, binlog_filenames[0]))
        self.start_binlog_time = str(start_binlog_time)
        return start_timestamp, end_timestamp

    def filter_backup_binlog_names(self, binlog_dir, binlog_filenames):
        """Drop binlogs already covered by the previous copy.

        Keeps everything from the last backed-up binlog (inclusive, so its
        tail is re-covered) onward; returns all files when no previous
        backup point is known or the recorded file has been purged.
        """
        last_binlog_name = self.get_last_backup_log(binlog_filenames)
        if not last_binlog_name:
            log.info(f"last_start_time or last_binlog_name is empty:{self.get_log_comm()}")
            return binlog_filenames
        log.info(f"last_binlog_name:{last_binlog_name}")
        # NOTE: a dead get_bin_log_sql_time() call (result unused) was removed
        # here; parsing a binlog file is expensive.
        if last_binlog_name not in binlog_filenames:
            return binlog_filenames
        index = binlog_filenames.index(last_binlog_name)
        filter_binlog_names = binlog_filenames[index:]
        return filter_binlog_names

    def get_last_backup_log(self, binlog_filenames):
        """Return the binlog name recorded by the most recent copy.

        Prefers the last log copy's "backupBinlog"; falls back to the last
        full/incr/diff copy's. Returns "" when no data copy exists at all.
        """
        log.info(f"binlog_filenames:{binlog_filenames}")
        last_log_copy_info = get_last_copy_info(self.param, self.job_id, [RpcParamKey.LOG_COPY])
        last_full_copy_info = get_last_copy_info(self.param, self.job_id,
                                                 [RpcParamKey.FULL_COPY, RpcParamKey.INCREMENT_COPY,
                                                  RpcParamKey.DIFF_COPY])
        log.info(f"last_full_copy_info:{last_full_copy_info}")
        if not last_full_copy_info:
            log.info(f"last full copy is empty:{self.get_log_comm()}")
            return ""
        log.info(f"last_log_copy_info:{last_log_copy_info}")
        if last_log_copy_info:
            last_binlog_name = last_log_copy_info.get("extendInfo", {}).get("backupBinlog")
            return last_binlog_name
        backup_filename = last_full_copy_info.get("extendInfo", {}).get("backupBinlog")
        log.info(f"backup_filename:{backup_filename},binlog_filenames:{binlog_filenames}")
        return backup_filename
