#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import os
import re
import shutil
import time
from datetime import datetime

from common.cleaner import clear_repository_dir
from common.util.backup import query_progress, backup_files
from common.util.exec_utils import exec_mkdir_cmd
from mysql.src.common.constant import RoleType
from mysql.src.utils.common_func import SQLParam, exec_mysql_sql_cmd, get_conf_by_key, get_value
from mysql import log


def parse_time_stamp(time_stamp_str):
    """Convert an epoch-seconds string to a local-time 'YYYY-MM-DD HH:MM:SS' string."""
    epoch_seconds = int(time_stamp_str)
    return time.strftime("%Y-%m-%d %H:%M:%S", time.localtime(epoch_seconds))


def parse_xtrabackup_info(file_path):
    """Parse a xtrabackup_info file into a dict of key/value pairs.

    Lines without '=' are skipped.  When a 'binlog_pos' entry shaped like
    "filename 'X', position 'N'" is present, its pieces are also stored
    under 'binlog_filename' and 'binlog_position'.
    """
    info = {}
    with open(file_path, 'r') as handle:
        for raw_line in handle:
            if '=' not in raw_line:
                continue
            key, _, value = raw_line.strip().partition('=')
            info[key.strip()] = value.strip()
    binlog_pos = info.get("binlog_pos")
    if binlog_pos:
        matched = re.search(r"filename '([^']+)', position '(\d+)'", binlog_pos)
        if matched:
            info["binlog_filename"] = matched.group(1)
            info["binlog_position"] = matched.group(2)
    return info


def parse_log_meta(meta_file):
    """Parse a log-copy meta file into {copy_id: {"start_stamp", "end_stamp"}}.

    Each line is expected to look like "<copy_id>;<start>~<end>".
    """
    timestamp_id_dict = {}
    with open(meta_file, 'r', encoding='utf-8') as handle:
        for raw_line in handle:
            fields = raw_line.strip('\n').split(";")
            copy_id = fields[0].strip()
            stamps = fields[1].strip().split("~")
            timestamp_id_dict[copy_id] = {
                "start_stamp": stamps[0],
                "end_stamp": stamps[1],
            }
    return timestamp_id_dict


def convert_to_timestamp(time_str):
    """Turn a local-time string formatted as 'YYYY-MM-DD HH:MM:SS' into integer epoch seconds."""
    return int(datetime.strptime(time_str, '%Y-%m-%d %H:%M:%S').timestamp())


def parse_num(file_name):
    """Extract the concatenated digits of *file_name* as an int.

    Used as a sort key for binlog file names such as 'mysql-bin.000123'.
    Returns 0 when the name contains no digits: the original ``int('')``
    raised ValueError, which would crash the sort in get_bin_log_names /
    convert_restore_binlog_files_str if an unexpected digit-free file
    slipped through the listing filters.
    """
    digits = ''.join(char for char in file_name if char.isdigit())
    return int(digits) if digits else 0


def get_bin_log_names(bin_log_copy_dir):
    """List regular binlog files in *bin_log_copy_dir*, highest sequence number first.

    The '.index' file and any sub-directories are skipped.
    """
    candidates = [
        entry for entry in os.listdir(bin_log_copy_dir)
        if not entry.endswith(".index")
        and os.path.isfile(os.path.join(bin_log_copy_dir, entry))
    ]
    return sorted(candidates, key=parse_num, reverse=True)


def convert_restore_binlog_files_str(restore_files: [str]):
    """Sort *restore_files* in place by the numeric part of each base name.

    Returns the same (mutated) list, lowest binlog number first.
    """
    restore_files.sort(key=lambda path: parse_num(os.path.basename(path)))
    return restore_files


def show_master_status(sql_param: SQLParam):
    """Run 'show master status' and return (binlog file name, position).

    Returns ("", "") when the command fails OR when it succeeds but yields
    no rows (e.g. binary logging disabled) — the original indexed
    ``output[0]`` unconditionally on success and could raise IndexError
    on an empty result set.
    """
    sql_param.sql = "show master status"
    ret, output = exec_mysql_sql_cmd(sql_param)
    if not ret or not output:
        return "", ""
    return output[0][0], output[0][1]


def stop_slave(sql_param: SQLParam):
    """Execute 'stop slave' on the instance; return True on success, False otherwise."""
    sql_param.sql = "stop slave"
    success, _ = exec_mysql_sql_cmd(sql_param)
    return bool(success)


def start_slave(sql_param: SQLParam):
    """Execute 'start slave' on the instance; return True on success, False otherwise."""
    sql_param.sql = "start slave"
    success, _ = exec_mysql_sql_cmd(sql_param)
    return bool(success)


def reset_slave_all(sql_param: SQLParam):
    """Execute 'reset slave all' on the instance; return True on success, False otherwise."""
    sql_param.sql = "reset slave all"
    success, _ = exec_mysql_sql_cmd(sql_param)
    return bool(success)


def get_undo_size_in_point_dir(undo_dir: str):
    """Count the entries in *undo_dir* whose names start with 'undo'."""
    return sum(1 for entry in os.listdir(undo_dir) if entry.startswith('undo'))


def binlog_pattern(my_cnf_path: str):
    """Return the binlog base name configured via 'log_bin' in my.cnf.

    Falls back to the default "mysql-bin" when the option is missing or
    does not resolve to an existing absolute file path.
    """
    configured = get_conf_by_key(my_cnf_path, "log_bin")
    if configured and os.path.isabs(configured) and os.path.isfile(configured):
        return os.path.basename(configured)
    return "mysql-bin"


def is_master_node(node):
    """Return True when the node's extendInfo role marks it as the active (master) node."""
    role = get_value(node.get("extendInfo", {}), "role")
    return role == RoleType.ACTIVE_NODE


class BackupStatus:
    """Status codes reported by query_progress for a backup/restore job."""
    COMPLETED = 1  # job finished successfully
    RUNNING = 2    # job still in progress
    FAILED = 3     # job ended with an error


def is_e1000_mount_type(json_param):
    """Return True when the job's agent mount type is "fuse" (E1000 appliance mount)."""
    extend_info = json_param.get("job", {}).get("extendInfo", {})
    return extend_info.get("agentMountType", "") == "fuse"


def copy_files(src_path: str, target_path: str, job_id=""):
    """Back up *src_path* into *target_path* via backup_files and wait for the job.

    A directory source gets a trailing '/' appended so its contents (not
    the directory itself) are copied.  Returns True only when the backup
    job completes successfully.
    """
    log.info(f"Start copying file: {src_path} to path: {target_path}")
    if os.path.isdir(src_path) and not src_path.endswith("/"):
        src_path = f"{src_path}/"
    if not backup_files(job_id, [src_path], target_path, write_meta=True):
        log.error(f"Failed to start backup, jobId: {job_id}.")
        return False
    return get_restore_status(job_id)


def copy_directory(source_dir, destination_dir):
    """Replicate *source_dir*'s top-level entries into *destination_dir*.

    An existing destination directory is emptied first; a missing one is
    created.  Sub-directories are copied recursively and files with their
    metadata.  Returns True on success; logs and returns False on any
    failure (best-effort contract preserved for callers).
    """
    try:
        if os.path.exists(destination_dir) and os.path.isdir(destination_dir):
            clear_repository_dir(destination_dir)
        else:
            exec_mkdir_cmd(destination_dir, is_check_white_list=False)
        for entry in os.listdir(source_dir):
            src_entry = os.path.join(source_dir, entry)
            dst_entry = os.path.join(destination_dir, entry)
            if os.path.isdir(src_entry):
                shutil.copytree(src_entry, dst_entry)
            else:
                shutil.copy2(src_entry, dst_entry)
        log.info(f"copy_directory:{source_dir} copy to destination_dir:{destination_dir} success")
        return True
    except Exception as exception_str:
        log.exception(exception_str)
        log.error(f"copy_directory:{source_dir} copy to destination_dir:{destination_dir} failed")
        return False


def get_restore_status(job_id):
    """Poll query_progress every 10 seconds until the job leaves the RUNNING state.

    Returns True when the job completes, False when it fails or reports an
    unrecognized status.  Blocks indefinitely while the job keeps running.
    """
    while True:
        time.sleep(10)
        status, progress, data_size = query_progress(job_id)
        log.info(f"Get restore result: status:{status}, progress:{progress}, data_size:{data_size}")
        if status == BackupStatus.RUNNING:
            continue
        if status == BackupStatus.COMPLETED:
            log.info(f"Restore completed, jobId: {job_id}.")
            return True
        if status == BackupStatus.FAILED:
            log.error(f"Restore failed, jobId: {job_id}.")
            return False
        # Any other value is treated as an error status.
        log.error(f"Backup failed, status error jobId: {job_id}.")
        return False
