#
# This file is a part of the open-eBackup project.
# This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0.
# If a copy of the MPL was not distributed with this file, You can obtain one at
# http://mozilla.org/MPL/2.0/.
#
# Copyright (c) [2024] Huawei Technologies Co.,Ltd.
#
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND,
# EITHER EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT,
# MERCHANTABILITY OR FIT FOR A PARTICULAR PURPOSE.
#

import json
import os
import sys

from dws.commons.common import log
from common.util.check_utils import is_valid_id
from common.common import exter_attack
from common.const import ParamConstant, SubJobStatusEnum
from common.common_models import SubJobDetails

from dws.commons.const import RestoreProgressFileName, RestoreSubtaskName, CopyGenerateType
from dws.commons.progress_notify import get_progress_path
from dws.restore.parse_restore_job import ParseRestoreJobParam
from dws.restore.roach_restore_job import RoachRestoreJob
from dws.restore.gds_restore_job import GDSRestoreJob
from common.util.exec_utils import exec_overwrite_file



def read_progress_file(file_name):
    """Return the first line of a progress file, or "" on any failure.

    Args:
        file_name: path of the progress file to read.

    Returns:
        str: the first line of the file (trailing newline included), or an
        empty string when the file is missing, empty, or unreadable.
    """
    if not os.path.exists(file_name):
        log.error(f"File:{file_name} not exist.")
        return ""
    try:
        with open(file_name, "r") as tmp_fo:
            lines = tmp_fo.readlines()
            if len(lines) == 0:
                log.error(f"Fail to read progress file {file_name}")
                return ""
            return lines[0]
    except Exception as e:
        # Broad catch is deliberate: progress polling is best-effort and
        # must never raise to the caller.
        log.error(f"Read progress file {file_name} failed for reason {e}.")
    return ""


def write_progress_output_file(input_pid, data):
    """Persist progress data to the framework result file for this pid.

    Args:
        input_pid: pid of the current request; names the result file.
        data: serialized progress payload to write.
    """
    # The framework polls a file named "result<pid>" under RESULT_PATH.
    result_name = f"result{input_pid}"
    result_path = os.path.realpath(os.path.join(ParamConstant.RESULT_PATH, result_name))
    log.info(f"Write result to {result_path}")
    exec_overwrite_file(result_path, data, json_flag=False)


@exter_attack
def update_prerequisite_job_progress(input_pid):
    """Query and report the prerequisite-stage progress of a restore job.

    Args:
        input_pid: pid of the current request.

    Returns:
        bool: True when a progress result was written for the framework,
        False when no usable cache path or progress file exists.
    """
    log.info(f"Start to query restore prerequisite progress. pid: {input_pid}")
    restore_job_param = ParseRestoreJobParam(input_pid)
    is_gds_copy = restore_job_param.get_copy_generate_type() == CopyGenerateType.GENERATE_BY_GDS
    if is_gds_copy and restore_job_param.get_copy_type() != "s3Archive":
        # GDS copies (other than s3Archive) have no prerequisite work to
        # track, so completion is reported immediately.
        log.info("Get restore gds prerequisite job progress")
        done_result = SubJobDetails(taskId=restore_job_param.get_main_job_id(),
                                    subTaskId="",
                                    progress=100, taskStatus=SubJobStatusEnum.COMPLETED.value)
        write_progress_output_file(input_pid, json.dumps(done_result.dict(by_alias=True)))
        log.info("Restore gds prerequisite job success")
        return True
    _, cache_path = restore_job_param.get_available_path()
    if not cache_path:
        log.error(f"No cache/metadata path can be read or written. cache_path: {cache_path}")
        return False
    file_name = f"{RestoreProgressFileName.PREREQUISITE_PROGRESS}{restore_job_param.get_copy_id()}"
    progress_file = os.path.join(cache_path, "tmp", file_name)
    progress_data = read_progress_file(progress_file)
    if not progress_data:
        log.error(f"Read prerequisite progress from {progress_file} failed.")
        return False
    write_progress_output_file(input_pid, progress_data)
    return True


def get_white_list_job_progress(restore_job_param, input_pid):
    """Report progress of the white-list sub task.

    For GDS-generated copies, the textual status (Fail/Running/Success) in
    the progress file is translated into a SubJobDetails result; for other
    copies the raw progress file content is forwarded as-is.

    Args:
        restore_job_param: parsed restore job parameters.
        input_pid: pid of the current request; names the result file.

    Returns:
        bool: True when a result was written; False on an unusable cache
        path or an unrecognized status value.
    """
    _, cache_path = restore_job_param.get_available_path()
    if not cache_path:
        log.error(f"No cache/metadata path can be read or written. cache_path:{cache_path}")
        return False
    input_sub_task_id = restore_job_param.get_subtask_id()
    if restore_job_param.get_copy_generate_type() == CopyGenerateType.GENERATE_BY_GDS:
        progress_file = get_progress_path(restore_job_param.get_source_backup_copy_id(), input_sub_task_id, cache_path)
        progress = read_progress_file(progress_file).strip()
        # Default to FAILED; upgraded below according to the textual status.
        job_result = SubJobDetails(taskId=restore_job_param.get_main_job_id(),
                                   subTaskId=restore_job_param.get_subtask_id(),
                                   progress=100, taskStatus=SubJobStatusEnum.FAILED.value)
        if progress == "Fail":
            log.error(f"Get progress file info failed job id :{input_sub_task_id}")
        elif progress == "Running":
            log.info(f"Get progress file info running job id :{input_sub_task_id}")
            job_result.task_status = SubJobStatusEnum.RUNNING.value
        elif progress == "Success":
            log.info(f"Get progress file info complete job id: {input_sub_task_id}")
            job_result.task_status = SubJobStatusEnum.COMPLETED.value
        else:
            log.error(f"Get sub job status invalid {progress}, job id:{input_sub_task_id} ")
            return False
        write_progress_output_file(input_pid, json.dumps(job_result.dict(by_alias=True)))
        return True
    progress_file = get_progress_path(restore_job_param.get_copy_id(), input_sub_task_id, cache_path)
    progress_data = read_progress_file(progress_file)
    log.info(f"White list task progress file: {progress_file}. data:{progress_data}")
    write_progress_output_file(input_pid, progress_data)
    return True


def get_clean_job_progress(restore_job_param, input_pid):
    """Report progress of the clean sub task.

    Non-GDS copies have no clean work to track, so completion is reported
    immediately. For GDS copies the textual status (Fail/Running/Success)
    in the progress file is translated into a SubJobDetails result.

    Args:
        restore_job_param: parsed restore job parameters.
        input_pid: pid of the current request; names the result file.

    Returns:
        bool: True when a result was written; False on an unusable cache
        path or an unrecognized status value.
    """
    if restore_job_param.get_copy_generate_type() != CopyGenerateType.GENERATE_BY_GDS:
        progress_data = SubJobDetails(taskId=restore_job_param.get_main_job_id(),
                                      subTaskId=restore_job_param.get_subtask_id(),
                                      progress=100, taskStatus=SubJobStatusEnum.COMPLETED.value)
        write_progress_output_file(input_pid, json.dumps(progress_data.dict(by_alias=True)))
        return True
    _, cache_path = restore_job_param.get_available_path()
    if not cache_path:
        # Report failure to the framework before bailing out.
        fail_result = SubJobDetails(taskId=restore_job_param.get_main_job_id(),
                                    subTaskId=restore_job_param.get_subtask_id(),
                                    progress=100, taskStatus=SubJobStatusEnum.FAILED.value)
        write_progress_output_file(input_pid, json.dumps(fail_result.dict(by_alias=True)))
        log.error(f"No cache/metadata path can be read or written. cache_path:{cache_path}")
        return False
    input_sub_task_id = restore_job_param.get_subtask_id()
    progress_file = get_progress_path(restore_job_param.get_source_backup_copy_id(), input_sub_task_id, cache_path)
    progress = read_progress_file(progress_file)
    progress = progress.strip()
    # Default to FAILED; upgraded below according to the textual status.
    job_result = SubJobDetails(taskId=restore_job_param.get_main_job_id(),
                               subTaskId=restore_job_param.get_subtask_id(),
                               progress=100, taskStatus=SubJobStatusEnum.FAILED.value)
    if progress == "Fail":
        log.error(f"Get progress file info failed job id :{input_sub_task_id}")
    elif progress == "Running":
        log.info(f"Get progress file info running job id :{input_sub_task_id}")
        job_result.task_status = SubJobStatusEnum.RUNNING.value
    elif progress == "Success":
        log.info(f"Get progress file info complete job id: {input_sub_task_id}")
        job_result.task_status = SubJobStatusEnum.COMPLETED.value
    else:
        log.error(f"Get sub job status invalid {progress}, job id:{input_sub_task_id} ")
        return False
    write_progress_output_file(input_pid, json.dumps(job_result.dict(by_alias=True)))
    return True


def get_restore_job_progress(restore_job_param, input_pid):
    """Report progress of the restore sub task (GDS or Roach flavour).

    Args:
        restore_job_param: parsed restore job parameters.
        input_pid: pid of the current request; names the result file.

    Returns:
        bool: always True; a GDS job construction failure is reported as a
        FAILED sub job rather than raised.
    """
    if restore_job_param.get_copy_generate_type() == CopyGenerateType.GENERATE_BY_GDS:
        fail_result = SubJobDetails(taskId=restore_job_param.get_main_job_id(),
                                    subTaskId=restore_job_param.get_subtask_id(),
                                    progress=100, taskStatus=SubJobStatusEnum.FAILED.value)
        try:
            gds_job = GDSRestoreJob(restore_job_param, True)
        except Exception as e:
            log.error(f"GDS restore job init failed exception {e}")
            write_progress_output_file(input_pid, json.dumps(fail_result.dict(by_alias=True)))
            return True
        progress_data = json.dumps(gds_job.get_process().dict(by_alias=True))
    else:
        roach_job = RoachRestoreJob(restore_job_param)
        progress_data = read_progress_file(roach_job.get_progress_file())
        log.info(f"Restore subtask progress data:{progress_data}")
    write_progress_output_file(input_pid, progress_data)
    return True


@exter_attack
def update_restore_job_progress(input_pid):
    """Dispatch a restore sub-task progress query to the matching handler.

    Args:
        input_pid: pid of the current request.

    Returns:
        bool: the handler's result, or False for an unknown sub-task name.
    """
    log.info(f"Start to query restore subtask progress. pid:{input_pid}")
    restore_job_param = ParseRestoreJobParam(input_pid)
    handlers = {
        RestoreSubtaskName.WHILE_LIST_SUBTASK: get_white_list_job_progress,
        RestoreSubtaskName.RESTORE_SUBTASK: get_restore_job_progress,
        RestoreSubtaskName.CLEAN_SUBTASK: get_clean_job_progress,
    }
    subtask_name = restore_job_param.get_subtask_name()
    handler = handlers.get(subtask_name)
    if handler is None:
        log.error(f"Have no func execute subjob name is: {subtask_name}")
        return False
    return handler(restore_job_param, input_pid)


@exter_attack
def update_post_job_progress(input_pid):
    """Query and report the post-stage progress of a restore job.

    Args:
        input_pid: pid of the current request.

    Returns:
        bool: True when a progress result was written for the framework,
        False when no usable cache path or progress file exists.
    """
    log.info(f"Start to query post job progress. pid: {input_pid}")
    restore_job_param = ParseRestoreJobParam(input_pid)
    _, cache_path = restore_job_param.get_available_path()
    if not cache_path:
        log.error(f"No cache/metadata path can be read or written. cache_path: {cache_path}")
        return False
    progress_file = os.path.join(cache_path,
                                 f"{RestoreProgressFileName.POST_JOB_PROGRESS}{restore_job_param.get_copy_id()}")
    progress_data = read_progress_file(progress_file)
    log.info(f"Read post job progress: {progress_data}")
    if not progress_data:
        log.error(f"Read post job progress from {progress_file} fail.")
        return False
    write_progress_output_file(input_pid, progress_data)
    return True


if __name__ == "__main__":
    # Usage: <script> <task_type> <pid>
    if len(sys.argv) < 3:
        log.error("Not enough input parameters.")
        sys.exit(1)
    task_type = sys.argv[1]
    pid = sys.argv[2]

    # Reject pids that could be used for path injection in the result file.
    if not is_valid_id(pid):
        log.warn(f"pid is invalid")
        sys.exit(1)

    log.info(f"Query restore progress for {pid} {task_type}")
    dispatch = {
        "prerequisite": update_prerequisite_job_progress,
        "restore": update_restore_job_progress,
        "post_job": update_post_job_progress,
    }
    handler = dispatch.get(task_type)
    # An unknown task_type is silently ignored, matching historical behavior.
    if handler is not None:
        handler(pid)
    sys.exit(0)
