import json
import os
import queue
import shutil
from datetime import datetime
import uuid
from vulcanus.log.log import LOGGER
from zeus.operation_service.app.constant import TaskOperationResultCode, DATA_COLLECT_PATH
from zeus.operation_service.app.core.framework.common.constant import TaskType
from zeus.operation_service.app.core.file_util import FileUtil, U_RW, G_READ, O_READ
from zeus.operation_service.app.core.framework.common.result_code import TaskResultCode, WorkFlowResultCode
from zeus.operation_service.app.core.framework.task.task_factory.base_task import BaseTask
from zeus.operation_service.app.core.framework.workflow.workflow_exception import WorkFlowException
from zeus.operation_service.app.proxy.task import TaskProxy


class DataCollectTask(BaseTask):
    """Task that collects diagnostic data from agent hosts.

    On success, each node's uploaded result archive is unpacked and the
    whole collect directory is repackaged into a single ``<task_name>.zip``.
    On failure, the partial collect directory is removed and the task is
    marked failed with an appropriate result code.
    """

    def _handle_exception(self, exception):
        """Mark the task failed and clean up the partial collect directory.

        Args:
            exception: the exception raised by the workflow run.

        Queue/timeout errors and workflow-timeout errors are recorded as
        ``TaskResultCode.TIMEOUT``; anything else as ``UNKNOWN``.
        """
        task_proxy = TaskProxy()
        # Discard whatever partial results were collected for this task.
        FileUtil.dir_remove(os.path.join(DATA_COLLECT_PATH, self.task_id))
        is_workflow_timeout = (
            isinstance(exception, WorkFlowException)
            and exception.error_code == WorkFlowResultCode.ERR_WORKFLOW_TIMEOUT
        )
        if isinstance(exception, (queue.Empty, TimeoutError)) or is_workflow_timeout:
            task_proxy.set_failed_status(self.task_id, TaskResultCode.TIMEOUT.code)
        else:
            LOGGER.error(exception)
            task_proxy.set_failed_status(self.task_id, TaskResultCode.UNKNOWN.code)

    def _post_success(self):
        """Repackage all node results into a single ``<task_name>.zip``.

        Unzips every per-node ``*.zip`` in the task's collect directory,
        archives the whole directory (via a temp dir so the archive does not
        include itself), then marks the task successful. Any failure during
        packaging marks the task failed instead.
        """
        task_proxy = TaskProxy()
        data_collect_path = os.path.join(DATA_COLLECT_PATH, self.task_id)
        # Task names carry two trailing "_<suffix>" parts; strip them to get
        # the archive base name (mirrors TaskYaml.__init__).
        zip_name = '_'.join(self.task_name.split('_')[:-2])
        dest_zip_path = os.path.join(data_collect_path, zip_name)
        # Progress messages belong at info level, not warning.
        LOGGER.info(f"make archive {zip_name}.zip")
        # Build the archive in a sibling temp dir so make_archive does not
        # try to include the archive inside the tree being archived.
        tmp_dest_dir = os.path.join(os.path.dirname(data_collect_path), str(uuid.uuid1()))
        try:
            # exist_ok avoids the racy exists()-then-makedirs pattern.
            os.makedirs(tmp_dest_dir, exist_ok=True)

            # Unpack each node's result archive and drop the original zip.
            LOGGER.info("begin to unzip data result file")
            for file_name in os.listdir(data_collect_path):
                if not str(file_name).endswith(".zip"):
                    continue
                zip_file = os.path.join(data_collect_path, file_name)
                unzip_path = ".".join(str(zip_file).split(".")[0:-1])
                os.makedirs(unzip_path)
                FileUtil.unzip(zip_file, unzip_path)
                FileUtil.file_remove(zip_file)

            # Package every node's results, named after the task.
            shutil.make_archive(os.path.join(tmp_dest_dir, zip_name), 'zip', data_collect_path)
            tmp_dest_zip_path = os.path.join(tmp_dest_dir, zip_name) + '.zip'
            LOGGER.info(f"{zip_name}.zip archive successfully")
            shutil.copyfile(tmp_dest_zip_path, dest_zip_path + '.zip')
        except Exception as e:
            LOGGER.error(f"task {self.task_id} zip failed, error:{e}")
            task_proxy.set_failed_status(self.task_id, TaskResultCode.FAILED.code)
        else:
            # Archive complete: the collect task is finished successfully.
            task_proxy.update_task(
                task_id=self.task_id,
                status=TaskResultCode.SUCCESS.code,
                end_time=datetime.now(),
                progress=1.0
            )
        finally:
            FileUtil.dir_remove(tmp_dest_dir)

    class TaskYaml(BaseTask.TaskYaml):
        """Workflow/agent-config descriptor for data-collect tasks."""

        def __init__(self, task_params: dict):
            super().__init__(task_params)
            self.workflow_template = "data_collect_workflow.yml"
            self.task_type = TaskType.DATA_COLLECT
            # Strip the two trailing "_<suffix>" parts appended to task names
            # (same convention as DataCollectTask._post_success).
            self.task_name = '_'.join(task_params.get('task_name').split('_')[:-2])

        def generate_agent_config(self, task_assets, task_case_node):
            """Extend the base agent config with the task name and persist it.

            Writes ``config.json`` (mode 0o644) under ``self.local_path`` and
            returns the config dict, matching the parent's contract.
            """
            config_json = super().generate_agent_config(task_assets, task_case_node)
            config_json['task_name'] = self.task_name
            LOGGER.info(f"config json: {config_json}")
            # O_TRUNC is required: without it, rewriting a shorter config over
            # an existing longer file leaves stale trailing bytes (bad JSON).
            config_json_fd = os.open(os.path.join(self.local_path, "config.json"),
                                     os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
                                     U_RW | G_READ | O_READ)
            with os.fdopen(config_json_fd, "w") as f:
                json.dump(config_json, f)
            return config_json

        def init_context_params(self):
            """Build the workflow context: hosts, paths and task identity."""
            context_hosts = self.generate_host_list()
            context = {
                "hosts": context_hosts,
                "jobs": {
                    # Rendered as a literal "[h1,h2,...]" string for the template.
                    "hosts": "[" + ",".join([x["hostname"] for x in context_hosts]) + "]"
                },
                "remote_path": self.remote_path,
                "local_path": self.local_path,
                "task_id": self.task_id,
                "task_name": self.task_name,
                "data_collect_dir": TaskType.DATA_COLLECT
            }
            return context
