import copy
import json
import logging
import os
import time
from dataclasses import asdict
import yaml
import sys

# PROJECT_ROOT: repository root (three levels above this file); the per-repo
# ci-repos/<repo>/ checkouts live under it.
PROJECT_ROOT = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# BASE_DIR: the package root containing utils/ and ci_config/; added to
# sys.path so the absolute imports below resolve when run as a script.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
if BASE_DIR not in sys.path:
    sys.path.insert(0, BASE_DIR)

# Root-logger setup for the whole script (file name + line number per record).
LOG_FORMAT = "%(asctime)s - %(levelname)s - [%(filename)s:%(lineno)d] - %(message)s"
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)

from utils.gitcode_helper import GitCodeHelper
from utils.pipeline_helper import PipeLineHelper
from utils.common import func_retry
from utils.parse_params import parse_repo_info_by_webhook_payload
from ci_config.constants import TASK_CONFIGS, ENV_MAPPING

# Environment values that are surfaced as task parameters
# (see WorkFlow._get_task_env).
ENV_VARS_TO_TRANSLATE = ["gitcode_domain", "gitcode_username", "gitcode_token", "conan_user", "conan_password",
                         "image_version",
                         ]

# Of the variables above, only these get their actual value copied into the
# parameter entry; the rest keep the template's value — presumably a
# placeholder so secrets are not written out. TODO confirm against
# template_task.json.
ENV_VARS_WHITELIST = ["image_version"]


class WorkFlow:
    """Create, update and delete the CodeArts CI tasks and pipeline for one
    repository, driven by ci-repos/<repo>/config.yaml.

    For each entry of TASK_CONFIGS that is enabled in config.yaml, ``__init__``
    loads the matching task/pipeline JSON templates and stores them on the
    instance as ``task_<sName>`` / ``pipeline_<sName>`` attributes. Templates
    that are missing or empty are skipped with a warning, so those attributes
    may be absent; every later lookup therefore uses ``getattr(..., None)``
    and skips gracefully instead of raising.
    """

    def __init__(self, env_class, owner, repo, branch):
        # env_class: dict of environment values (ak/sk, tokens, project_id,
        # ...) as produced by get_env_variables().
        self.env_class = env_class
        self.owner = owner
        self.repo = repo
        self.branch = branch

        self._pipeline_helper = PipeLineHelper(env_class.get("ak"), env_class.get("sk"))
        self._gitcode_helper = GitCodeHelper(owner, repo, "", env_class.get("gitcode_rw_token"),
                                             env_class.get("gitcode_api_domain"))

        self.repo_dir = os.path.join(PROJECT_ROOT, "ci-repos", repo)
        self.repo_info = self._gitcode_helper.get_repo()

        # Per-repo CI switches plus the shared parameter/pipeline templates.
        config_path = os.path.join(PROJECT_ROOT, "ci-repos", self.repo, "config.yaml")
        self.repo_config = self._read_config_file(config_path)
        template_task_path = os.path.join(BASE_DIR, "ci_config", "templates", "template_task.json")
        self.task_env_template = self._read_config_file(template_task_path)
        template_pipeline_path = os.path.join(BASE_DIR, "ci_config", "templates", "template_pipeline.json")
        self.pipeline_env_template = self._read_config_file(template_pipeline_path)

        templates_dir = os.path.join(BASE_DIR, "ci_config", "templates")
        self.pipeline_template = self._read_config_file(os.path.join(templates_dir, "create_pipeline.json"))

        # Load task/pipeline templates only for tasks enabled in config.yaml;
        # a missing or empty template file logs a warning and is skipped, in
        # which case the corresponding instance attribute is never set.
        for task in TASK_CONFIGS:
            config_key = task.get("config_key", task["name"])
            if self.repo_config.get(config_key):
                task_name = task["sName"]
                task_file = f"task_{task_name}.json"
                task_path = os.path.join(templates_dir, task_file)
                pipeline_file = f"pipeline_{task_name}.json"
                pipeline_path = os.path.join(templates_dir, pipeline_file)

                if not os.path.exists(task_path):
                    logging.warning(f"为已启用的任务 '{task_name}' 找不到模板文件: {task_path}")
                    continue
                task_content = self._read_config_file(task_path)
                if not task_content:
                    logging.warning(f"为已启用的任务 '{task_name}' 读取的模板文件为空: {task_path}")
                    continue
                if not os.path.exists(pipeline_path):
                    logging.warning(f"为已启用的任务 '{task_name}' 找不到流水线模板文件: {pipeline_path}")
                    continue
                pipeline_content = self._read_config_file(pipeline_path)
                if not pipeline_content:
                    logging.warning(f"为已启用的任务 '{task_name}' 读取的流水线模板文件为空: {pipeline_path}")
                    continue

                template_task_key = f"task_{task_name}"
                setattr(self, template_task_key, task_content)
                pipeline_key = f"pipeline_{task_name}"
                setattr(self, pipeline_key, pipeline_content)

    @staticmethod
    def _read_config_file(file_path):
        """Read a YAML (.yaml/.yml) or JSON config file.

        Returns the parsed content, or {} on any failure (missing file,
        decode error, unexpected exception) — errors are logged, never raised.
        """
        try:
            with open(file_path, "r", encoding="utf-8") as f:
                if file_path.endswith((".yaml", ".yml")):
                    return yaml.safe_load(f)
                else:
                    return json.load(f)
        except FileNotFoundError:
            logging.warning(f"Config file not found: {file_path}")
            return {}
        except json.JSONDecodeError as e:
            logging.error(f"Error decoding JSON from {file_path}: {e}")
            return {}
        except yaml.YAMLError as e:
            logging.error(f"Error decoding YAML from {file_path}: {e}")
            return {}
        except Exception as e:
            logging.error(f"Unexpected error reading config file {file_path}: {e}")
            return {}

    def _get_task_command(self, task_name):
        """Return the pre-hook shell command for *task_name* with the repo
        name substituted in, or "" when the script is missing/unreadable."""
        task_config = next((item for item in TASK_CONFIGS if item["sName"] == task_name), None)
        if not task_config:
            logging.warning(f"任务 [{task_name}] 的配置未在 TASK_CONFIGS 中找到。")
            return ""

        script_path = f"pre_{task_config.get('sName')}.sh"
        full_script_path = os.path.join(BASE_DIR, "ci_config", "hooks", script_path)
        try:
            with open(full_script_path, "r", encoding="utf-8") as file:
                command = file.read()
                # NOTE(review): assumes the hook script contains exactly one
                # %-style placeholder for the repo name — confirm hook files.
                return command % self.repo
        except FileNotFoundError:
            logging.error(f"配置的 pre-hook 脚本文件未找到：[{full_script_path}]")
            return ""
        except Exception as e:
            logging.error(f"读取 pre-hook 脚本文件 [{full_script_path}] 时发生未知错误: {e}")
            return ""

    def _get_pipeline_definition(self, task_job_ids):
        """Build the full pipeline definition from repo_config and
        *task_job_ids*, using constants.TASK_CONFIGS as the single source of
        truth. Returns the definition serialized as a JSON string."""
        pipeline_template = copy.deepcopy(self.pipeline_env_template)
        # NOTE(review): assumes the template's second stage is where task jobs
        # are inserted — confirm against template_pipeline.json.
        target_jobs_list = pipeline_template["stages"][1]["jobs"]

        for task_config in TASK_CONFIGS:
            task_name = task_config["name"]
            config_key = task_config.get("config_key", task_name)

            if self.repo_config.get(config_key):
                sName = task_config.get("sName")
                job_id = task_job_ids.get(sName)

                template_attr_name = f"pipeline_{sName}"
                template = getattr(self, template_attr_name, None)

                if template and job_id:
                    job_definition = copy.deepcopy(template)
                    # Inputs 2/3 of the first step carry the task job id and
                    # the repo name (per the pipeline_<sName>.json layout).
                    job_definition["steps"][0]["inputs"][2]["value"] = job_id
                    job_definition["steps"][0]["inputs"][3]["value"] = self.repo
                    target_jobs_list.insert(0, job_definition)
                else:
                    logging.warning(
                        f"任务 '{task_name}' 在 config.yaml 中已启用，但其模板 "
                        f"(attr: {template_attr_name}) 或 job_id 未找到，已跳过。"
                    )

        return json.dumps(pipeline_template)

    def _get_task_env(self):
        """Build the task parameter entries for the translated env vars.

        Only variables in ENV_VARS_WHITELIST have their real value copied in;
        the others keep the template's value — presumably a placeholder so
        secrets are not serialized. TODO confirm against template_task.json.
        """
        env_jsons = []
        for name, value in self.env_class.items():
            if name not in ENV_VARS_TO_TRANSLATE:
                continue

            if value is None or value == "":
                continue

            env_json = copy.deepcopy(self.task_env_template)
            env_json["params"][0]["value"] = name
            if name in ENV_VARS_WHITELIST:
                env_json["params"][2]["value"] = str(value)

            env_jsons.append(env_json)
        return env_jsons

    def _get_repo_source(self):
        """Build the pipeline "sources" entry from the fetched repo info."""
        return {
            "type": "code",
            "params": {
                "git_type": "gitcode",
                "git_url": self.repo_info.get("http_url_to_repo"),
                "default_branch": self.repo_info.get("default_branch"),
                # Hard-coded CodeArts endpoint id — shared by every pipeline
                # this tool creates.
                "endpoint_id": "437c8f000ec34eae97be1b7a28037e52",
                "codehub_id": self.repo_info.get("id"),
                "repo_name": self.repo_info.get("path")
            }
        }

    def _get_triggers(self):
        """Build the pipeline triggers: the template's triggers pointed at this
        repo, with merge_request events limited to the default branch."""
        triggers = copy.deepcopy(self.pipeline_template.get("triggers", []))
        if not triggers:
            return []

        triggers[0]["git_url"] = self.repo_info.get("http_url_to_repo")
        triggers[0]["codehub_id"] = self.repo_info.get("id")

        for event in triggers[0].get("events", []):
            if event.get("type") == "merge_request":
                event["branches"] = [self.repo_info.get("default_branch")]
                break
        return triggers

    def _create_or_update_task(self, task_config, force=False):
        """Create the task for *task_config* if absent; otherwise update it
        when *force* is set or its stored command differs from the hook script.

        Returns the task's job id, or None when the task's template was never
        loaded (missing/empty template file — see __init__).
        """
        task_name = task_config["sName"]
        template_key = f"task_{task_name}"
        # BUG FIX: template attributes only exist for tasks whose files loaded
        # OK in __init__; a bare getattr raised AttributeError for enabled
        # tasks with a missing/empty template file.
        template = getattr(self, template_key, None)
        if template is None:
            logging.warning(f"任务 [{task_name}] 的模板未加载，跳过创建/更新。")
            return None

        task_job_name = template["job_name"].format(self.repo)
        command = self._get_task_command(task_name)

        task_info = self._pipeline_helper.get_task_by_name(self.env_class.get("project_id"), task_job_name)

        if not task_info:
            body = copy.deepcopy(template)
            body["job_name"] = task_job_name
            body["project_id"] = self.env_class.get("project_id")
            body["parameters"].extend(self._get_task_env())
            body["steps"][0]["properties"]["command"] = command
            task_info = self._pipeline_helper.create_task(body)
            return task_info.get("result", {}).get("job_id")

        task_job_id = task_info.get("id")
        task_details = self._pipeline_helper.get_task_by_id(task_job_id)
        existing_command = task_details.get("result", {}).get("steps", [{}])[0].get("properties", {}).get("command")

        if force or existing_command != command:
            body = copy.deepcopy(task_details.get("result", {}))
            body.update(copy.deepcopy(template))
            body["job_name"] = task_job_name
            body["project_id"] = self.env_class.get("project_id")
            # Rebuild parameters from the template instead of extending the
            # server copy, so repeated updates don't accumulate duplicates.
            body["parameters"] = template.get("parameters", []) + self._get_task_env()
            body["steps"][0]["properties"]["command"] = command
            self._pipeline_helper.put_task_by_id(body)

        return task_job_id

    def update(self, force=False):
        """Create/update all enabled tasks, then create or update the repo's
        pipeline (sources, definition, triggers, variable group)."""
        task_job_ids = {}
        for task in TASK_CONFIGS:
            config_key = task.get("config_key", task["name"])
            if self.repo_config.get(config_key):
                job_id = self._create_or_update_task(task, force)
                task_job_ids[task["sName"]] = job_id

        pipeline_name = self.pipeline_template["name"].format(self.repo)
        pipeline_info = self._pipeline_helper.get_pipeline_by_name(self.env_class.get("project_id"), pipeline_name)
        # The name query can match loosely; keep only exact-name matches.
        pipelines = [p for p in pipeline_info.get("pipelines", []) if p.get("name") == pipeline_name]

        pipeline_body = copy.deepcopy(self.pipeline_template)
        pipeline_body.update({
            "name": pipeline_name,
            "description": self.pipeline_template.get("description", "").format(self.repo),
            "sources": [self._get_repo_source()],
            "definition": self._get_pipeline_definition(task_job_ids),
            "triggers": self._get_triggers()
        })

        if not pipelines:
            pipeline_info = self._pipeline_helper.create_pipeline(self.env_class.get("project_id"), pipeline_body,
                                                                  self.env_class)
            pipeline_id = pipeline_info.get("pipeline_id")
            self._pipeline_helper.set_variable(self.env_class.get("project_id"), pipeline_id,
                                               self.env_class.get("variable_id"))
            logging.info(f"创建 pipeline {pipeline_name} 成功，pipeline_id: {pipeline_id}")
        else:
            pipeline_id = pipelines[0]["pipeline_id"]
            self._pipeline_helper.update_pipeline(self.env_class.get("project_id"), pipeline_id, pipeline_body,
                                                  self.env_class)
            self._pipeline_helper.set_variable(self.env_class.get("project_id"), pipeline_id,
                                               self.env_class.get("variable_id"))
            logging.info(f"更新 pipeline {pipeline_name} 成功，pipeline_id: {pipeline_id}")

    def _delete_task(self, task_config):
        """Delete the CodeArts task for *task_config*, if it exists."""
        # BUG FIX: templates are stored under f"task_{sName}" (see __init__
        # and _create_or_update_task), not under a "template_key" entry —
        # TASK_CONFIGS items are accessed via "sName"/"config_key" everywhere
        # else, so task_config["template_key"] raised KeyError. Also skip
        # tasks whose template was never loaded, since delete() iterates ALL
        # of TASK_CONFIGS regardless of what is enabled.
        template = getattr(self, f"task_{task_config['sName']}", None)
        if not template:
            return
        task_name = template["job_name"].format(self.repo)
        task_info = self._pipeline_helper.get_task_by_name(self.env_class.get("project_id"), task_name)
        if task_info:
            self._pipeline_helper.delete_task(job_id=task_info["id"])

    @func_retry(tries=3, delay=3)
    def delete(self):
        """Delete the repo's pipeline (including its GitCode webhook) and all
        of its tasks. Retried up to 3 times by func_retry."""
        pipeline_name = self.pipeline_template["name"].format(self.repo)
        pipeline_info = self._pipeline_helper.get_pipeline_by_name(self.env_class.get("project_id"), pipeline_name)
        pipelines = [p for p in pipeline_info.get("pipelines", []) if p.get("name") == pipeline_name]

        if pipelines:
            pipeline_id = pipelines[0]["pipeline_id"]
            # Remove the GitCode webhook that points at this pipeline before
            # deleting the pipeline itself.
            webhook_info = self._pipeline_helper.get_webhook(self.env_class.get("project_id"), pipeline_id)
            for wh in self._gitcode_helper.get_webhooks():
                if wh.get("url") == webhook_info.get("webhookUrl"):
                    self._gitcode_helper.delete_webhook(wh["id"])
            self._pipeline_helper.delete_pipeline(project_id=self.env_class.get("project_id"),
                                                  pipeline_id=pipeline_id)

        for task in TASK_CONFIGS:
            self._delete_task(task)


class GitCodeTools:
    """Thin wrapper around GitCodeHelper for the PR that triggered this run."""

    # Posted on the PR when the whole configuration pass succeeds.
    success_comment = "Hello, your submission has been successfully executed."

    def __init__(self, owner, repo, pr_number, gitcode_token, domain):
        self._gitcode_helper = GitCodeHelper(owner, repo, pr_number, gitcode_token, domain=domain)

    def get_repo_by_payload_body(self):
        """Return the distinct repo names touched under ci-repos/ in this PR."""
        touched = set()
        for changed in self._gitcode_helper.get_pr_files():
            path = changed.get("filename", "")
            if path.startswith("ci-repos/"):
                touched.add(path.split("/")[1])
        return list(touched)

    def get_need_delete_repo(self, repos):
        """Repos whose local ci-repos/<repo> directory no longer exists."""
        gone = []
        for name in repos:
            if not os.path.exists(os.path.join(BASE_DIR, "ci-repos", name)):
                gone.append(name)
        return gone

    def get_need_update_repo(self, repos, need_delete_repos):
        """Repos to (re)configure: everything touched minus the deletions."""
        return list(set(repos).difference(need_delete_repos))

    def comment(self, comment_body):
        """Post *comment_body* as a comment on the PR."""
        self._gitcode_helper.create_comment({"body": comment_body})


def get_env_variables():
    """Resolve every entry of ENV_MAPPING from the process environment.

    Returns a dict keyed by ENV_MAPPING's internal names. Raises ValueError
    naming every environment variable that is unset or empty.
    """
    resolved = {}
    missing = []
    for key, env_name in ENV_MAPPING.items():
        value = os.getenv(env_name)
        resolved[key] = value
        if not value:
            missing.append(env_name)
    if missing:
        raise ValueError(f"Missing environment variables: {', '.join(missing)}")
    return resolved


def _execute_workflow_action(env_vars, owner, repo, action_type):
    """Run one WorkFlow action ("update" or "delete") for *repo*.

    Failures are logged and swallowed so one bad repo does not abort the
    batch; each successful action is followed by a fixed pause (presumably
    API rate limiting — the 31s figure is not explained here).
    """
    logging.info(f"Processing '{action_type}' for repo: {repo}")
    try:
        workflow = WorkFlow(env_vars, owner, repo, "")
        dispatch = {"update": workflow.update, "delete": workflow.delete}
        handler = dispatch.get(action_type)
        if handler is not None:
            handler()

        time.sleep(31)
    except Exception as e:
        logging.error(f"Failed to {action_type} for repo {repo}: {e}", exc_info=True)


def process_repositories(env_vars, owner, repos, action_type):
    """Apply *action_type* ("update" or "delete") to every repo in *repos*.

    Anything other than those two action types is logged as an error and
    ignored entirely.
    """
    if action_type in ("update", "delete"):
        for repo in repos:
            _execute_workflow_action(env_vars, owner, repo, action_type)
    else:
        logging.error(f"Invalid action type provided: {action_type}")


def main():
    """Entry point: configure CI for every repo touched by the webhook's PR.

    Derives the touched repo list from the PR's changed files, updates repos
    whose ci-repos/<repo> directory still exists, deletes the rest, then
    posts a status comment on the PR. Failures are commented back (when the
    helper was constructed) and re-raised.
    """
    logging.info("-" * 24 + " start to auto config ci " + "-" * 24)
    gitcode_tools = None
    try:
        env_vars = get_env_variables()
        owner, repo, branch, pr_number, _ = parse_repo_info_by_webhook_payload()
        gitcode_tools = GitCodeTools(owner, repo, pr_number, env_vars["gitcode_rw_token"],
                                     env_vars["gitcode_api_domain"])

        ci_repos = gitcode_tools.get_repo_by_payload_body()
        need_delete_repos = gitcode_tools.get_need_delete_repo(ci_repos)
        need_update_repos = gitcode_tools.get_need_update_repo(ci_repos, need_delete_repos)

        for label, group in (("CI repos to process", ci_repos),
                             ("Repos to delete", need_delete_repos),
                             ("Repos to update", need_update_repos)):
            logging.info(f"{label}: {', '.join(group)}")

        process_repositories(env_vars, owner, need_update_repos, "update")
        process_repositories(env_vars, owner, need_delete_repos, "delete")

        gitcode_tools.comment(gitcode_tools.success_comment)

    except Exception as e:
        logging.error(f"auto_config failed: {e}", exc_info=True)
        if gitcode_tools:
            # NOTE(review): GitCodeTools defines no "failed_comment", so the
            # fallback below is what actually gets posted — kept as-is.
            failure_comment = getattr(gitcode_tools, "failed_comment", f"An error occurred: {e}")
            gitcode_tools.comment(failure_comment)
        raise
    finally:
        logging.info("-" * 24 + " end to auto config ci " + "-" * 24)


# Script entry point (run by the CI job reacting to the GitCode webhook).
if __name__ == '__main__':
    main()
