#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# @Time    : 2024/11/1 15:55
# @Author  : Tom_zc
# @FileName: ci_config.py
# @Software: PyCharm
import copy
import json
import logging
import os
import sys
import time
from dataclasses import dataclass, asdict

import yaml

# Repository root, resolved three directory levels above this file.
BASE_DIR = os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Make the "ci-scripts" directory importable for the utils.* imports below.
sys.path.insert(0, os.path.join(BASE_DIR, "ci-scripts"))

# Configure root logging once for the whole script.
logging.basicConfig(level=logging.INFO,
                    format="%(asctime)s  %(levelname)s:%(message)s")

from utils.gitcode_helper import GitCodeHelper
from utils.common import func_retry
from utils.parse_params import parse_repo_info_by_webhook_payload
from utils.pipeline_helper import PipeLineHelper


# 1.过滤事件，过滤出创建事件，删除事件
# 2.创建事件：
#    1.先创建编译构建任务
#    2.再创建流水线
#    3.配置webhook
#    4.设置仓库的webhook
#    5.评论
# 3.删除事件：
#    1.删除构建任务
#    2.删除流水线
#    3.删除webhook
#    4.评论

@dataclass
class EnvClass:
    """Configuration values sourced from environment variables.

    Field names mirror the environment variable names so instances can be
    built directly from the process environment.
    """
    ak: str
    sk: str
    obs_endpoint: str
    obs_static_endpoint: str
    obs_bucket_name: str
    majun_domain: str
    majun_token: str
    majun_api_token: str
    gitcode_domain: str
    gitcode_api_domain: str
    gitcode_username: str
    gitcode_token: str
    gitcode_rw_token: str
    conan_user: str
    conan_password: str
    project_id: str
    sca_appId: str
    sca_secretKey: str
    sca_domain: str
    image_version: str

    def get_properties(self):
        """Return the values that are required to be non-empty.

        NOTE(review): majun_api_token and the sca_* fields are not part of
        this required list — confirm that is intentional.
        """
        return [self.gitcode_token, self.gitcode_username, self.gitcode_api_domain, self.gitcode_domain,
                self.ak, self.sk, self.obs_endpoint, self.obs_bucket_name, self.obs_static_endpoint,
                self.majun_domain, self.majun_token, self.conan_user, self.conan_password, self.project_id,
                self.image_version, self.gitcode_rw_token]

    def check_empty(self):
        """Exit the process when any required property is missing or empty."""
        if not all(self.get_properties()):
            # Missing credentials are fatal: log at ERROR (was INFO) and abort.
            logging.error("Lack of params, Please check.")
            sys.exit(-1)

    @staticmethod
    def translate_range():
        """Field names that are forwarded as task parameters (see _get_task_env)."""
        return ["gitcode_username", "gitcode_token", "gitcode_domain", "conan_user", "conan_password", "image_version"]

    @staticmethod
    def get_whitelist():
        """Field names whose fifth task-template param is set to False
        (presumably "is secret" — confirm against the task template)."""
        return ["obs_static_endpoint", "image_version", "gitcode_domain"]


class WorkFlow:
    def __init__(self, env_class, owner, repo, pr_number, domain=None):
        """Initialize the workflow and eagerly load configs, templates and hooks.

        :param env_class: EnvClass instance holding credentials and endpoints.
        :param owner: repository owner/namespace.
        :param repo: repository name; used to locate ci-repos/<repo> files.
        :param pr_number: pull-request number the workflow operates on.
        :param domain: GitCode API domain. Defaults to the gitcode_api_domain
            environment variable, resolved at call time (the original default
            was evaluated once at import time, so later env changes were lost).
        """
        if domain is None:
            # Resolve per call instead of once at function-definition time.
            domain = os.getenv("gitcode_api_domain")
        self.privileged_str = "--privileged=true"
        self.env_class = env_class
        self.repo = repo
        self._gitcode_helper = GitCodeHelper(owner, repo, pr_number, env_class.gitcode_rw_token, domain=domain)
        self._pipeline_helper = PipeLineHelper(env_class.ak, env_class.sk)
        # Per-repo CI configuration and optional check scripts (None if absent).
        self.repo_config = None
        self.build_config = None
        self.ut_config = None
        self.it_config = None
        self.coverage_config = None
        self.vc_config = None
        self.dependency_config = None
        self.ipmi_config = None
        self.line_config = None
        self.header_config = None
        self.smoke_config = None
        self.model_config = None

        self._read_ci_config()

        # JSON templates used to create tasks and the pipeline.
        self.pipeline_template = None
        self.task_build_template = None
        self.task_ut_template = None
        self.task_it_template = None
        self.task_coverage_template = None
        self.task_vc_template = None
        self.task_dependency_template = None
        self.task_ipmi_template = None
        self.task_line_template = None
        self.pipeline_complain_template = None
        self.task_env_template = None
        self.task_header_template = None
        self.task_smoke_template = None
        self.task_model_template = None
        self._read_template_config()

        # Shared pre/post hook shell snippets prepended/appended to commands.
        self.post_build_hook = None
        self.post_ut_hook = None
        self.post_it_hook = None
        self.post_coverage_hook = None
        self.pre_build_hook = None
        self.pre_ut_hook = None
        self.pre_it_hook = None
        self.pre_coverage_hook = None
        self.pre_vc_hook = None
        self.pre_dependency_hook = None
        self.pre_ipmi_hook = None
        self.pre_line_hook = None
        self.pre_header_hook = None
        self.pre_smoke_hook = None
        self.pre_model_hook = None
        self._read_hook_config()

        # Pipeline job fragments inserted into the pipeline definition.
        self.pipeline_job_build_template = None
        self.pipeline_job_ut_template = None
        self.pipeline_job_it_template = None
        self.pipeline_job_coverage_template = None
        self._read_pipeline_job_template()

    def _read_ci_config(self):
        """Load the repo's config.yaml and each optional per-check shell script.

        Every attribute keeps its None default when the corresponding file
        does not exist on disk.
        """
        repo_dir = os.path.join(BASE_DIR, "ci-repos", self.repo)
        path = os.path.join(repo_dir, "config.yaml")
        if os.path.exists(path):
            with open(path, "r", encoding="utf-8") as file:
                self.repo_config = yaml.safe_load(file)
        # Map each config attribute to its script file. Bug fix: smoke_test.sh
        # was previously stored into header_config, clobbering the header
        # script and leaving smoke_config forever None.
        script_map = (
            ("build_config", "build.sh"),
            ("ut_config", "ut.sh"),
            ("it_config", "it.sh"),
            ("coverage_config", "coverage.sh"),
            ("vc_config", "version_check.sh"),
            ("dependency_config", "dependency_check.sh"),
            ("ipmi_config", "ipmi_check.sh"),
            ("line_config", "line_limit_check.sh"),
            ("header_config", "header_check.sh"),
            ("smoke_config", "smoke_test.sh"),
            ("model_config", "model_check.sh"),
        )
        for attr, script_name in script_map:
            path = os.path.join(repo_dir, "scripts", script_name)
            if os.path.exists(path):
                with open(path, "r", encoding="utf-8") as file:
                    setattr(self, attr, file.read())

    def _read_template_config(self):
        """Load every task/pipeline JSON template into its attribute.

        All template files are mandatory: a missing file raises IOError,
        matching the original behavior.
        """
        template_dir = os.path.join(BASE_DIR, "ci-scripts", "ci_config", "templates")
        attr_to_file = (
            ("pipeline_template", "create_pipeline.json"),
            ("task_build_template", "create_task_build.json"),
            ("task_ut_template", "create_task_ut.json"),
            ("task_it_template", "create_task_it.json"),
            ("task_coverage_template", "create_task_coverage.json"),
            ("task_vc_template", "create_task_vc.json"),
            ("task_dependency_template", "create_task_dependency.json"),
            ("task_ipmi_template", "create_task_ipmi.json"),
            ("task_line_template", "create_task_line.json"),
            ("task_header_template", "create_task_header.json"),
            ("task_smoke_template", "create_task_smoke_test.json"),
            ("task_model_template", "create_task_model_check.json"),
            ("pipeline_complain_template", "pipeline_template.json"),
            ("task_env_template", "task_template.json"),
        )
        for attr, file_name in attr_to_file:
            with open(os.path.join(template_dir, file_name), "r", encoding="utf-8") as file:
                setattr(self, attr, json.load(file))

    def _read_hook_config(self):
        """Load every pre/post hook shell script into its *_hook attribute.

        All hook files are mandatory: a missing file raises IOError, matching
        the original behavior.
        """
        hooks_dir = os.path.join(BASE_DIR, "ci-scripts", "ci_config", "hooks")
        attr_to_file = (
            ("post_build_hook", "post_build.sh"),
            ("post_ut_hook", "post_ut.sh"),
            ("post_it_hook", "post_it.sh"),
            ("post_coverage_hook", "post_coverage.sh"),
            ("pre_build_hook", "pre_build.sh"),
            ("pre_ut_hook", "pre_ut.sh"),
            ("pre_it_hook", "pre_it.sh"),
            ("pre_coverage_hook", "pre_coverage.sh"),
            ("pre_vc_hook", "pre_vc.sh"),
            ("pre_dependency_hook", "pre_dependency.sh"),
            ("pre_ipmi_hook", "pre_ipmi.sh"),
            ("pre_line_hook", "pre_line.sh"),
            ("pre_header_hook", "pre_header.sh"),
            ("pre_smoke_hook", "pre_smoke_test.sh"),
            ("pre_model_hook", "pre_model_check.sh"),
        )
        for attr, file_name in attr_to_file:
            with open(os.path.join(hooks_dir, file_name), "r", encoding="utf-8") as file:
                setattr(self, attr, file.read())

    def _read_pipeline_job_template(self):
        """Load every pipeline-job JSON fragment into its attribute.

        All template files are mandatory: a missing file raises IOError,
        matching the original behavior.
        """
        template_dir = os.path.join(BASE_DIR, "ci-scripts", "ci_config", "templates")
        attr_to_file = (
            ("pipeline_job_build_template", "pipeline_job_build.json"),
            ("pipeline_job_it_template", "pipeline_job_it.json"),
            ("pipeline_job_ut_template", "pipeline_job_ut.json"),
            ("pipeline_job_coverage_template", "pipeline_job_coverage.json"),
            ("pipeline_job_vc_template", "pipeline_job_vc.json"),
            ("pipeline_job_dependency_template", "pipeline_job_dependency.json"),
            ("pipeline_job_ipmi_template", "pipeline_job_ipmi.json"),
            ("pipeline_job_line_template", "pipeline_job_line.json"),
            ("pipeline_job_header_template", "pipeline_job_header.json"),
            ("pipeline_job_smoke_template", "pipeline_job_smoke_test.json"),
            ("pipeline_job_model_template", "pipeline_job_model_check.json"),
        )
        for attr, file_name in attr_to_file:
            with open(os.path.join(template_dir, file_name), "r", encoding="utf-8") as file:
                setattr(self, attr, json.load(file))

    def _get_task_build_command(self):
        sub_command = self.pre_build_hook % self.repo
        if self.repo_config.get("build_output_log_files"):
            sub_command += "\r\n" + self.post_build_hook
        return sub_command

    def _get_task_ut_command(self):
        sub_command = self.pre_ut_hook % self.repo
        if self.repo_config.get("ut_output_log_files"):
            sub_command += "\r\n" + self.post_ut_hook
        return sub_command

    def _get_task_it_command(self):
        sub_command = self.pre_it_hook % self.repo
        if self.repo_config.get("it_output_log_files"):
            sub_command += "\r\n" + self.post_it_hook
        return sub_command

    def _get_task_coverage_command(self):
        sub_command = self.pre_coverage_hook
        if self.repo_config.get("coverage_output_log_files"):
            sub_command += "\r\n" + self.post_coverage_hook
        return sub_command

    def _get_task_vc_command(self):
        """Return the version-check command (pre-vc hook, used verbatim)."""
        return self.pre_vc_hook

    def _get_task_dependency_command(self):
        """Return the dependency-check command (pre hook, used verbatim)."""
        return self.pre_dependency_hook

    def _get_task_ipmi_command(self):
        return self.pre_ipmi_hook % self.repo

    def _get_task_line_command(self):
        """Return the line-limit-check command (pre hook, used verbatim)."""
        return self.pre_line_hook

    def _get_task_header_command(self):
        return self.pre_header_hook % self.repo

    def _get_task_smoke_test_command(self):
        return self.pre_smoke_hook % self.repo

    def _get_task_model_check_command(self):
        """Return the model-check command (pre hook, used verbatim)."""
        return self.pre_model_hook

    def _get_pipeline_definition(
            self,
            task_build_job_id,
            task_ut_job_id,
            task_it_job_id,
            task_coverage_job_id,
            task_vc_job_id,
            task_dependency_job_id,
            task_ipmi_job_id,
            task_line_job_id,
            task_header_job_id,
            task_smoke_job_id,
            task_model_job_id
    ):
        pipeline_template = copy.deepcopy(self.pipeline_complain_template)
        if self.repo_config.get("ut"):
            self.pipeline_job_ut_template["steps"][0]["inputs"][2]["value"] = task_ut_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_ut_template)
        if self.repo_config.get("it"):
            self.pipeline_job_it_template["steps"][0]["inputs"][2]["value"] = task_it_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_it_template)
        if self.repo_config.get("coverage"):
            self.pipeline_job_coverage_template["steps"][0]["inputs"][2]["value"] = task_coverage_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_coverage_template)
        if self.repo_config.get("build"):
            self.pipeline_job_build_template["steps"][0]["inputs"][2]["value"] = task_build_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_build_template)
        if self.repo_config.get("version_check"):
            self.pipeline_job_vc_template["steps"][0]["inputs"][2]["value"] = task_vc_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_vc_template)
        if self.repo_config.get("dependency_check"):
            self.pipeline_job_dependency_template["steps"][0]["inputs"][2]["value"] = task_dependency_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_dependency_template)
        if self.repo_config.get("ipmi_check"):
            self.pipeline_job_ipmi_template["steps"][0]["inputs"][2]["value"] = task_ipmi_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_ipmi_template)
        if self.repo_config.get("line_limit_check"):
            self.pipeline_job_line_template["steps"][0]["inputs"][2]["value"] = task_line_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_line_template)
        if self.repo_config.get("header"):
            self.pipeline_job_header_template["steps"][0]["inputs"][2]["value"] = task_header_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_header_template)
        if self.repo_config.get("smoke_test"):
            self.pipeline_job_smoke_template["steps"][0]["inputs"][2]["value"] = task_smoke_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_smoke_template)
        if self.repo_config.get("model_check"):
            self.pipeline_job_model_template["steps"][0]["inputs"][2]["value"] = task_model_job_id
            pipeline_template["stages"][1]["jobs"].insert(0, self.pipeline_job_model_template)
        return json.dumps(pipeline_template)

    def _get_task_env(self):
        """Build the task parameter payloads for the translatable env fields.

        Only fields listed in EnvClass.translate_range() are emitted; fields
        in EnvClass.get_whitelist() additionally get their fifth template
        param set to False.
        """
        allowed = set(EnvClass.translate_range())
        whitelist = set(EnvClass.get_whitelist())
        payloads = []
        for name, value in asdict(self.env_class).items():
            if name not in allowed:
                continue
            payload = copy.deepcopy(self.task_env_template)
            payload["params"][0]["value"] = name
            payload["params"][2]["value"] = value
            if name in whitelist:
                payload["params"][4]["value"] = False
            payloads.append(payload)
        return payloads

    def _get_pipeline_env(self):
        env_list = list()
        for name, value in asdict(self.env_class).items():
            if name == "project_id":
                continue
            env_list.append({
                "name": name,
                "type": "string",
                "value": value,
                "is_secret": True,
                "is_reset": False,
                "description": "key",
                "is_runtime": False
            })
        return env_list

    # noinspection DuplicatedCode
    @func_retry(tries=3, delay=3)
    def update(self):
        """ create the pipeline and task and webhook
            1.先创建编译构建任务
            2.再创建流水线
            3.配置webhook
            4.设置仓库的webook
            5.评论"""
        (
            task_build_job_id,
            task_ut_job_id,
            task_it_job_id,
            task_coverage_job_id,
            task_vc_job_id,
            task_dependency_job_id,
            task_ipmi_job_id,
            task_line_job_id,
            task_header_job_id,
            task_smoke_job_id,
            task_model_job_id
        ) = (None, None, None, None, None, None, None, None, None, None, None)
        # 1.add the build job
        if self.repo_config.get("build"):
            task_build_name = self.task_build_template["job_name"].format(self.repo)
            task_build_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_build_name)
            if not task_build_info:
                body = copy.deepcopy(self.task_build_template)
                body["job_name"] = task_build_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_build_command()
                task_build_info = self._pipeline_helper.create_task(body)
                task_build_job_id = task_build_info["result"]["job_id"]
            else:
                task_build_job_id = task_build_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_build_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_build_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_build_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_build_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_build_command()
                    self._pipeline_helper.put_task_by_id(body)
        # 2.add the ut job
        if self.repo_config.get("ut"):
            task_test_name = self.task_ut_template["job_name"].format(self.repo)
            task_test_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_test_name)
            if not task_test_info:
                body = copy.deepcopy(self.task_ut_template)
                body["job_name"] = task_test_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_ut_command()
                task_test_info = self._pipeline_helper.create_task(body)
                task_ut_job_id = task_test_info["result"]["job_id"]
            else:
                task_ut_job_id = task_test_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_ut_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_ut_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_ut_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_test_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_ut_command()
                    self._pipeline_helper.put_task_by_id(body)
        # 2.add the it job
        if self.repo_config.get("it"):
            task_test_name = self.task_it_template["job_name"].format(self.repo)
            task_test_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_test_name)
            if not task_test_info:
                body = copy.deepcopy(self.task_it_template)
                body["job_name"] = task_test_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_it_command()
                task_test_info = self._pipeline_helper.create_task(body)
                task_it_job_id = task_test_info["result"]["job_id"]
            else:
                task_it_job_id = task_test_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_it_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_it_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_it_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_test_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_it_command()
                    self._pipeline_helper.put_task_by_id(body)
        # 2.2 add the coverage job
        if self.repo_config.get("coverage"):
            task_test_name = self.task_coverage_template["job_name"].format(self.repo)
            task_test_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_test_name)
            if not task_test_info:
                body = copy.deepcopy(self.task_coverage_template)
                body["job_name"] = task_test_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_coverage_command()
                task_test_info = self._pipeline_helper.create_task(body)
                task_coverage_job_id = task_test_info["result"]["job_id"]
            else:
                task_coverage_job_id = task_test_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_coverage_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_coverage_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_coverage_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_test_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_coverage_command()
                    self._pipeline_helper.put_task_by_id(body)
        # 3.add the vc job
        if self.repo_config.get("version_check"):
            task_test_name = self.task_vc_template["job_name"].format(self.repo)
            task_test_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_test_name)
            if not task_test_info:
                body = copy.deepcopy(self.task_vc_template)
                body["job_name"] = task_test_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_vc_command()
                task_test_info = self._pipeline_helper.create_task(body)
                task_vc_job_id = task_test_info["result"]["job_id"]
            else:
                task_vc_job_id = task_test_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_vc_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_vc_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_vc_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_test_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_vc_command()
                    self._pipeline_helper.put_task_by_id(body)
        # 4.add the dependency job
        if self.repo_config.get("dependency_check"):
            task_test_name = self.task_dependency_template["job_name"].format(self.repo)
            task_test_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_test_name)
            if not task_test_info:
                body = copy.deepcopy(self.task_dependency_template)
                body["job_name"] = task_test_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_dependency_command()
                task_test_info = self._pipeline_helper.create_task(body)
                task_dependency_job_id = task_test_info["result"]["job_id"]
            else:
                task_dependency_job_id = task_test_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_dependency_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_dependency_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_dependency_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_test_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_dependency_command()
                    self._pipeline_helper.put_task_by_id(body)
        # 5.add the ipmi job
        if self.repo_config.get("ipmi_check"):
            task_test_name = self.task_ipmi_template["job_name"].format(self.repo)
            task_test_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_test_name)
            if not task_test_info:
                body = copy.deepcopy(self.task_ipmi_template)
                body["job_name"] = task_test_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_ipmi_command()
                task_test_info = self._pipeline_helper.create_task(body)
                task_ipmi_job_id = task_test_info["result"]["job_id"]
            else:
                task_ipmi_job_id = task_test_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_ipmi_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_ipmi_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_ipmi_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_test_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_ipmi_command()
                    self._pipeline_helper.put_task_by_id(body)
        # 6.add the line job
        if self.repo_config.get("line_limit_check"):
            task_test_name = self.task_line_template["job_name"].format(self.repo)
            task_test_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_test_name)
            if not task_test_info:
                body = copy.deepcopy(self.task_line_template)
                body["job_name"] = task_test_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_line_command()
                task_test_info = self._pipeline_helper.create_task(body)
                task_line_job_id = task_test_info["result"]["job_id"]
            else:
                task_line_job_id = task_test_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_line_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_line_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_line_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_test_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_line_command()
                    self._pipeline_helper.put_task_by_id(body)
        # 7.add the header job
        if self.repo_config.get("header"):
            task_test_name = self.task_header_template["job_name"].format(self.repo)
            task_test_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_test_name)
            if not task_test_info:
                body = copy.deepcopy(self.task_header_template)
                body["job_name"] = task_test_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_header_command()
                task_test_info = self._pipeline_helper.create_task(body)
                task_header_job_id = task_test_info["result"]["job_id"]
            else:
                task_header_job_id = task_test_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_header_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_header_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_header_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_test_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_header_command()
                    self._pipeline_helper.put_task_by_id(body)

        # 8.add the smoke test job
        if self.repo_config.get("smoke_test"):
            task_test_name = self.task_smoke_template["job_name"].format(self.repo)
            task_test_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_test_name)
            if not task_test_info:
                body = copy.deepcopy(self.task_smoke_template)
                body["job_name"] = task_test_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_smoke_test_command()
                task_test_info = self._pipeline_helper.create_task(body)
                task_smoke_job_id = task_test_info["result"]["job_id"]
            else:
                task_smoke_job_id = task_test_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_smoke_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_smoke_test_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_smoke_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_test_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_smoke_test_command()
                    self._pipeline_helper.put_task_by_id(body)

        # 9.add the model check job
        if self.repo_config.get("model_check"):
            task_test_name = self.task_model_template["job_name"].format(self.repo)
            task_test_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_test_name)
            if not task_test_info:
                body = copy.deepcopy(self.task_model_template)
                body["job_name"] = task_test_name
                body["project_id"] = self.env_class.project_id
                body["parameters"].extend(self._get_task_env())
                body["steps"][0]["properties"]["command"] = self._get_task_model_check_command()
                task_test_info = self._pipeline_helper.create_task(body)
                task_model_job_id = task_test_info["result"]["job_id"]
            else:
                task_model_job_id = task_test_info["id"]
                task_info = self._pipeline_helper.get_task_by_id(task_model_job_id)
                exist_command = task_info["result"]["steps"][0]["properties"]["command"]
                cur_command = self._get_task_model_check_command()
                if exist_command != cur_command:
                    body_template = copy.deepcopy(self.task_smoke_template)
                    body = copy.deepcopy(task_info["result"])
                    body.update(body_template)
                    body["job_name"] = task_test_name
                    body["project_id"] = self.env_class.project_id
                    body["parameters"].extend(self._get_task_env())
                    body["steps"][0]["properties"]["command"] = self._get_task_model_check_command()
                    self._pipeline_helper.put_task_by_id(body)

        # 3.add the pipeline job
        pipeline_name = self.pipeline_template["name"].format(self.repo)
        pipeline_info = self._pipeline_helper.get_pipeline_by_name(self.env_class.project_id, pipeline_name)
        pipelines = [pipeline for pipeline in pipeline_info["pipelines"] if pipeline["name"] == pipeline_name]
        if not pipelines:
            body = copy.deepcopy(self.pipeline_template)
            body["name"] = pipeline_name
            body["description"] = body["description"].format(self.repo)
            body["variables"].extend(self._get_pipeline_env())
            body["definition"] = self._get_pipeline_definition(
                task_build_job_id,
                task_ut_job_id,
                task_it_job_id,
                task_coverage_job_id,
                task_vc_job_id,
                task_dependency_job_id,
                task_ipmi_job_id,
                task_line_job_id,
                task_header_job_id,
                task_smoke_job_id,
                task_model_job_id
            )
            pipeline_info = self._pipeline_helper.create_pipeline(self.env_class.project_id, body,
                                                                  asdict(self.env_class))
            pipeline_id = pipeline_info["pipeline_id"]
        else:
            body = copy.deepcopy(self.pipeline_template)
            body["name"] = pipeline_name
            body["description"] = body["description"].format(self.repo)
            body["variables"].extend(self._get_pipeline_env())
            body["definition"] = self._get_pipeline_definition(
                task_build_job_id,
                task_ut_job_id,
                task_it_job_id,
                task_coverage_job_id,
                task_vc_job_id,
                task_dependency_job_id,
                task_ipmi_job_id,
                task_line_job_id,
                task_header_job_id,
                task_smoke_job_id,
                task_model_job_id
            )
            self._pipeline_helper.update_pipeline(self.env_class.project_id, pipelines[0]["pipeline_id"], body,
                                                  asdict(self.env_class))
            pipeline_id = pipelines[0]["pipeline_id"]

        # 4.set the webhook in pipeline
        # to enable the webhook
        self._pipeline_helper.set_webhook(self.env_class.project_id, pipeline_id)
        # to set the webhook
        self._pipeline_helper.set_webhook(self.env_class.project_id, pipeline_id)
        webhook_info = self._pipeline_helper.get_webhook(self.env_class.project_id, pipeline_id)
        # 5.add the webhook
        data = {
            "url": webhook_info["webhookUrl"],
            "push_events": True,
            "tag_push_events": False,
            "issues_events": False,
            "note_events": True,
            "merge_requests_events": True
        }
        webhooks = self._gitcode_helper.get_webhooks()
        webhook_ids = [i for i in webhooks if i["url"] == data["url"]]
        logging.info("webhook_ids: {}".format(webhook_ids))
        if not webhook_ids:
            self._gitcode_helper.create_webhook(data)

    @func_retry(tries=3, delay=3)
    def delete(self):
        """Remove every CI artifact previously created for ``self.repo``.

        Tear-down order:
          1. the GitCode repository webhook that points at the pipeline,
             then the majun pipeline itself;
          2. every per-check build task (build / ut / it / vc / coverage /
             dependency / ipmi / line / header / smoke / model).

        Artifacts that no longer exist are skipped, so repeated calls are
        safe (idempotent).
        """
        # 1.delete the webhook and pipeline
        pipeline_name = self.pipeline_template["name"].format(self.repo)
        pipeline_info = self._pipeline_helper.get_pipeline_by_name(self.env_class.project_id, pipeline_name)
        pipelines = [pipeline for pipeline in pipeline_info["pipelines"] if pipeline["name"] == pipeline_name]
        if pipelines:
            pipeline_id = pipelines[0]["pipeline_id"]
            webhook_info_by_pipeline = self._pipeline_helper.get_webhook(self.env_class.project_id, pipeline_id)
            webhooks = self._gitcode_helper.get_webhooks()
            # Only remove the hook(s) wired to this pipeline's URL, not others.
            webhook_infos = [i for i in webhooks if i["url"] == webhook_info_by_pipeline["webhookUrl"]]
            for webhook_info in webhook_infos:
                self._gitcode_helper.delete_webhook(webhook_info["id"])
            self._pipeline_helper.delete_pipeline(project_id=self.env_class.project_id, pipeline_id=pipeline_id)
        # 2.delete every task the update path can create.
        # BUG FIX: the original only removed build/ut/it/vc/coverage/smoke/model
        # tasks, leaking the dependency, ipmi, line and header tasks that the
        # update path also creates — they are included here.
        task_templates = (
            self.task_build_template,
            self.task_ut_template,
            self.task_it_template,
            self.task_vc_template,
            self.task_coverage_template,
            self.task_dependency_template,
            self.task_ipmi_template,
            self.task_line_template,
            self.task_header_template,
            self.task_smoke_template,
            self.task_model_template,
        )
        for template in task_templates:
            task_name = template["job_name"].format(self.repo)
            task_info = self._pipeline_helper.get_task_by_name(self.env_class.project_id, task_name)
            if task_info:
                self._pipeline_helper.delete_task(job_id=task_info["id"])


class GitCodeTools:
    """Thin wrapper for reading a config PR on GitCode and commenting on it."""

    success_comment = "Hello, your submission has been successfully executed."
    failed_comment = "Hello, the content you submitted failed to execute, please contact the [@openUBMC-robot](https://gitcode.com/openUBMC-robot)."

    def __init__(self, owner, repo, pr_number, gitcode_token, domain):
        self._gitcode_helper = GitCodeHelper(owner, repo, pr_number, gitcode_token, domain=domain)

    def get_repo_by_payload_body(self):
        """Return the distinct repo names touched under ``ci-repos/`` in the PR."""
        changed_files = self._gitcode_helper.get_pr_files()
        names = {
            entry["filename"].split("/")[1]
            for entry in changed_files
            if entry["filename"].startswith("ci-repos/")
        }
        return list(names)

    def get_need_delete_repo(self, repos):
        """Return the repos whose ``ci-repos/<name>`` directory is gone locally."""
        return [
            name for name in repos
            if not os.path.exists(os.path.join(BASE_DIR, "ci-repos", name))
        ]

    def get_need_update_repo(self, repos, need_delete_repos):
        """Return the touched repos that are not scheduled for deletion."""
        return list(set(repos) - set(need_delete_repos))

    def comment(self, comment):
        """Post *comment* as a note on the pull request."""
        self._gitcode_helper.create_comment({"body": comment})


def main():
    """Entry point: sync the majun CI pipelines with the repos changed in the PR.

    Reads all credentials/settings from environment variables, parses the
    webhook payload for the PR coordinates, then updates the pipeline of every
    touched repo that still exists and deletes the pipeline of every touched
    repo whose config directory was removed. Finally comments the outcome on
    the PR (success or failure).
    """
    logging.info("-" * 24 + "start to auto config ci" + "-" * 24)
    # All secrets and endpoints are injected by the CI job environment.
    env_class = EnvClass(
        ak=os.getenv("ak"),
        sk=os.getenv("sk"),
        obs_endpoint=os.getenv("obs_endpoint"),
        obs_static_endpoint=os.getenv("obs_static_endpoint"),
        obs_bucket_name=os.getenv("obs_bucket_name"),
        majun_domain=os.getenv("majun_domain"),
        majun_token=os.getenv("majun_token"),
        majun_api_token=os.getenv("majun_api_token"),
        gitcode_domain=os.getenv("gitcode_domain"),
        gitcode_api_domain=os.getenv("gitcode_api_domain"),
        gitcode_username=os.getenv("gitcode_username"),
        gitcode_rw_token=os.getenv("gitcode_rw_token"),
        gitcode_token=os.getenv("gitcode_r_token"),
        conan_user=os.getenv("conan_r_user"),
        conan_password=os.getenv("conan_r_password"),
        project_id=os.getenv("project_id"),
        sca_appId=os.getenv("sca_appId"),
        sca_secretKey=os.getenv("sca_secretKey"),
        sca_domain=os.getenv("sca_domain"),
        image_version=os.getenv("image_version")
    )
    env_class.check_empty()  # exits the process if any required variable is missing

    owner, repo, branch, pr_number, url = parse_repo_info_by_webhook_payload()
    gitcode_tools = GitCodeTools(owner, repo, pr_number, env_class.gitcode_rw_token, env_class.gitcode_api_domain)

    ci_repos = gitcode_tools.get_repo_by_payload_body()
    need_delete_repo = gitcode_tools.get_need_delete_repo(ci_repos)
    need_update_repo = gitcode_tools.get_need_update_repo(ci_repos, need_delete_repo)
    logging.info("parse the ci repos:%s", ",".join(ci_repos))
    logging.info("parse the need_delete repos:%s", ",".join(need_delete_repo))
    logging.info("parse the need_update repos:%s", ",".join(need_update_repo))
    try:
        for ci_repo in need_update_repo:
            workflow = WorkFlow(env_class, owner, ci_repo, "")
            workflow.update()
            # throttle between repos — presumably to avoid majun API rate
            # limits; TODO confirm whether 60s is required
            time.sleep(60)
        for ci_repo in need_delete_repo:
            workflow = WorkFlow(env_class, owner, ci_repo, "")
            workflow.delete()
            time.sleep(60)
        gitcode_tools.comment(gitcode_tools.success_comment)
    except Exception as e:
        # BUG FIX: the original posted success_comment here too, so a failed
        # run was reported on the PR as a success. Also log the traceback
        # instead of just the message.
        logging.exception("auto_config:%s", str(e))
        gitcode_tools.comment(gitcode_tools.failed_comment)
    logging.info("-" * 24 + "end to auto config ci" + "-" * 24)


if __name__ == '__main__':
    main()
