#!/usr/bin/python3
# ******************************************************************************
# Copyright (c) Huawei Technologies Co., Ltd. 2020-2020. All rights reserved.
# licensed under the Mulan PSL v2.
# You can use this software according to the terms and conditions of the Mulan PSL v2.
# You may obtain a copy of Mulan PSL v2 at:
#     http://license.coscl.org.cn/MulanPSL2
# THIS SOFTWARE IS PROVIDED ON AN "AS IS" BASIS, WITHOUT WARRANTIES OF ANY KIND, EITHER EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO NON-INFRINGEMENT, MERCHANTABILITY OR FIT FOR A PARTICULAR
# PURPOSE.
# See the Mulan PSL v2 for more details.
# ******************************************************************************/
"""
Description: conver multiple Jenkins restful APIs to python methods
Class:
"""
import gevent
from gevent import monkey

monkey.patch_all()

import jenkins
from functools import wraps
from collections import defaultdict
from javcra.common import constant
from requests.exceptions import RequestException
from javcra.libs.log import logger
import math
import time
import xml.etree.ElementTree as ET


def catch_jenkins_error(func):
    """
    Decorator that logs and swallows Jenkins-related errors.

    Catches jenkins.JenkinsException, requests' RequestException and
    ValueError raised by the wrapped callable; the error is logged and
    None is returned instead of letting the exception propagate.
    """

    @wraps(func)
    def wrapper(*args, **kwargs):
        """Invoke the wrapped callable, logging known failure types."""
        try:
            result = func(*args, **kwargs)
        except (jenkins.JenkinsException, RequestException, ValueError) as err:
            logger.error(err)
            return None
        return result

    return wrapper


class JenkinsJob:
    """
    Convert several Jenkins REST APIs into python methods for creating,
    building and querying the self-build jenkins jobs of a release.
    """

    def __init__(self, base_url, jenkins_user, jenkins_passwd, paral_num, branch, update_time):
        """
        Args:
            base_url: url of the jenkins server
            jenkins_user: jenkins user name
            jenkins_passwd: jenkins password
            paral_num: number of parallel jobs per architecture
            branch: code branch, e.g. "openEuler-20.03-LTS-SP1"
            update_time: release update date string
        """
        self.server = jenkins.Jenkins(base_url, username=jenkins_user, password=jenkins_passwd)
        self.paral_job_num = paral_num
        # timestamp identifying one batch of parallel jobs; set on creation
        self.paral_job_date = None
        self.branch = branch
        self.release_date = update_time

    def get_base_path(self):
        """
        get base path of parallel jenkins jobs

        Returns:
            base_path

        Raises:
            ValueError: when the jenkins path prefix is not configured
        """
        path_prefix = constant.JENKINS_PATH_PREFIX
        if not path_prefix:
            raise ValueError("failed to get jenkins path prefix")

        return path_prefix + "/update_" + self.paral_job_date

    def create_folder(self):
        """
        create the folder hierarchy for the jenkins jobs

        Raises:
            ValueError: when the jenkins path prefix is not configured
        """
        prefix = constant.JENKINS_PATH_PREFIX
        if not prefix:
            raise ValueError("failed to get jenkins path prefix")

        base_path = self.get_base_path()
        folder_list = [prefix, base_path, base_path + "/aarch64", base_path + "/x86_64"]
        for folder_name in folder_list:
            # an existing folder will not be created repeatedly
            self.server.create_folder(folder_name, ignore_failures=True)

    def generate_parallel_job_name(self):
        """
        generate the parallel jenkins job names for self-build

        Returns:
            parallel_job_dict: like {"aarch64": [...], "x86": [...]}
        """
        parallel_job_dict = {"aarch64": [], "x86": []}
        base_path = self.get_base_path()
        for index in range(1, self.paral_job_num + 1):
            suffix = self.branch + "_" + str(index)
            parallel_job_dict["aarch64"].append(base_path + "/aarch64/" + suffix)
            parallel_job_dict["x86"].append(base_path + "/x86_64/" + suffix)
        return parallel_job_dict

    @staticmethod
    def get_job_pkg_dict(packages, jobs):
        """
        get the dict that maps each jenkins job to the packages it processes

        Args:
            packages: parallel pkgs
            jobs: parallel jobs

        Returns:
            job_pkg_dict: dict of job name -> list of packages
        """
        job_pkg_dict = defaultdict(list)
        job_num = len(jobs)
        # guard against modulo-by-zero when no jobs are given
        if not job_num:
            return job_pkg_dict

        # distribute the packages over the jobs round-robin
        for idx, pkg in enumerate(packages):
            job_pkg_dict[jobs[idx % job_num]].append(pkg)
        return job_pkg_dict

    def get_jobs_to_create(self, template_job):
        """
        get the jobs to create according to the template_job

        Args:
            template_job: template job name

        Returns:
            jobs to create, list

        Raises:
            ValueError: when the template job name matches no known kind
        """
        base_path = self.get_base_path()
        paral_job_dict = self.generate_parallel_job_name()

        if "trigger" in template_job:
            return [base_path + "/trigger"]
        if "aarch64" in template_job:
            return paral_job_dict["aarch64"]
        if "x86" in template_job:
            return paral_job_dict["x86"]
        raise ValueError("wrong template job name, failed to create new jenkins job.")

    def create_new_job(self, job_name, job_config):
        """
        create new jenkins job

        Args:
            job_name: jenkins job name
            job_config: xml config

        Returns:
            True or False
        """
        try:
            self.server.create_job(job_name, job_config)
            return True
        except jenkins.JenkinsException as err:
            # pass the lazy %-args separately; the original passed one tuple
            # for two placeholders, which itself raised a formatting error
            logger.error("failed to create %s. %s", job_name, err)
            return False

    def create_selfbuild_jenkins_jobs(self, packages):
        """
        create the trigger, aarch64 and x86 jobs respectively

        Args:
            packages: packages to be built by the parallel jobs

        Returns:
            True on success, False when a template job name is missing
        """
        self.paral_job_date = time.strftime("%Y%m%d%H%M%S", time.localtime())
        self.create_folder()

        # create the trigger job
        template_trigger = constant.TRIGGER_TM_JOB
        if not template_trigger:
            logger.error("error occurred when getting template trigger job name.")
            return False
        target_jobs = self.get_jobs_to_create(template_trigger)
        self.create_multi_job(template_trigger, target_jobs, packages)

        # create the parallel aarch64 / x86 jobs
        for tm_job in (constant.AARCH64_TM_JOB, constant.X86_TM_JOB):
            if not tm_job:
                logger.error("error in getting template job name for creating parallel jobs.")
                return False
            target_jobs = self.get_jobs_to_create(tm_job)
            self.create_multi_job(tm_job, target_jobs, packages)
        return True

    def build_jenkins_job(self, job_name, params=None, retry=3):
        """
        trigger the build of a jenkins job

        Args:
            job_name: name of job
            params: params to build the jenkins job
            retry: max retry count, default to 3

        Returns:
            queue_item: a queue item number, or None on failure
        """
        try:
            queue_item = self.server.build_job(job_name, params)

            # if triggering the build task fails, retry up to `retry` times
            count = 0
            while not queue_item and count < retry:
                queue_item = self.server.build_job(job_name, params)
                count += 1
                logger.error("failed to build %s, retrying %s." % (job_name, count))
            return queue_item

        except RequestException:
            logger.error("Bad Request for url, please check the job name and parameters for trigger building.")
            return None

    @staticmethod
    def update_trigger_config(root, parallel_jobs):
        """
        update the config of the trigger job: rewrite the lines between
        "parallel(" and ")" in its pipeline script so that it starts the
        given parallel jobs

        Args:
            root: xml root element of the template job config
            parallel_jobs: names of the parallel jobs to trigger

        Returns:
            root element with updated script, or None on failure
        """
        ele_cmd = root.find("definition/script")
        if ele_cmd is None:
            logger.error("failed to get script of trigger job.")
            return None

        script_lines = ele_cmd.text.splitlines()
        # locate the start and end lines of the parallel(...) block
        start_index = None
        end_index = None
        for idx, line in enumerate(script_lines):
            if line.strip() == "parallel(":
                start_index = idx
            if line.strip() == ")":
                end_index = idx
            # compare against None: an index of 0 is falsy but valid
            if start_index is not None and end_index is not None:
                break

        # without both markers the slices below would be wrong or raise
        if start_index is None or end_index is None:
            logger.error("failed to locate the parallel block in trigger job script.")
            return None

        new_script_lines = list(script_lines[:start_index + 1])
        job_num = len(parallel_jobs)
        for idx, job in enumerate(parallel_jobs):
            job_str = "\'self_build_" + str(idx) + "\': { build job: \'" + job + "\', propagate: false}"
            # every entry but the last needs a trailing comma
            if idx != job_num - 1:
                job_str = job_str + ","
            new_script_lines.append(job_str)
        new_script_lines.extend(script_lines[end_index:])

        ele_cmd.text = "\n".join(new_script_lines)
        return root

    def update_paral_job_config(self, root, pkg_job_dict, target_job):
        """
        update the config of a parallel job: fill the default values of the
        PKG_NAME / UPDATE_TIME / BRANCH build parameters

        Args:
            root: xml root element of the template job config
            pkg_job_dict: dict of job name -> packages
            target_job: name of the job being configured

        Returns:
            root element with updated parameter defaults, or None on failure
        """
        ele_cmd = root.find("properties/hudson.model.ParametersDefinitionProperty//parameterDefinitions")
        if ele_cmd is None or not pkg_job_dict.get(target_job):
            logger.error("failed to get parameter definition of parallel job.")
            return None

        # loop-invariant: the comma-joined package list of this job
        verify_pkgs = ",".join(pkg_job_dict.get(target_job))
        for node in ele_cmd:
            key = node.find('name').text
            if key == "PKG_NAME":
                node.find('defaultValue').text = verify_pkgs
            elif key == "UPDATE_TIME":
                node.find('defaultValue').text = self.release_date
            elif key == "BRANCH":
                node.find('defaultValue').text = self.branch
        return root

    def update_config(self, target_job, template_job_config, packages):
        """
        update the template job config for the given target job

        Args:
            target_job: job to be created from the template
            template_job_config: xml config string of the template job
            packages: packages to distribute over the parallel jobs

        Returns:
            updated xml config string, or None on failure
        """
        # parallel jobs that the trigger job should start
        paral_job_dict = self.generate_parallel_job_name()
        paral_jobs = paral_job_dict["aarch64"] + paral_job_dict["x86"]

        # packages processed by each parallel job
        job_pkg_dict = dict()
        if "aarch64" in target_job:
            job_pkg_dict = self.get_job_pkg_dict(packages, paral_job_dict["aarch64"])
        elif "x86" in target_job:
            job_pkg_dict = self.get_job_pkg_dict(packages, paral_job_dict["x86"])

        # "is not None" is required: an Element with no children is falsy,
        # so a plain truth test would wrongly reject a valid root element
        root = ET.fromstring(template_job_config.encode("utf-8"))
        if root is None:
            return None

        if "trigger" in target_job:
            # update the trigger job configuration
            root = self.update_trigger_config(root, paral_jobs)
        else:
            # update the configuration of the concrete parallel job
            root = self.update_paral_job_config(root, job_pkg_dict, target_job)

        # the sub-updates return None on failure; ET.tostring(None) would raise
        if root is None:
            return None
        return ET.tostring(root).decode('utf-8')

    @catch_jenkins_error
    def get_job_result_status(self, job_name, job_id):
        """
        poll the result status of one build of the given job

        SUCCESS: the job succeeded
        FAILURE: the job failed
        ABORTED: the build was stopped manually
        (the "result" field is None while the build is still running)

        Args:
            job_name: name of job
            job_id: build number

        Returns:
            result status string
        """
        while True:
            res = self.server.get_build_info(job_name, job_id)['result']
            # check before sleeping so a finished build returns immediately
            if res:
                break
            time.sleep(5)
        print("%s %s任务构建完成" % (job_name, job_id))
        return res

    def build_specific_job(self, job_name, params=None):
        """
        build the job with the given name and return its last build number

        Args:
            job_name: name of job
            params: params to build the jenkins job

        Returns:
            last build number, or None on failure
        """
        if params:
            queue_item = self.build_jenkins_job(job_name, params)
        else:
            queue_item = self.build_jenkins_job(job_name)

        if not queue_item:
            logger.error("unable to build specific job %s." % job_name)
            return None
        logger.info("successfully build %s, please waiting the jenkins job result..." % job_name)

        # the returned dict has a "why" key while the queued item is still
        # waiting for an executor; sleep between polls to avoid a busy loop
        while True:
            queue_item_resp = self.server.get_queue_item(queue_item)
            if not queue_item_resp.get("why"):
                break
            time.sleep(1)

        # when the queue is over, the build id can be obtained
        return self.server.get_job_info(job_name)['lastBuild']['number']

    @catch_jenkins_error
    def get_specific_job_status(self, params, job_name):
        """
        build the given job and wait for its result status

        Args:
            params: params to build the jenkins job
            job_name: name of job

        Returns:
            list with one job status dict, or an empty list on failure
        """
        build_id = self.build_specific_job(job_name, params)
        if not build_id:
            return []

        job_status = self.get_job_result_status(job_name, build_id)
        return [{
            "name": job_name,
            "status": job_status,
            "output": self.get_job_output_url(job_name, build_id)
        }]

    def get_selfbuild_job_status(self):
        """
        get the result status of the self-build parallel jobs started by
        the trigger job

        Returns:
            list of job status dicts
        """
        target_trigger_job = self.get_jobs_to_create(constant.TRIGGER_TM_JOB)[0]
        build_id = self.build_specific_job(target_trigger_job)
        # without a build id neither the status nor the output can be queried
        if build_id is None:
            logger.error("failed to build trigger job %s." % target_trigger_job)
            return []

        trigger_status = self.get_job_result_status(target_trigger_job, build_id)
        print("trigger job result status", trigger_status)

        # output example:
        # Starting building: function-item » release-manager » openeuler-202106281604 » aarch64 » 2-11 #14
        output = self.server.get_build_console_output(target_trigger_job, build_id)
        new_output = output.replace(" » ", "/").splitlines()

        # map each started parallel job name to its build id
        job_name_id_dict = dict()
        for line in new_output:
            if "Starting building" in line:
                line_info = line.split()
                job_name_id_dict[line_info[2]] = line_info[3].strip("#")
        print(job_name_id_dict)

        # poll every parallel job by name and id until it finishes
        job_status_list = []
        for job_name, paral_build_id in job_name_id_dict.items():
            job_status_list.append({
                "name": job_name,
                "status": self.get_job_result_status(job_name, int(paral_build_id)),
                "output": self.get_job_output_url(job_name, paral_build_id)
            })
        return job_status_list

    def create_multi_job(self, template_job, jobs, packages, concurrency=75, retry=3):
        """
        create jenkins jobs concurrently, retrying the failed ones

        Args:
            template_job: template job name
            jobs: target job list
            packages: packages distributed over the jobs
            concurrency: number of jobs created concurrently per batch
            retry: max retry rounds for failed jobs
        """

        def create_multi_job_once(target_jobs, pkgs):
            """create one round of jobs, returning the names that failed"""
            batch = math.ceil(len(target_jobs) / concurrency)
            _failed_jobs = []
            for idx in range(batch):
                works = [gevent.spawn(self.dispatch, job, template_job, pkgs)
                         for job in target_jobs[idx * concurrency: (idx + 1) * concurrency]]
                logger.info("%s works, %s/%s " % (len(works), idx + 1, batch))
                gevent.joinall(works)
                for work in works:
                    if work.value["result"]:
                        logger.info("job %s ... ok" % (work.value["job"]))
                    else:
                        _failed_jobs.append(work.value["job"])
                        logger.error("job %s  ... failed" % (work.value["job"]))
            return _failed_jobs

        failed_jobs = create_multi_job_once(jobs, packages)

        for index in range(retry):
            if not failed_jobs:
                break
            logger.info("%s jobs failed, retrying %s/%s" % (len(failed_jobs), index + 1, retry))
            failed_jobs = create_multi_job_once(failed_jobs, packages)

        if failed_jobs:
            logger.warning("%s failed jobs" % (len(failed_jobs)))
            logger.warning("%s %s" % (",".join(failed_jobs[:100]), "..." if len(failed_jobs) > 100 else ""))

    def dispatch(self, job, template_job, packages):
        """
        create one target job from the template

        Args:
            job: target job name
            template_job: template job name
            packages: packages distributed over the jobs

        Returns:
            dict like {"job": job, "result": True/False}
        """
        try:
            temp_job_config = self.server.get_job_config(template_job)
            updated_config = self.update_config(job, temp_job_config, packages)
            create_result = self.create_new_job(job, updated_config)
        except (jenkins.JenkinsException, RequestException, ValueError) as err:
            # catch here so a greenlet never dies with value None, which
            # would crash the result handling in create_multi_job
            logger.error("failed to dispatch job %s. %s", job, err)
            create_result = False
        return {"job": job, "result": create_result}

    def get_job_output_url(self, job_name, build_id):
        """
        get the markdown hyperlink of a jenkins job console output

        Args:
            job_name: name of job
            build_id: build number

        Returns:
            hyperlink string, or None when the job url is unavailable
        """
        job_build_url = self.server.build_job_url(job_name)
        if not job_build_url:
            return None
        base_job_url = job_build_url.rsplit('/', 1)[0]
        # convert the link into a markdown hyperlink
        return "[#" + str(build_id) + "](" + base_job_url + "/" + str(build_id) + "/console)"

    def delete_jenkins_job(self, job_name):
        """
        delete the jenkins job if it exists

        Args:
            job_name: name of job
        """
        if self.server.job_exists(job_name):
            self.server.delete_job(job_name)


if __name__ == '__main__':
    # demo driver: create a JenkinsJob instance and run the self-build flow
    paral_num = 2
    build_pkgs = ["bluez", "edk2"]
    install_build_pkgs = ["bluez", "edk2", "httpd", "jetty", "libvma"]

    branch = "openEuler-20.03-LTS-SP1"
    update_time = "20210630"
    # NOTE(review): credentials are hard-coded here — move them to
    # environment variables or a secret store before shipping
    server = JenkinsJob('https://jenkins.openeuler.org', 'tuShenmei', '062725tu_TU', paral_num,
                        branch, update_time)

    server.delete_jenkins_job(constant.JENKINS_PATH_PREFIX)
    server.create_selfbuild_jenkins_jobs(build_pkgs)
    status_res = server.get_selfbuild_job_status()
    print(status_res)

    """
    1、自编译并行跑通
    2、把并行生成的结果传到华为云
    3、解析安装自编译的结果 --》结果字典
    4、创建issue
    5、install_build add 

    """
    # 1. run the self-build in parallel
    #
#
#
#     _repo = "pkgship"
#     issue_num = "I3ZMLH"
#
#     s = IssueOperation(_repo, constant.GITEE_TOKEN, issue_num)
#     comment_res = s.create_jenkins_comment(status_res)
#     print(comment_res)
