from tjob.initialize.setup_envs import (
    DLC,
    _DLC_PARAMS,
    update_dlc_params,
    get_default_dlc_file,
    _clean_gpu,
    set_clear_gpu,
)
from tjob.utils import (
    dump_dlc_params,
    # read_exec_files,
    show_file,
    get_curtime,
    CommonParser,
    JobInfo,
)
from tjob.exec.pai_client import do_request
from .base_handler import BaseHandler
import os
import json
import math
import subprocess
from prettytable import PrettyTable


class PAIHandler(BaseHandler):
    """Submit and manage PAI jobs through the ``dlc`` command-line client.

    Job submission methods return the ``subprocess.Popen`` of the CLI call
    (non-blocking); ``stop``/``remove`` block until the CLI exits.
    """

    def __init__(
        self,
        cluster,
        **kwargs,
    ) -> None:
        """Load default DLC params for `cluster` and override them with `kwargs`."""
        super().__init__()
        self._update_kwargs = kwargs
        dlc_params = update_dlc_params(get_default_dlc_file(cluster), **kwargs)
        # use for pyfile update kwargs
        self.dlc_params = dlc_params
        # id required after submit
        self._submit_info = JobInfo(
            dlc_params.get("name"),
            "pai",
            get_curtime(),
        )

    @staticmethod
    def _popen(cmd: str) -> subprocess.Popen:
        """Launch `cmd` through the shell with piped stdin/stdout, non-blocking."""
        return subprocess.Popen(
            cmd, shell=True, stdin=subprocess.PIPE, stdout=subprocess.PIPE
        )

    @classmethod
    def params_submit(cls, dlc_params) -> subprocess.Popen:
        """Dump `dlc_params` to the shared params file and submit a job from it."""
        dump_dlc_params(dlc_params, _DLC_PARAMS)
        # priority has some issues in --params_file, here not convert
        return cls._popen(f"{DLC} create job --params_file {_DLC_PARAMS}")

    def submit(self, command: str) -> subprocess.Popen:
        """Submit a job that runs `command` (with GPU-mem cleanup appended)."""
        # avoid PAI not clearing GPU mem: make sure the cleanup script exists
        if not os.path.exists(_clean_gpu):
            set_clear_gpu()

        command += " && " + CommonParser(_clean_gpu).command
        self.dlc_params["command"] = command
        # priority has some issues in --params_file, so separate it onto the CLI
        priority = self.dlc_params.pop("priority")
        self.dlc_params.pop("machine", None)  # params_file doesn't need `machine`
        dump_dlc_params(self.dlc_params, _DLC_PARAMS)
        return self._popen(
            f"{DLC} create job --params_file {_DLC_PARAMS} --priority {priority}"
        )

    @classmethod
    def pipeline_submit(cls, pipeline_yaml: str, name: str = None) -> subprocess.Popen:
        """Submit a pipeline-run from `pipeline_yaml`; auto-name it when absent."""
        name = name if name is not None else "pipeline-" + get_curtime("-")
        return cls._popen(
            f"{DLC} create  pipeline-run --pipeline_file {pipeline_yaml} --name {name}"
        )

    @classmethod
    def resubmit(cls, name=None) -> subprocess.Popen:
        """Re-submit a job from the last dumped params file, optionally renamed."""
        create_cmd = f"{DLC} create job --params_file {_DLC_PARAMS}"
        if name:
            create_cmd += f" --name {name}"
        return cls._popen(create_cmd)

    @staticmethod
    def show_params():
        """Print the contents of the current DLC params file."""
        show_file(_DLC_PARAMS)

    @staticmethod
    def stop(id, force=True):
        """Stop job `id` and return the CLI exit code.

        `force` is deliberately not exposed to end users.
        """
        stop_cmd = f"{DLC} stop job {id}"
        if force:
            stop_cmd += " -f"
        # NOTE(review): splitting on spaces assumes DLC path / id contain none
        return subprocess.call(stop_cmd.split(" "))

    @staticmethod
    def remove(id):
        """Delete job `id` and return the CLI exit code."""
        # `force` not exposed to user
        remove_cmd = f"{DLC} delete job --job_id {id}"
        return subprocess.call(remove_cmd.split(" "))

    @staticmethod
    def show_node_status(workspace_name="a30", status="Ready", table_length=16):
        """Print idle nodes and per-user node occupancy for a workspace.

        Args:
            workspace_name: "a30" (eflops* nodes) or "a100" (a100* nodes).
            status: only show nodes in this state, or None for all states.
            table_length: max nodes rendered per table row before wrapping.
        """
        # validate before spending a network round-trip
        if status:
            assert status in ("Ready", "NotReady", "SchedulingDisabled")

        resp = do_request(
            api_product="dlc",
            api_method="GET",
            api_path="/api/v1/data/nodeInfos",
        )
        try:
            resp_json = json.loads(resp.content)
        except Exception as ex:
            # bug fix: previously fell through with `resp_json` undefined,
            # raising NameError on the line below
            print(f"Failed to get node info: {ex}")
            return

        node_user_info = PAIHandler._print_idle_nodes(
            resp_json, workspace_name, status
        )
        PAIHandler._print_user_nodes(node_user_info, workspace_name, table_length)

    @staticmethod
    def _print_idle_nodes(resp_json, workspace_name, status):
        """Print nodes with zero users; return {node_name: user_names} for busy ones."""
        table = PrettyTable()
        table.field_names = ["nodeName", "status", "userCount", "userNames"]
        valid_node_cnt = 0
        node_user_info = {}
        for item in resp_json["ClusterNodeInfos"]["items"]:
            # the workspace is identified by the node-name prefix
            if workspace_name == "a30" and not item["nodeName"].startswith("eflops"):
                continue
            if workspace_name == "a100" and not item["nodeName"].startswith("a100"):
                continue
            if status is not None and item["status"].strip() != status:
                continue
            if item["userCount"] == 0:
                table.add_row(
                    [
                        item["nodeName"],
                        item["status"],
                        item["userCount"],
                        item["userNames"],
                    ]
                )
                valid_node_cnt += 1
            else:
                node_user_info[item["nodeName"]] = item["userNames"]

        print(
            f"\n      {workspace_name.upper()} Get {valid_node_cnt} Nodes Available [0 User] "  # noqa
        )
        print(table)
        return node_user_info

    @staticmethod
    def _print_user_nodes(node_user_info, workspace_name, table_length):
        """Print a per-user table of occupied nodes, heaviest users first."""
        user_node_info = {}
        for node, user_list in node_user_info.items():
            # eflops nodes are shown by their numeric suffix only
            short = node.split("eflops")[-1] if "eflops" in node else node
            for user_name in user_list:
                user_node_info.setdefault(user_name, []).append(short)

        table_user_node = PrettyTable()
        use_node_tag = "useNodes"
        if workspace_name == "a30":
            use_node_tag += "[eflops]"
        table_user_node.field_names = ["userName", "useNodesCount", use_node_tag]

        for user_name, nodes in sorted(
            user_node_info.items(), key=lambda x: len(x[1]), reverse=True
        ):
            if workspace_name == "a30":
                # numeric suffixes sort naturally as ints
                node_list = sorted(int(i) for i in nodes)
            else:
                node_list = nodes
            PAIHandler._add_user_row(
                table_user_node, user_name, node_list, table_length
            )

        table_user_node.align = "l"
        print("\n")
        print(f"{' '*35} {workspace_name.upper()} User-Node Info [DLC & DSW] ")
        print(table_user_node)

    @staticmethod
    def _add_user_row(table, user_name, node_list, table_length):
        """Add one user's nodes to `table`, wrapped across rows of `table_length`."""
        if len(node_list) <= table_length:
            table.add_row([user_name, len(node_list), node_list])
            return
        line_cnt = math.ceil(len(node_list) / table_length)
        for line_idx in range(line_cnt):
            start_idx = line_idx * table_length
            end_idx = min((line_idx + 1) * table_length, len(node_list))
            chunk = str(node_list[start_idx:end_idx])
            if line_idx == 0:
                # first row carries the user name; drop the closing bracket
                table.add_row([user_name, len(node_list), chunk[:-1] + ","])
            elif line_idx == line_cnt - 1:
                # last row: drop the opening bracket
                table.add_row(["", "", " " + chunk[1:]])
            else:
                # middle rows: drop both brackets
                table.add_row(["", "", " " + chunk[1:-1] + ","])

    # def _safe_override_dlc_params(self, kwargs):
    #     # cmd > pyfile > dlc.params
    #     for k, v in self._update_kwargs.items():
    #         if k in kwargs and v is None:
    #             self.dlc_params[k] = kwargs[k]
