import json
import os

import pydantic
import yaml

from tjob.configs.cloud import CLOUD_DEFAULT_PATH
from tjob.initialize.setup_envs_cloud import (
    CloudJobParams,
    Jupyter,
    Framework,
    get_cloud_access,
    get_cloud_default_conf,
    get_cloud_job_default_params,
    get_default_namespace,
    set_cloud_login, Batch,
)
from tjob.handler.base_handler import BaseHandler
from tjob.utils.date_utils import get_curtime
from tjob.utils.io_utils import load_yaml


def parse_cloud_job_params(
    name,
    command,
    gpuType=None,
    count=None,
    gpuPerWorker=None,
    cpuPerWorker=None,
    memPerWorker=None,
    image=None,
    cloudCluster=None,
    shmSize=None,
    priority=None,
    params_file=None,
    jupyterEnabled=None,
    jupyterWorkingDir=None,
    jupyterServicePort=None,
    autoInstallJupyter=None,
    framework=None,
    isDev=None,
) -> CloudJobParams:
    """Merge CLI arguments, an optional params file and built-in defaults
    into a validated ``CloudJobParams``.

    Precedence (highest wins): truthy keyword arguments, then values loaded
    from ``params_file``, then ``get_cloud_job_default_params()``.

    Args:
        name: job name (always applied if no name came from the file/defaults).
        command: shell command; prepended with ``&&`` to any default command.
        params_file: optional YAML file whose keys override the defaults.
        jupyterEnabled/...: when jupyter is enabled the job is forced to a
            single worker and a ``Jupyter`` sub-config is attached.
        isDev: the literal string ``"true"`` marks the job as a dev job.

    Returns:
        A validated ``CloudJobParams`` instance.

    Raises:
        ValueError: when the resource request is inconsistent (no gpuType and
            no cpu/mem, unknown gpuType, gpuType without gpuPerWorker) or a
            jupyter job has no ``labHost`` configured for its cluster.
    """
    default_params = get_cloud_job_default_params()
    updated_params = load_yaml(params_file) if params_file else {}
    default_params.update(updated_params)
    # A truthy CLI value overrides the merged defaults; a missing key is
    # created even when the value is None so later .get() lookups are uniform.
    for k, v in {
        "gpuType": gpuType,
        "count": count,
        "gpuPerWorker": gpuPerWorker,
        "cpuPerWorker": cpuPerWorker,
        "memPerWorker": memPerWorker,
        "image": image,
        "name": name,
        "cloudCluster": cloudCluster,
        "shmSize": shmSize,
        "priority": priority,
        "framework": framework,
    }.items():
        if k not in default_params or v:
            default_params[k] = v
    # merge command instead of override
    # TODO: this is bug prone, refactor it!
    if command:
        if default_params.get("command"):
            default_params["command"] = command + " && " + default_params["command"]
        else:
            default_params["command"] = command
    if not default_params.get("name"):
        default_params["name"] = name
    default_conf = get_cloud_default_conf()
    if not default_params.get("gpuType") or default_params["gpuType"] == "none":
        # CPU-only job: the caller must size the workers explicitly.
        if not (
            default_params.get("cpuPerWorker") and default_params.get("memPerWorker")
        ):
            raise ValueError(
                "cpuPerWorker and memPerWorker must be set when gpuType is not set"
            )
    else:
        if default_params["gpuType"] not in default_conf["gpu"]:
            raise ValueError(
                f"gpuType {default_params['gpuType']}"
                + f"not in {default_conf['gpu'].keys()}"
            )
        if not default_params.get("gpuPerWorker"):
            raise ValueError("gpuPerWorker must be set not zero when gpuType is set")
        # Derive cpu/mem from the per-GPU quota in the cloud config when the
        # user did not set them explicitly.
        if not default_params.get("cpuPerWorker"):
            default_params["cpuPerWorker"] = int(default_conf["gpu"][
                default_params["gpuType"]
            ]["cpu"]) * int(default_params.get("gpuPerWorker"))
        if not default_params.get("memPerWorker"):
            default_params["memPerWorker"] = int(default_conf["gpu"][
                default_params["gpuType"]
            ]["memory"]) * int(default_params.get("gpuPerWorker"))
    if not default_params.get("cloudCluster"):
        default_params["cloudCluster"] = default_conf["cloudCluster"]
    if not default_params.get("shmSize"):
        default_params["shmSize"] = default_params["memPerWorker"]

    # jupyter args
    if jupyterEnabled:
        # users can only setup 1 pod if jupyter is enabled
        default_params["count"] = 1
        jupyter = Jupyter(
            enabled=jupyterEnabled,
            workingDir=jupyterWorkingDir if jupyterWorkingDir is not None else "/",
            port=jupyterServicePort if jupyterServicePort is not None else 8888,
            autoInstall=autoInstallJupyter if autoInstallJupyter is not None else False,
            host="localhost",
        )
        # pydantic v2 API (this module also uses model_validate below);
        # .dict() is the deprecated v1 spelling.
        default_params["jupyter"] = jupyter.model_dump()

    # set jupyter host: resolve the cluster's lab host from the cloud config.
    if default_params.get("jupyter"):
        config = get_cloud_default_conf()
        host = config.get("clusterConfig", {}).get(default_params["cloudCluster"], {}).get("labHost")
        if not host:
            raise ValueError(
                "lab host is none, you need to set labHost in cloud.config.yaml first"
            )
        default_params["jupyter"]['host'] = host

    if framework is not None:
        default_params["framework"] = framework
    elif not default_params.get("framework"):
        default_params["framework"] = Framework()

    if isDev == "true":
        default_params["isDevJob"] = "enabled"
    else:
        default_params["isDevJob"] = "disabled"

    return CloudJobParams.model_validate(default_params)


class CloudHandler(BaseHandler):
    """Handler that submits, inspects and stops jobs on the cloud platform
    via the volcano batch/bus HTTP APIs."""

    def __init__(
        self,
    ) -> None:
        super().__init__()

    def submit(self, cj_params: CloudJobParams):
        """Submit a job to the cloud.

        Validates the job name against Kubernetes object-name rules, POSTs
        the job to the batch.volcano.sh endpoint and, on a 409 conflict with
        ``randomWhenConflict`` enabled, retries once with a timestamped name.

        Returns:
            The (possibly renamed) ``cj_params``.

        Raises:
            ValueError: on an invalid name or a failed submission.
        """
        access = get_cloud_access()
        import requests
        import re

        # Kubernetes object names: <=63 chars, lowercase alphanumerics or '-',
        # must start and end with an alphanumeric character.
        pattern = r'^[a-z0-9]([-a-z0-9]{0,61}[a-z0-9])?$'
        # validate name
        if not re.match(pattern, cj_params.name):
            raise ValueError(
                f"invalid name: {cj_params.name}, name's length must not exceeds 63, "
                f"and must consist of lower case alphanumeric characters, numbers or '-', "
                "and must start and end with alphanumeric character or number."  # noqa
            )

        url = (
            access["url"]
            + f"/apis/clusters/{cj_params.cloudCluster}/batch.volcano.sh"
            + f"/v1alpha1/namespaces/{get_default_namespace(access)}/jobs"
        )
        token = access["token"]
        if not token:
            token = set_cloud_login(access["username"], access["password"])
        headers = {"Authorization": f"Bearer {token}"}

        cj_dict = cj_params.to_cloud_job()

        res = requests.post(url, json=cj_dict, headers=headers)
        if not res.ok:
            if res.status_code == 409 and get_cloud_default_conf().get(
                "randomWhenConflict", False
            ):
                # Fix: original message was missing the separating space
                # ("...nameexists in cloud...").
                warn_to_user = (
                    f'{ cj_dict["metadata"]["name"] } '
                    + "exists in cloud. tjob create a new name with timestamp."
                )
                cj_params.name = cj_params.name + "-" + str(get_curtime("-"))
                cj_dict["metadata"]["name"] = cj_params.name
                print(warn_to_user)
                print("New name submit", cj_dict["metadata"]["name"])
                res = requests.post(url, json=cj_dict, headers=headers)
                if not res.ok:
                    raise ValueError(
                        f"submit job failed: {res.status_code},  {res.url},  {res.text}, {cj_dict}"  # noqa
                    )
            else:
                raise ValueError(
                    "submit job failed: "
                    + f"{res.status_code},  {res.url},  {res.text}, {cj_dict}"
                )
        return cj_params

    def show(self,
             cluster: str,
             name: str):
        """Fetch a job description from the cloud.

        Returns:
            The job object as a dict (parsed JSON response).

        Raises:
            ValueError: when the query fails.
        """
        access = get_cloud_access()
        import requests

        namespace = get_default_namespace(access)
        url = (
            access["url"]
            + f"/apis/clusters/{cluster}/batch.volcano.sh"
            + f"/v1alpha1/namespaces/{namespace}/jobs/{name}"
        )
        token = access["token"]
        if not token:
            token = set_cloud_login(access["username"], access["password"])
        headers = {"Authorization": f"Bearer {token}"}

        res = requests.get(url, headers=headers)
        if not res.ok:
            raise ValueError(
                f"query job failed: {res.status_code},  {res.url},  {res.text}, {namespace}, {name}"  # noqa
            )
        return res.json()

    @staticmethod
    def pipeline_submit(pipeline_yaml: str, name: str = None):
        """Not implemented for the cloud platform."""
        pass

    @staticmethod
    def resubmit(*args, **kwargs):
        msg = "resubmit is not supported in cloud platform"
        raise NotImplementedError(msg)

    @staticmethod
    def show_params():
        """Not implemented for the cloud platform."""
        pass

    @staticmethod
    def stop(name, cloud_cluster):
        """Abort a running job by posting a bus.volcano.sh AbortJob Command.

        Looks the job up first (its uid is required by the Command target),
        then POSTs the abort command.

        Raises:
            ValueError: when the job lookup or the abort request fails.
        """
        access = get_cloud_access()
        import requests

        namespace = get_default_namespace(access)
        token = access["token"]
        if not token:
            token = set_cloud_login(access["username"], access["password"])
        headers = {"Authorization": f"Bearer {token}"}
        url = (
            access["url"]
            + f"/apis/clusters/{cloud_cluster}/batch.volcano.sh"
            + f"/v1alpha1/namespaces/{namespace}/jobs/{name}"
        )
        res = requests.get(url, headers=headers)
        if not res.ok:
            raise ValueError(
                f"get job {name} failed: {res.status_code},  {res.url},  {res.text},"
            )
        job_dict = res.json()
        # Build the AbortJob command directly as a dict instead of templating
        # YAML text and re-parsing it: a name/uid containing YAML-significant
        # characters can no longer corrupt the document.
        cmd_dict = {
            "apiVersion": "bus.volcano.sh/v1alpha1",
            "kind": "Command",
            "metadata": {
                "generateName": f"{name}-abortjob-",
                "namespace": namespace,
            },
            "action": "AbortJob",
            "target": {
                "apiVersion": "batch.volcano.sh/v1alpha1",
                "kind": "Job",
                "name": name,
                "uid": job_dict["metadata"]["uid"],
            },
        }

        url = (
            access["url"]
            + f"/apis/clusters/{cloud_cluster}/bus.volcano.sh"
            + f"/v1alpha1/namespaces/{namespace}/commands"
        )

        res = requests.post(url, json=cmd_dict, headers=headers)
        if not res.ok:
            raise ValueError(
                "stop job failed: "
                + f"{res.status_code},  {res.url},  {res.text}, {cmd_dict}"
            )
        return


if __name__ == "__main__":
    set_cloud_login("ks-test-user", "ks-test-user")
    cj_dict = load_yaml(os.path.join(CLOUD_DEFAULT_PATH, "test.job.params.yaml"))
    # conf = get_cloud_access()
    ch = CloudHandler()
    cj = parse_cloud_job_params(
        name="leili-test12",
        command="sleep infinity",
        params_file=os.path.join(CLOUD_DEFAULT_PATH, "test.job.params.yaml"),
    )
    ch.submit(cj)
    # time.sleep(10)
    ch.stop("leili-test12", cj_dict["cloudCluster"])
