from .parse_file import CommonParser, dump_dlc_params
from tjob.initialize.setup_envs import (
    _CACHE,
    get_default_dlc_file,
    map_dlcParams2opt,
    update_dlc_params,
)
import os.path as osp
from tjob.utils import save_yaml
from typing import List
import copy
import warnings


def gen_pipeline(job_list: List[CommonParser], deps: list = None) -> str:
    """Generate a pipeline-submission YAML file from a list of parsed jobs.

    For each job, its dlc kwargs are normalized, merged into the cluster's
    default dlc parameter file, dumped to a per-job YAML under ``_CACHE``,
    and registered in the pipeline manifest. Optional inter-job dependencies
    are recorded as ``requires`` entries.

    Args:
        job_list: Parsed jobs; each must expose ``dlc_kwargs`` (dict) and
            ``command`` (str).
        deps: Optional per-job dependency lists. ``deps[i]`` holds the
            0-based indices of jobs that job ``i`` requires. When given,
            it must have the same length as ``job_list``.

    Returns:
        Path of the dumped pipeline submission YAML file.

    Raises:
        ValueError: If ``deps`` is given but its length differs from
            ``job_list``.
    """
    pipe_data = {
        "pipeline": {},
    }
    for i, job in enumerate(job_list):
        dlc_kw = copy.deepcopy(job.dlc_kwargs)
        # Translate CLI-option names back to their dlc parameter names.
        for k1, k2 in map_dlcParams2opt().items():
            v = dlc_kw.pop(k2, None)
            if v:
                dlc_kw[k1] = v
        cluster = dlc_kw.pop("cluster", "a30")
        job_dlc_params = update_dlc_params(get_default_dlc_file(cluster), **dlc_kw)

        # Clear unrelated args, otherwise submission will FAIL.
        for key in ("priority", "monitor", "cluster", "machine"):
            job_dlc_params.pop(key, None)

        if "data_sources" in job_dlc_params:
            warnings.warn(
                "[WARN] tjob submit pipeline CANNOT read `data_sources` for now. "
                "This may cause job FAILED."
            )
            job_dlc_params.pop("data_sources")

        dump_job_dlc_file = osp.join(_CACHE, f"pipeline.job{i+1}.yaml")
        job_dlc_params["command"] = job.command
        dump_dlc_params(job_dlc_params, dump_job_dlc_file)
        pipe_data["pipeline"][f"job{i+1}"] = {
            "args_file": dump_job_dlc_file,
        }

    if deps:
        if len(deps) != len(job_list):
            # BUG FIX: the message was split into two statements — the
            # second line started with a stray unary `+` on an f-string,
            # which raised TypeError before the intended ValueError and
            # dropped half of the message.
            raise ValueError(
                "[FATAL] The Length of `orders` should be same as `jobs`. "
                f"But get {len(deps)} and {len(job_list)}."
            )

        for i, dep in enumerate(deps):
            if dep:
                # `dep` holds 0-based job indices; pipeline keys are 1-based.
                require_jobs = [f"job{j+1}" for j in dep]
                pipe_data["pipeline"][f"job{i+1}"].update({"requires": require_jobs})

    pipe_yaml = osp.join(_CACHE, "pipeline.submit.yaml")
    save_yaml(pipe_yaml, pipe_data)
    return pipe_yaml
