#!/usr/bin/env python3
from copy import deepcopy
import hashlib
import importlib
import importlib.util
from itertools import product
import json
import os
import re
from subprocess import check_call, check_output
import sys
from typing import List
import yaml
from loguru import logger
import pathlib
from .api import ServerHelper, TaskSpec
from pprint import pprint


def process_template(task, env):
    """
    Expand templates in a task description.

    1. If the key "_" exists, its value names one or more template entries
       in *env*; deep copies of those entries are merged (later ones win)
       and expanded recursively.
    2. The task's own keys are then applied on top of the template; a
       dotted key such as "a.b.c" is written into the nested dict
       new_task["a"]["b"]["c"].
    3. Lists are expanded element-wise; scalars pass through unchanged.

    Note: mutates *task* in place (pops "_" and, when no template is used,
    receives the dotted-key expansion directly).

    parameters:
        task: dict, the task to be decoupled (or list/scalar)
        env: dict, all variables defined in the task session
    returns:
        the expanded task (same outer shape as the input)
    """
    if isinstance(task, list):
        return [process_template(t, env) for t in task]
    elif not isinstance(task, dict):
        # scalars (str/int/...) need no expansion
        return task
    new_task = {}
    if "_" in task:
        tt = task.pop("_")
        if isinstance(tt, str):
            tt = [tt]
        # merge all referenced templates, then expand them recursively
        for t in tt:
            new_task.update(deepcopy(env[t]))
        new_task = process_template(new_task, env)
    else:
        new_task = task
    # do template first, then replace
    for k in list(new_task.keys()):
        if isinstance(new_task[k], dict) and "_" in new_task[k]:
            new_task[k] = process_template(new_task.pop(k), env)
    # BUG FIX: iterate over a snapshot. When no "_" template is present,
    # new_task aliases task, and writing a dotted key below inserts new
    # keys into the dict being iterated, which raises RuntimeError.
    for k, v in list(task.items()):
        root = new_task
        ks = k.split(".")
        for kk in ks[:-1]:
            # create intermediate dicts, clobbering non-dict values
            if kk not in root or not isinstance(root[kk], dict):
                root[kk] = {}
            root = root[kk]
            assert isinstance(root, dict), f"The key {k} is not a dict"
        root[ks[-1]] = v
    return new_task


@logger.catch(reraise=True)
def process_replacement(task, env):
    """
    Substitute ``{var}`` placeholders in the task with values from *env*.

    Keys starting with ``_`` extend the environment for the subtree they
    appear in, so the same template can be reused with different value
    combinations.  Keys starting with a single ``_`` (but not ``__``) are
    dropped from the output; ``__``-prefixed keys are kept.
    """
    pattern = r"\{[^{}]+\}"

    def substitute(text, scope):
        # replace every {name} occurrence with str(scope["name"])
        return re.sub(pattern, lambda m: str(scope[m.group(0)[1:-1]]), text)

    if isinstance(task, list):
        return [process_replacement(item, env) for item in task]
    if isinstance(task, str):
        return substitute(task, env)
    if not isinstance(task, dict):
        # scalars pass through untouched
        return task
    # underscore-prefixed keys become variables visible to this subtree
    scope = env.copy()
    scope.update({key: val for key, val in task.items() if key.startswith("_")})
    result = {}
    for key, val in task.items():
        if key.startswith("_") and not key.startswith("__"):
            continue
        # placeholders may occur in keys as well as values
        result[substitute(key, scope)] = process_replacement(val, scope)
    return result


class AddPythonPath:
    """Context manager that temporarily prepends *path* to ``sys.path``.

    The path is inserted at the front on entry so it wins import
    resolution over existing entries, and removed again on exit.
    """

    def __init__(self, path):
        # validate eagerly so failures surface at construction time
        assert isinstance(path, str)
        assert os.path.exists(path)
        self.path = path

    def __enter__(self):
        # push to the front of the path rather than the back
        sys.path.insert(0, self.path)
        # BUG FIX: return self so `with AddPythonPath(p) as ctx:` binds
        # the manager instead of None
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # remove the first occurrence, i.e. the entry inserted in __enter__
        sys.path.remove(self.path)


def get_commit_hash(path):
    """Return the HEAD commit hash of the git repository at *path*."""
    git_dir = os.path.join(path, ".git")
    assert os.path.exists(git_dir), "Not a git repository"
    output = check_output(["git", "rev-parse", "HEAD"], text=True, cwd=path)
    return output.strip()


def get_changed_files(target_dir):
    """Return paths reported by short-format ``git status`` (including ignored files)."""
    assert os.path.exists(os.path.join(target_dir, ".git")), "Not a git repository"
    status = check_output(
        ["git", "status", "-s", "--ignored", "--no-renames"],
        text=True,
        cwd=target_dir,
    )
    # each non-empty line is "XY <path>"; strip the 3-char status prefix
    return [line[3:] for line in status.split("\n") if line]


def _get_fetch_url(path):
    """Return the origin remote URL of the repository at *path*, or None.

    NOTE(review): despite the name, this parses the "Push  URL:" line of
    ``git remote show origin -n`` — presumably push and fetch URLs match
    here; confirm if they can diverge.
    """
    assert os.path.exists(os.path.join(path, ".git")), "Not a git repository"
    info = check_output(["git", "remote", "show", "origin", "-n"], text=True, cwd=path)
    for line in info.splitlines():
        match = re.findall(r"Push {2}URL: (.*)", line)
        if match:
            return match[0]
    return None


@logger.catch(reraise=True)
def process_task(task_spec, root: pathlib.Path) -> List[TaskSpec]:
    """
    Generate task specs for running.

    Recognized per-task keys:
        __task: dotted import path of the task to be run
        __name: Optional name of the task, stored in the DB for retrieval
        __depends: Optional list of files (relative to *root*) that the task depends on

    Each task is content-hashed over its public config (sorted-key JSON),
    the task module's source bytes, and all declared dependency files;
    identical hashes are deduplicated.  All hashed files must be committed
    and pushed in the git repository at *root*; otherwise the user is
    prompted to commit interactively.

    parameters:
        task_spec: list of fully-expanded task dicts
        root: repository root used to resolve task modules and dependencies
    returns:
        list of TaskSpec objects, one per unique task hash
    """
    hash_to_tasks: dict[str, dict] = {}  # task_hash -> raw hashed config
    all_dependencys: set[str] = set()  # repo-relative paths folded into hashes
    assert root is not None, "Root directory is not specified"

    # parent keys are not used when calculating the task hash of child tasks
    parent_keys = {
        # child will generate multiple files, this field specifies which file to be used in parent task
        "__task_file",
        # child will generate multiple outputs, this field specifies which output to be used in parent task
        "__task_output",  # TODO to be implemented
    }

    def _update_hash(task_cfg: dict, hash_to_tasks: dict) -> str:
        # Build the "raw" config used for hashing: single-underscore
        # (private) keys are dropped, and a nested child task is replaced
        # by its own hash so the parent re-hashes whenever the child changes.
        raw_task_cfg = {}
        for k, v in task_cfg.items():
            if k.startswith("_") and not k.startswith("__"):
                continue
            if isinstance(v, dict) and ("__task" in v):
                # pull parent-only keys out before hashing the child,
                # then re-attach them to the parent's view of the child
                parent_key_ = {}
                for kk in parent_keys:
                    if kk in v:
                        parent_key_[kk] = v.pop(kk)
                v = dict(__task_hash=_update_hash(v, hash_to_tasks))
                v.update(parent_key_)
            raw_task_cfg[k] = v
        hash_obj = hashlib.md5()
        hash_obj.update(json.dumps(raw_task_cfg, sort_keys=True).encode())
        # fold the task module's source bytes into the hash
        with AddPythonPath(str(root)):
            task = task_cfg["__task"]
            mod_path, _ = task.rsplit(".", 1)
            spec = importlib.util.find_spec(mod_path)
            if spec is None:
                logger.error(f"The task {task} is not found")
                raise FileNotFoundError(f"The task {task_cfg['__task']} is not found")
            # NOTE(review): spec.origin can be None for namespace packages;
            # this assumes a regular module with a source file — confirm.
            if not spec.origin.startswith(str(root)):
                logger.warning(
                    f"The task {task} is not in the root directory. skipping"
                )
                logger.info(f"{spec.origin} not in {root}")
            else:
                hash_obj.update(open(spec.origin, "rb").read())
                rel_path = os.path.relpath(spec.origin, str(root))
                # normalize to POSIX separators so paths match git output
                rel_path = pathlib.Path(rel_path).as_posix()
                all_dependencys.add(rel_path)
        # hash dependency files
        for dep in task_cfg.get("__depends", []):
            dep_path = root / dep
            assert dep_path.exists(), f"The dependency {dep} is not found"
            hash_obj.update(open(dep_path, "rb").read())
            rel_path = os.path.relpath(dep_path, str(root))
            logger.info(f"Adding dependency {rel_path}")
            all_dependencys.add(rel_path)

        task_hash = hash_obj.hexdigest()
        if task_hash in hash_to_tasks:
            # a repeated hash must come from an identical config (dedup),
            # never from two different configs colliding
            assert json.dumps(raw_task_cfg, sort_keys=True) == json.dumps(
                hash_to_tasks[task_hash], sort_keys=True
            ), f"The task {task_cfg} is not unique"
            logger.info(f"reusing {task_cfg['__task']}")
        hash_to_tasks[task_hash] = raw_task_cfg
        return task_hash

    def _check_git_status():
        # Ensure every hashed dependency is committed and tracked; offers to
        # add/commit/push interactively when the tree is dirty.  Returns the
        # commit hash the task specs are pinned to.
        for _ in all_dependencys:
            logger.info(f"Checking {_}")

        root_ = str(root)
        commit = get_commit_hash(root_)
        changed_files = get_changed_files(root_)
        tracking_files = check_output(
            "git ls-files --full-name".split(),
            cwd=root_,
            text=True,
        ).split("\n")
        # tracking_files = [x for x in tracking_files if x]
        clean_dir = True
        files_to_check = list(all_dependencys)
        for dep in files_to_check:
            if dep in changed_files:
                logger.error(f"{dep} not committed")
                clean_dir = False
            if dep not in tracking_files:
                logger.error(f"{dep} not tracked")
                clean_dir = False
        if not clean_dir:
            # empty answer defaults to "yes"
            commit_confirm = input(f"Commit {commit} (y/n)? [y]")
            if commit_confirm.lower() == "y" or commit_confirm == "":
                commit_cmd = "git add".split() + files_to_check
                logger.info(f"commiting {commit_cmd}")
                # NOTE(review): check_call raises CalledProcessError on a
                # non-zero exit and otherwise returns 0, so this and the
                # following `!= 0` branches can never run — git failures
                # surface as exceptions instead.
                if check_call(commit_cmd, cwd=root_) != 0:
                    logger.error("git add failed")
                    return False
                default_commit_message = "commit for task"
                commit_message = input(f"Commit message: [{default_commit_message}] ")
                if commit_message == "":
                    commit_message = default_commit_message
                if (
                    check_call(
                        ["git", "commit", "-m", commit_message],
                        cwd=root_,
                    )
                    != 0
                ):
                    logger.error("git commit failed")
                    return False
                if check_call("git push".split(), cwd=root_) != 0:
                    logger.error("git push failed")
                    return False
                # re-read HEAD so specs pin the freshly created commit
                prev_cmt = commit
                commit = get_commit_hash(root_)
                logger.info(f"commit {prev_cmt} -> {commit}")
                clean_dir = True
        else:
            logger.info("task spec is clean")
        if not clean_dir:
            raise Exception("task spec not committed")
        return commit

    for task in task_spec:
        _update_hash(task, hash_to_tasks)
    commit = _check_git_status()
    fetch_url = _get_fetch_url(root)
    tasks = []
    for k, v in hash_to_tasks.items():
        # __task/__tag are consumed here; all remaining keys become params
        tasks.append(
            TaskSpec(
                commit=commit,
                fetch_url=fetch_url,
                task=v.pop("__task"),
                task_hash=k,
                param=v,
                tag=v.pop("__tag", []),
            )
        )
    return tasks


def submit_task(
    cfg_file: str | pathlib.Path,
    server_helper: ServerHelper | None = None,
    dry_run: bool = False,
):
    """Submit tasks to the server from a YAML config file.

    Args:
        cfg_file: Path to YAML config file containing task definitions
        server_helper: Optional ServerHelper instance (default creates new one)
        dry_run: When True, only pretty-print the resolved specs and return
    """
    if isinstance(cfg_file, str):
        cfg_file = pathlib.Path(cfg_file).absolute()
    with open(cfg_file, "r") as f:
        desc = yaml.safe_load(f)
    tasks = desc.pop("__task")
    # resolve the repo root: explicit __root (absolute or relative to the
    # config file), defaulting to the config file's directory
    configured_root = desc.pop("__root", None)
    if configured_root is None:
        root = cfg_file.parent
    else:
        root = pathlib.Path(configured_root)
        if not root.is_absolute():
            root = cfg_file.parent / root
    root = root.resolve()
    print(root)
    # assume that broadcast can only occur in the tasks part and only the first level
    broadcasted = []
    for task in tasks:
        # every _broadcast_* key holds a list; emit one task per combination
        broadcast_keys = [key for key in task.keys() if key.startswith("_broadcast_")]
        for combo in product(*[task[key] for key in broadcast_keys]):
            variant = deepcopy(task)
            variant.update(dict(zip(broadcast_keys, combo)))
            broadcasted.append(variant)
    after_template = process_template(broadcasted, desc)
    pprint(after_template)
    resolved = process_replacement(after_template, desc)
    tasks = process_task(resolved, root)
    if dry_run:
        pprint([task.model_dump() for task in tasks])
        return
    if server_helper is None:
        server_helper = ServerHelper()
    server_helper.put("/tasks", [task.model_dump() for task in tasks])


if __name__ == "__main__":
    # CLI entry point: fire maps submit_task's signature to command-line flags.
    import fire

    fire.Fire(submit_task)
