"""
This module contains the code to generate the task spec from the config file,
1. checks dependencies
2. calculates the task hash
3. generates the task configuration according to the definations in the model.py
"""

import hashlib
import os
from subprocess import CalledProcessError, check_call, check_output
from typing import Any, Dict, List, Set, Tuple

from loguru import logger

from .importParser import find_all_dependencies
from .model import TaskSpec



def get_commit_hash(root: str) -> str:
    """Return the HEAD commit hash of the git repository at *root*."""
    output = check_output(["git", "rev-parse", "HEAD"], cwd=root, text=True)
    return output.strip()


def get_changed_files(root: str) -> List[str]:
    """Return the paths of files with uncommitted changes in the repository at *root*."""
    diff_output = check_output(["git", "diff", "--name-only"], cwd=root, text=True)
    return diff_output.splitlines()


def get_fetch_url(root: str) -> str:
    """Return the fetch URL of the 'origin' remote of the repository at *root*."""
    url = check_output(["git", "remote", "get-url", "origin"], cwd=root, text=True)
    return url.strip()


def check_and_commit_dependencies(root: str, dependencies: List[str]) -> bool:
    """
    Check if all dependency files are committed and clean in the repository.
    If not, attempt to add, commit, and push them after user confirmation.

    Args:
        root: Root directory of the git repository
        dependencies: Dependency file paths to check (absolute or relative to root)

    Returns:
        bool: True if all files are committed and clean (or were successfully
            committed and pushed), False otherwise
    """
    # Get git status information
    root = os.path.abspath(root)
    logger.info(f"Checking dependencies in {root}")
    changed_files = get_changed_files(root)
    tracking_files = check_output(
        ["git", "ls-files", "--full-name"], cwd=root, text=True
    ).splitlines()

    # Normalize all dependency paths to be relative to the repository root so
    # they can be compared against the paths reported by git diff / ls-files.
    relative_dependencies: List[str] = []
    logger.info(f"dependencies: {dependencies}")
    logger.info(f"root_abs_path: {root}")
    for dep in dependencies:
        # Convert to absolute if not already
        abs_path = os.path.join(root, dep) if not os.path.isabs(dep) else dep
        try:
            # Only include files that are under the repository root
            if abs_path.startswith(root):
                relative_dependencies.append(os.path.relpath(abs_path, root))
            else:
                logger.warning(f"Dependency {dep} is outside repository root, skipping")
        except ValueError as e:
            # os.path.relpath can raise on Windows for cross-drive paths
            logger.warning(f"Could not convert {dep} to relative path: {str(e)}")

    # Collect files that are either modified or not tracked at all; a file is
    # appended at most once even if both conditions hold.
    clean_dir = True
    files_to_check: List[str] = []
    for dep in relative_dependencies:
        dirty = False
        if dep in changed_files:
            logger.error(f"{dep} is not committed")
            dirty = True
        if dep not in tracking_files:
            logger.error(f"{dep} is not tracked by git")
            dirty = True
        if dirty:
            clean_dir = False
            files_to_check.append(dep)

    if not clean_dir:
        # Ask for confirmation to commit (default is yes)
        commit_confirm = input(
            f"Commit changes for {len(files_to_check)} files? (y/n)? [y] "
        )
        if commit_confirm.lower() not in ["y", ""]:
            return False

        # NOTE: check_call raises CalledProcessError on a non-zero exit status
        # and otherwise returns 0, so failures must be caught — comparing the
        # return value to 0 would never detect an error.
        add_cmd = ["git", "add"] + files_to_check
        logger.info(f"Adding files: {' '.join(add_cmd)}")
        try:
            check_call(add_cmd, cwd=root)
        except CalledProcessError:
            logger.error("git add failed")
            return False

        # Commit changes with a user-supplied (or default) message
        default_message = "Update task dependencies"
        commit_message = input(f"Commit message: [{default_message}] ")
        if not commit_message:
            commit_message = default_message

        try:
            check_call(["git", "commit", "-m", commit_message], cwd=root)
        except CalledProcessError:
            logger.error("git commit failed")
            return False

        # Push changes
        try:
            check_call(["git", "push"], cwd=root)
        except CalledProcessError:
            logger.error("git push failed")
            return False

        logger.info("Successfully committed and pushed changes")
        return True

    logger.info("All dependencies are clean and committed")
    return True


@logger.catch(reraise=True)
def _task_spec_from_cfg(
    cfg: Dict[str, Any],
    root: str,
    parsed_tasks: Dict[str, TaskSpec] | None = None,
    dependent_files: Set[str] | None = None,
) -> TaskSpec:
    """
    Generate a task specification from a configuration dictionary.

    Note: ``cfg`` is consumed destructively — ``__task__`` and ``__name__``
    are popped from it (and from nested task dicts via recursion).

    Args:
        cfg: Configuration dictionary containing task parameters
        root: Root directory path of the repository
        parsed_tasks: Dictionary of already parsed tasks (keyed by task hash)
            to avoid duplicate processing
        dependent_files: Set accumulating absolute paths of files that need to
            be monitored for changes (created if not supplied)

    Returns:
        TaskSpec object containing the task configuration

    Raises:
        ValueError: If a dependency file cannot be read
    """
    root = os.path.abspath(root)
    if parsed_tasks is None:
        parsed_tasks = {}
    if dependent_files is None:
        # BUG FIX: the None default was previously dereferenced below,
        # crashing any direct call that relied on the default.
        dependent_files = set()
    task_name = cfg.pop("__task__")
    task_show_name = cfg.pop("__name__", '')
    hash_obj = hashlib.md5()
    if task_name != '__collect__':
        # Map the dotted task name to its source file,
        # e.g. "pkg.mod.func" -> "pkg/mod.py".
        task_path = os.path.join(*task_name.split(".")[:-1]) + ".py"
        abs_task_path = os.path.join(root, task_path)

        # The task file itself plus everything it (transitively) imports are
        # dependencies; abs_task_path is appended so it is hashed as well.
        imported_dependencies = find_all_dependencies(abs_task_path, [root])
        imported_dependencies.append(abs_task_path)
        logger.info(f"imported_dependencies: {imported_dependencies}")
        dependent_files.update(imported_dependencies)

        # Hash the content of every dependency so that any source change
        # produces a different task hash.
        for dep_file in imported_dependencies:
            try:
                with open(os.path.join(root, dep_file), "rb") as f:
                    hash_obj.update(f.read())
            except (FileNotFoundError, IOError) as e:
                logger.error(f"File not found: {dep_file}")
                raise ValueError(f"File not found: {dep_file}") from e

    file_params: Dict[str, Any] = {}
    value_params: Dict[str, Any] = {}
    output_params: Dict[str, Any] = {}

    for k, v in cfg.items():
        if k.startswith("__"):
            continue
        if isinstance(v, dict) and "__task__" in v:
            # Nested task: recurse, then reference it by its hash so the
            # parent's hash changes whenever the child's does.
            spec = _task_spec_from_cfg(v, root, parsed_tasks, dependent_files)
            if "__file__" in v:
                file_params[k] = f'{spec.task_hash}:{v["__file__"]}'
            else:
                output_params[k] = spec.task_hash
        elif isinstance(v, dict) and "__file__" in v:
            file_params[k] = v["__file__"]
        else:
            value_params[k] = v
    task_spec = TaskSpec(
        name=task_show_name,
        commit="",
        fetch_url="",
        task=task_name,
        task_hash="",
        file_params=file_params,
        value_params=value_params,
        output_params=output_params,
    )
    # name/commit/fetch_url are filled in later and must not affect the hash.
    hash_obj.update(task_spec.model_dump_json(exclude=['name', 'commit', 'fetch_url']).encode())
    task_spec.task_hash = hash_obj.hexdigest()
    parsed_tasks[task_spec.task_hash] = task_spec
    return task_spec


def gen_task_spec(
    cfg: Dict[str, Any],
    tasks: List[str] | None = None,
    commit_changes: bool = True,
) -> Tuple[List[TaskSpec], Dict[str, TaskSpec], List[str]]:
    """
    Generate task specifications from a configuration dictionary.

    Note: ``cfg`` is consumed destructively — ``__root__`` is popped, and
    nested task dicts are mutated by ``_task_spec_from_cfg``.

    Args:
        cfg: Configuration dictionary; must contain ``__root__`` pointing at a
            git repository, plus one entry per task.
        tasks: Dotted names of the tasks to generate. If None, every public
            top-level dict entry containing ``__task__`` is used.
        commit_changes: If True, verify/commit dependency files and stamp each
            spec with the current commit hash and fetch URL.

    Returns:
        Tuple of (root task specs, all parsed specs keyed by hash,
        list of dependency file paths).

    Raises:
        ValueError: If the root is missing, not a git repository, or
            dependencies could not be committed.
    """

    root = cfg.pop("__root__")
    if not os.path.exists(root):
        raise ValueError(f"Root directory does not exist: {root}")
    # The root must be a git repository (a .git directory must exist)
    if not os.path.exists(os.path.join(root, ".git")):
        raise ValueError(f"Root directory is not a git repository: {root}")

    task_specs: Dict[str, TaskSpec] = {}
    root_tasks: List[TaskSpec] = []
    dependent_files: Set[str] = set()
    if tasks is None:
        # Auto-discover tasks. The isinstance guard prevents a TypeError on
        # non-container values and accidental substring matches on strings.
        tasks = [
            k
            for k, v in cfg.items()
            if isinstance(v, dict) and "__task__" in v and not k.startswith("_")
        ]
    for task in tasks:
        # Resolve dotted task names into the nested config dict.
        root_cfg = cfg
        for part in task.split("."):
            root_cfg = root_cfg[part]
        root_task_spec = _task_spec_from_cfg(root_cfg, root, task_specs, dependent_files)
        root_task_spec.name = task
        root_tasks.append(root_task_spec)

    if commit_changes:
        # check_and_commit_dependencies expects a list of paths
        if not check_and_commit_dependencies(root, list(dependent_files)):
            raise ValueError("Failed to commit dependencies")

        # Stamp every parsed spec with the repository provenance.
        fetch_url = get_fetch_url(root)
        commit = get_commit_hash(root)
        for spec in task_specs.values():
            spec.commit = commit
            spec.fetch_url = fetch_url

    return root_tasks, task_specs, list(dependent_files)


if __name__ == "__main__":
    import sys
    import tomli
    from .cfg_parser import parse_cfg

    if len(sys.argv) != 2:
        print("Usage: python -m tasks.task_gen <config_file>")
        sys.exit(1)
    cfg = tomli.load(open(sys.argv[1], "rb"))
    cfg = parse_cfg(cfg)
    root_tasks, task_specs, dependent_files = gen_task_spec(cfg)
    # print(root_tasks)
    # print(task_specs)
    # print(dependent_files)\
    for task in task_specs.values():
        print(task.model_dump_json(indent=2))
        print("-------------")
