"""Worker implementation for executing tasks in isolated environments.

This module contains the logic for running a task in an isolated environment, 
loading inputs and writing outputs according to the task specification.
"""

import importlib
import inspect
import json
import os
import asyncio
import pickle
import shutil
import time
from typing import Any
from loguru import logger
from jupyter_client import AsyncKernelManager
import nbformat
import sys
import click

from tasks.model import TaskSpec
from tasks.worker.constants import (
    DIR_ROOT, DIR_WORK, DIR_PARAMS, DIR_OUTPUTS, DIR_LOGS,
    FILE_TASK_SPEC, FILE_FINISHED, FILE_FAILED, FILE_PARAM,
    MAGIC_NUMBER, EXIT_SUCCESS, EXIT_FAILURE
)
from tasks.worker.interface import key_tag_parse, file_name_for_param, WorkspaceSpec


class ExecEnv:
    """Context manager that temporarily extends ``sys.path`` and switches
    the process working directory for the duration of a task execution.

    Args:
        pythonpath: Directory prepended to ``sys.path`` on entry
            (skipped when falsy).
        cwd: Working directory to create (if missing) and chdir into on
            entry (skipped when falsy).
    """

    def __init__(self, pythonpath: str, cwd: str):
        self.pythonpath = pythonpath
        self.cwd = cwd
        # Remembered at construction so __exit__ can restore the
        # caller's working directory.
        self.previous_dir = os.getcwd()

    def __enter__(self):
        if self.pythonpath:
            sys.path.insert(0, self.pythonpath)
        if self.cwd:
            # exist_ok=True makes a separate existence check unnecessary;
            # guarding on self.cwd avoids makedirs("") blowing up.
            os.makedirs(self.cwd, exist_ok=True)
            os.chdir(self.cwd)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if self.cwd:
            os.chdir(self.previous_dir)
        if self.pythonpath:
            try:
                # The executed task may itself have altered sys.path;
                # tolerate the entry already being gone.
                sys.path.remove(self.pythonpath)
            except ValueError:
                pass


class SeqWriter:
    """Writes a sequence of pickled outputs into a directory.

    Each output is pickled into its own monotonically numbered file so
    that a downstream ``SeqReader`` can consume them in order.

    Args:
        path: Directory that receives the numbered output files.
    """

    def __init__(self, path: str):
        self.path = path
        # Next sequence number to write.
        self.index = 0

    def write(self, output: Any):
        """Pickle ``output`` to the next numbered file and advance the index."""
        file_path = os.path.join(self.path, file_name_for_param(self.index))
        # Context manager guarantees the handle is flushed and closed
        # promptly (the original left the file object to the GC).
        with open(file_path, "wb") as f:
            pickle.dump(output, f)
        self.index += 1


class SeqReader:
    """Reads a sequence of parameter files produced by an upstream task.

    Polls *path* until a file appears, the FINISHED marker is written
    (end of sequence), or the FAILED marker is written (upstream error).
    Supports async consumption via ``__anext__`` and a blocking sync
    iterator protocol that bridges onto the running event loop.

    Args:
        path: Directory containing the sequence files and marker files.
        check_interval: Seconds to sleep between polls of the directory.
        mode: "pickle" to unpickle each file's content, "file" to return
            the file path itself.
    """

    def __init__(self, path: str, check_interval: float = 1, mode: str = "pickle"):
        self.path = path
        # Marker files written by the producer to signal completion/failure.
        self.end_file = os.path.join(path, FILE_FINISHED)
        self.fail_file = os.path.join(path, FILE_FAILED)
        # Count of items consumed so far (used only as a reuse guard below).
        self.index = 0
        self.check_interval = check_interval
        self.mode = mode

    async def __anext__(self):
        # NOTE(review): this always returns the first file in sorted order
        # and never uses self.index to skip already-consumed entries —
        # presumably files are removed after being read, and the marker
        # files sort/arrive such that they are never picked up as data;
        # confirm against the producing side.
        while True:
            files = os.listdir(self.path)
            files.sort()
            for file in files:
                file_path = os.path.join(self.path, file)
                if os.path.exists(file_path):
                    self.index += 1
                    if self.mode == "pickle":
                        return pickle.load(open(file_path, "rb"))
                    elif self.mode == "file":
                        return file_path
                    else:
                        raise ValueError(f"unknown mode: {self.mode}")
                elif os.path.exists(self.end_file):
                    # Producer declared the sequence complete.
                    raise StopAsyncIteration
                elif os.path.exists(self.fail_file):
                    raise Exception("parent task failed")
                else:
                    # Nothing available yet; poll again shortly.
                    await asyncio.sleep(self.check_interval)
            # NOTE(review): when the directory listing is empty this falls
            # through and implicitly returns None instead of waiting —
            # confirm an empty listing cannot occur before FINISHED appears.
            break

    def __iter__(self):
        logger.info(f"iter self: {self}")
        assert self is not None, "self is None"
        # A reader tracks consumption state; reusing one would re-read
        # from the start, hence the guard.
        assert self.index == 0, "index is not 0, should not reuse SeqReader"
        return self

    def __next__(self):
        # Bridge the async reader onto the current event loop so the
        # object can also be consumed from synchronous code.
        # NOTE(review): no __aiter__ is defined, so `async for` over this
        # reader would fail despite __anext__ existing — confirm intended
        # usage is sync iteration plus explicit __anext__ awaits only.
        try:
            return asyncio.run_coroutine_threadsafe(
                self.__anext__(), loop=asyncio.get_event_loop()
            ).result()
        except StopAsyncIteration:
            raise StopIteration


def _mid_run(loop, func, *args, **kwargs):
    asyncio.set_event_loop(loop)
    return func(*args, **kwargs)


async def worker_in_jupyter(task_root: str):
    """Execute a task inside the current (Jupyter-kernel) process.

    Loads the task spec from *task_root*, materializes the task's
    parameters, imports and calls the task function inside an ExecEnv,
    and streams the result(s) to the task's outputs directory.

    Args:
        task_root: Path to the task directory (its basename is the task hash).

    Raises:
        Exception: re-raised from the task function after logging.
    """
    # Route loguru output to stdout so the surrounding kernel captures it.
    logger.remove()
    logger.add(sys.stdout)

    workspace = WorkspaceSpec(
        task_hash=os.path.basename(task_root),
        root_dir=os.path.dirname(task_root),
    )

    # Use a context manager so the spec file handle is closed promptly
    # (the original left it to the garbage collector).
    with open(workspace.task_spec_path) as f:
        task_spec = TaskSpec.model_validate(json.load(f))

    task_hash = task_spec.task_hash
    # Expose the hash to the task function via the environment.
    os.environ["TASK_HASH"] = task_hash

    with ExecEnv(workspace.repo_dir, workspace.work_dir):
        raw_args = {}
        raw_args.update(task_spec.value_params)

        # Output params come from upstream tasks: either handed over as a
        # lazy reader (tagged "iter") or eagerly resolved to one value.
        for key in task_spec.output_params.keys():
            name, tags = key_tag_parse(key)
            reader = SeqReader(os.path.join(workspace.params_dir, name))
            if "iter" in tags:
                raw_args[name] = reader
            else:
                raw_args[name] = await reader.__anext__()

        # File params are passed to the task as filesystem paths.
        for key in task_spec.file_params.keys():
            name, _tags = key_tag_parse(key)
            raw_args[name] = os.path.join(workspace.params_dir, name, FILE_PARAM)

        output_writer = SeqWriter(workspace.outputs_dir)

        # Resolve "pkg.mod.func" into a callable and invoke it.
        mod_name, func_name = task_spec.task.rsplit(".", 1)
        try:
            mod = importlib.import_module(mod_name)
            func = getattr(mod, func_name)

            if inspect.iscoroutinefunction(func):
                output = await func(**raw_args)
            else:
                output = func(**raw_args)

            # Generator tasks stream each yielded item as its own output;
            # everything else is written as a single output.
            if inspect.isgenerator(output):
                for item in output:
                    output_writer.write(item)
            else:
                output_writer.write(output)
        except Exception as e:
            logger.exception(e)
            # Bare raise preserves the original traceback exactly
            # (instead of `raise e`).
            raise
    logger.info(f"finished {task_spec.task}")


def add_log(task_root: str, msg: dict):
    """Persist one log record for the task rooted at *task_root*.

    The record is dumped as a JSON file named after the current epoch
    timestamp, so entries sort chronologically in the logs directory.

    Args:
        task_root: Path to the task directory
        msg: Log message to add
    """
    root_dir = os.path.dirname(task_root)
    task_hash = os.path.basename(task_root)
    workspace = WorkspaceSpec(task_hash=task_hash, root_dir=root_dir)

    log_name = f"{time.time():.8f}.json"
    with open(os.path.join(workspace.logs_dir, log_name), "w") as f:
        json.dump(msg, f)


async def worker_in_cli(task_root: str):
    """Execute a task in a CLI environment by starting a Jupyter kernel.

    Boots a fresh ``python3`` kernel, runs this module's
    ``worker_in_jupyter`` inside it, and mirrors the kernel's iopub
    messages into the task log (and stderr, for stream errors).

    Args:
        task_root: Path to the task directory

    Returns:
        bool: True if task completed successfully, False otherwise
    """
    # Derive the import location of this very module so the kernel can
    # re-import it: mod_path is four path components above this file,
    # mod_name is "<package>.<module>" from the last two components with
    # the ".py" suffix stripped.
    # NOTE(review): splitting __file__ on "/" is POSIX-only — confirm
    # Windows is out of scope.
    splits = __file__.split("/")
    mod_path = "/".join(splits[:-4])
    mod_name = ".".join(splits[-2:])[:-3]
    logger.info(f"mod_path: {mod_path}")
    logger.info(f"mod_name: {mod_name}")

    # The MAGIC_NUMBER assertion guards against importing a different
    # module than the one this process was started from.
    code_to_run = f"""
import sys
import asyncio
import os
sys.path.append('{mod_path}')
import {mod_name} as worker_mod
assert worker_mod.MAGIC_NUMBER=={MAGIC_NUMBER}, f'ERROR mod imported'
await worker_mod.worker_in_jupyter("{task_root}")
"""
    kernel_manager = AsyncKernelManager(kernel_name="python3")
    await kernel_manager.start_kernel()
    kernel_client = kernel_manager.client()
    kernel_client.start_channels()
    await kernel_client.wait_for_ready()
    kernel_client.execute(code_to_run)
    try:
        # Pump iopub messages until the kernel goes idle, an error
        # surfaces, or the user interrupts.
        while True:
            msg = await kernel_client.get_iopub_msg()
            content = msg["content"]
            logger.trace(msg)
            msg_type = msg["header"]["msg_type"]
            if msg_type in {
                "execute_result",
                "display_data",
                "stream",
                "error",
            }:
                # Persist every displayable output as a task log entry.
                output_cell = nbformat.v4.output_from_msg(msg)
                add_log(task_root, output_cell)
                # NOTE(review): the `True and` is a leftover toggle for the
                # stderr mirroring below — consider making it a parameter.
                if True and output_cell["output_type"] == "stream":
                    # whether print error to stderr
                    if output_cell["name"] == "stderr":
                        print(
                            output_cell["text"],
                            file=sys.stderr,
                            flush=True,
                            end="",
                        )
                if output_cell["output_type"] == "error":
                    # Surface the traceback and treat the task as failed.
                    print("\n".join(output_cell["traceback"]))
                    logger.error("Task execution failed")
                    return False
            elif msg_type == "status":
                # "idle" after our execute request means the cell finished.
                if content["execution_state"] == "idle":
                    break
            else:
                logger.debug(f"msg_type {msg_type} not handled")
    except KeyboardInterrupt:
        logger.debug("Interrupted by user.")
        return False
    finally:
        # Always tear down channels and the kernel process.
        kernel_client.stop_channels()
        await kernel_manager.shutdown_kernel()
        logger.debug("Kernel shutdown")
    return True


async def run_task(task_dir: str) -> int:
    """Run a task in the specified directory.

    This is the main entry point for the worker script.

    Args:
        task_dir: Path to the task directory

    Returns:
        int: Exit code (0 for success, non-zero for failure)
    """
    # Guard-clause style: bail out with the failure code on error.
    if await worker_in_cli(task_dir):
        return EXIT_SUCCESS
    logger.error("Task execution failed")
    return EXIT_FAILURE


if __name__ == "__main__":
    # Click command group; docstrings below double as CLI help text and
    # are therefore kept verbatim.

    @click.group()
    def cli():
        """Worker CLI for managing task execution."""
        pass

    @cli.command(name="backup")
    @click.argument("task_root", type=str)
    def backup_cmd(task_root: str):
        """Backup a task directory."""
        shutil.copytree(task_root, f"{task_root}.bak", dirs_exist_ok=True)

    @cli.command(name="restore")
    @click.argument("task_root", type=str)
    def restore_cmd(task_root: str):
        """Restore a task from backup."""
        shutil.rmtree(task_root)
        shutil.copytree(f"{task_root}.bak", task_root)

    @cli.command(name="run")
    @click.argument("task_root", type=str)
    def run_cmd(task_root: str):
        """Run the worker for a specific task."""
        sys.exit(asyncio.run(run_task(task_root)))

    cli()
