"""
the worker process to start the jupyter notebook, run tasks and store results

the worker will receive a commandline parameter, which is the path to the task:

/path/to/task/
    - task.json: the task specification
    - _root: the root directory of the task, down from git repo
    - _work_dir: the working directory of the task
    - _params: the input parameters for the task
        - key1:
            - 001
            - 002
            ...
            - FINISHED/FAILED
        - key2:
            - 001
            - FINISHED/FAILED
    - _logs:
        - 1923123.12312.json
        - 1923123.12312.bin
        # the log file for the task, named with the timestamp
    - _outputs:
        - 0001
        - 0002
        - 0003
        - FINISHED/FAILED
"""

import importlib
import inspect
import json
import os
import asyncio
import pickle
import shutil
import time
from typing import Any
from loguru import logger
from jupyter_client import AsyncKernelManager
import nbformat
import sys

from rr.api import TaskSpec
from rr.workermanager import file_name_for_param

# Sentinel value asserted by the bootstrap code executed inside the Jupyter
# kernel, to verify that the module imported there is this exact worker module.
_MAGIC_NUMBER_ = 1234567890321


class ExecEnv:
    """Context manager that temporarily extends ``sys.path`` and switches cwd.

    On entry, prepends ``pythonpath`` to ``sys.path`` (if non-empty) and
    changes into ``cwd`` (created on demand).  On exit, the previous working
    directory is restored and the ``sys.path`` entry is removed again.
    Falsy values for either argument turn the corresponding step into a no-op.
    """

    def __init__(self, pythonpath: str, cwd: str):
        self.pythonpath = pythonpath
        self.cwd = cwd
        # Remember where we were so __exit__ can restore it.
        self.previous_dir = os.getcwd()

    def __enter__(self):
        if self.pythonpath:
            sys.path.insert(0, self.pythonpath)
        if self.cwd:
            # Fix: only touch the filesystem for a non-empty cwd.  The
            # original called os.makedirs() before the truthiness check,
            # which raises FileNotFoundError for an empty string.
            os.makedirs(self.cwd, exist_ok=True)
            os.chdir(self.cwd)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        if self.cwd:
            os.chdir(self.previous_dir)
        if self.pythonpath:
            try:
                # Removes the first occurrence — the one we inserted at 0.
                sys.path.remove(self.pythonpath)
            except ValueError:
                pass  # already removed elsewhere; nothing left to undo


class SeqWriter:
    """Writes a sequence of pickled values into a directory, one file each.

    File names come from ``file_name_for_param(index)``; on context exit a
    terminal marker file (``FINISHED`` on success, ``FAILED`` on exception)
    is dropped so readers polling the directory know the stream has ended.
    """

    def __init__(self, path: str):
        self.path = path
        self.index = 0  # next sequence number to write

    def write(self, output: Any):
        file_path = os.path.join(self.path, file_name_for_param(self.index))
        # Fix: close the file handle deterministically; the original passed
        # an anonymous open() to pickle.dump and leaked the handle.
        with open(file_path, "wb") as f:
            pickle.dump(output, f)
        self.index += 1

    def __enter__(self):
        assert self.index == 0, "index is not 0"
        os.makedirs(self.path, exist_ok=True)
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Publish the terminal marker for downstream SeqReader polling.
        marker = "FINISHED" if exc_type is None else "FAILED"
        with open(os.path.join(self.path, marker), "w") as f:
            f.write(marker.lower())


class SeqReader:
    """Polls a SeqWriter-produced directory and yields the pickled items.

    ``__anext__`` waits (polling every ``check_interval`` seconds) until the
    next numbered file, a ``FINISHED`` marker, or a ``FAILED`` marker shows
    up.  ``__iter__``/``__next__`` bridge the async reader into a plain
    synchronous iterator for task code that runs in a worker thread.
    """

    def __init__(self, path: str, check_interval: float = 1):
        self.path = path
        self.end_file = os.path.join(path, "FINISHED")
        self.fail_file = os.path.join(path, "FAILED")
        self.index = 0  # next sequence number to read
        self.check_interval = check_interval

    async def __anext__(self):
        while True:
            file_path = os.path.join(
                self.path, file_name_for_param(self.index)
            )
            if os.path.exists(file_path):
                self.index += 1
                # Fix: close the file handle; the original leaked the
                # anonymous open() passed to pickle.load.
                with open(file_path, "rb") as f:
                    return pickle.load(f)
            if os.path.exists(self.end_file):
                # Writer finished cleanly and no more items remain.
                raise StopAsyncIteration
            if os.path.exists(self.fail_file):
                raise Exception("parent task failed")
            await asyncio.sleep(self.check_interval)

    def __iter__(self):
        logger.info(f"iter self: {self}")
        assert self is not None, "self is None"
        return self

    def __next__(self):
        # Schedule __anext__ on the running event loop and block for the
        # result.  NOTE(review): this assumes __next__ runs off the loop's
        # own thread (it is invoked from asyncio.to_thread workers);
        # calling it on the loop thread would deadlock — confirm callers.
        try:
            return asyncio.run_coroutine_threadsafe(
                self.__anext__(), loop=asyncio.get_event_loop()
            ).result()
        except StopAsyncIteration:
            raise StopIteration


async def dict_zip(**kwargs):
    """Merge several async iterators into a stream of combined dicts.

    First pulls one value from every iterator and yields that snapshot.
    Each subsequent round advances the first still-active iterator (in key
    order) and yields the updated snapshot; exhausted iterators keep
    contributing their last seen value.  Iteration stops once every
    iterator is exhausted.
    """
    active = dict(kwargs)
    current = {}
    for name in active:
        current[name] = await active[name].__anext__()
    yield current.copy()
    while active:
        advanced = False
        for name in list(active):
            try:
                current[name] = await active[name].__anext__()
                advanced = True
                break
            except StopAsyncIteration:
                # Drop the finished iterator and try the next key.
                del active[name]
        if not advanced:
            break
        yield current.copy()


def _mid_run(loop, func, *args, **kwargs):
    asyncio.set_event_loop(loop)
    return func(*args, **kwargs)


async def worker_in_jupyter(task_root: str) -> None:
    """Execute one task inside the current (Jupyter kernel) process.

    Reads ``task.json`` from *task_root*, wires each parameter up as a plain
    value, a file path, or a stream from a parent task, imports the target
    function named by ``task_spec.task``, and runs it once per parameter
    combination produced by ``dict_zip``, writing results to
    ``<task_root>/_outputs`` via :class:`SeqWriter`.
    """
    logger.remove()
    logger.add(sys.stdout)
    task_spec = TaskSpec.model_validate(
        json.load(open(os.path.join(task_root, "task.json")))
    )
    task_hash = task_spec.task_hash
    # logger.info(f"task_root: {task_root}")
    # Expose the hash to the task code through the environment.
    os.environ["TASK_HASH"] = task_hash
    python_path = os.path.join(task_root, "_root")
    work_dir = os.path.join(task_root, "_work_dir")

    with ExecEnv(python_path, work_dir):
        req_args = {}   # streamed params consumed via dict_zip, one value per run
        iter_args = []  # names of params handed to the task lazily as SeqReaders
        raw_args = {}   # plain values / file paths passed through unchanged
        for k, v in task_spec.param.items():
            if k.startswith("__"):
                # Double-underscore keys are internal metadata, not task params.
                continue
            if isinstance(v, dict) and "__task_file" in v:
                raw_args[k] = os.path.join(task_root, "_params", k, "FILE")
            elif isinstance(v, dict) and "__file" in v:
                raw_args[k] = os.path.join(task_root, "_params", k, "FILE")
            elif isinstance(v, dict) and "__task_hash" in v:
                # Output of another task: iterate lazily when "__iter" is set,
                # otherwise consume value-by-value through dict_zip.
                if v.get("__iter", False):
                    iter_args.append(k)
                else:
                    req_args[k] = SeqReader(
                        os.path.join(task_root, "_params", k)
                    )
            else:
                raw_args[k] = v
        logger.info(f"raw_args: {raw_args}")
        with SeqWriter(os.path.join(task_root, "_outputs")) as output_writer:
            # task_spec.task is a dotted path, "package.module.function".
            mod_name, func_name = task_spec.task.rsplit(".", 1)
            mod = importlib.import_module(mod_name)
            func = getattr(mod, func_name)
            async for req_arg in dict_zip(**req_args):

                def _runrun():
                    # Fresh SeqReaders per run so each invocation starts at
                    # index 0 of the upstream output stream.
                    _iter_args = {
                        k: SeqReader(os.path.join(task_root, "_params", k))
                        for k in iter_args
                    }
                    all_args = {**req_arg, **_iter_args, **raw_args}
                    # logger.info(f"all_args: {all_args}")
                    # check if the function is async
                    if inspect.iscoroutinefunction(func):
                        output = asyncio.run(func(**all_args))
                    else:
                        output = func(**all_args)
                    if inspect.isgenerator(output):
                        # Generator tasks stream multiple outputs.
                        for o in output:
                            output_writer.write(o)
                    else:
                        output_writer.write(output)

                # Run the (possibly blocking) task body in a thread so this
                # loop stays responsive; _mid_run re-binds the loop in that
                # thread so SeqReader.__next__ can schedule onto it.
                try:
                    await asyncio.to_thread(
                        _mid_run,
                        asyncio.get_running_loop(),
                        _runrun,
                    )
                except Exception as e:
                    logger.error(f"error in task {task_hash}: {e}")
                    logger.exception(e)
                    raise
    logger.info(f"finished {task_spec.task}")


def add_log(task_root: str, msg: dict):
    """Persist one log message as a timestamp-named JSON file under ``_logs``."""
    log_path = os.path.join(task_root, "_logs", f"{time.time():.8f}.json")
    with open(log_path, "w") as f:
        json.dump(msg, f)


async def worker_in_cli(task_root: str) -> None:
    """Run a task in a fresh Jupyter kernel and relay its output.

    Starts an IPython kernel, makes it import this very module by path, and
    executes ``worker_in_jupyter(task_root)`` there.  IOPub messages are
    mirrored: kernel stderr and tracebacks are echoed locally, and every
    displayable message is appended to the task's ``_logs`` directory via
    :func:`add_log`.  The kernel is always shut down on exit.
    """
    # Derive "<repo_dir>" and "<package>.<module>" from this file's location
    # so the kernel can import the same module.  (Fix: the original split on
    # "/" and stripped ".py" by character count, which breaks on Windows
    # paths; os.path handles both portably.)
    mod_dir, file_name = os.path.split(os.path.abspath(__file__))
    mod_path, pkg_name = os.path.split(mod_dir)
    mod_name = f"{pkg_name}.{os.path.splitext(file_name)[0]}"

    code_to_run = f"""
import sys
import asyncio
import os
sys.path.append('{mod_path}')
import {mod_name} as worker_mod
assert worker_mod._MAGIC_NUMBER_=={_MAGIC_NUMBER_}, f'ERROR mod imported'
await worker_mod.worker_in_jupyter("{task_root}")
"""
    kernel_manager = AsyncKernelManager(kernel_name="python3")
    await kernel_manager.start_kernel()
    kernel_client = kernel_manager.client()
    kernel_client.start_channels()
    await kernel_client.wait_for_ready()
    kernel_client.execute(code_to_run)
    try:
        while True:
            msg = await kernel_client.get_iopub_msg()
            content = msg["content"]
            logger.trace(msg)
            msg_type = msg["header"]["msg_type"]
            if msg_type in {
                "execute_result",
                "display_data",
                "stream",
                "error",
            }:
                output_cell = nbformat.v4.output_from_msg(msg)
                # Mirror kernel stderr locally so errors are visible in this
                # process's console.  (Fix: dropped the dead "if True and"
                # toggle left over from debugging.)
                if output_cell["output_type"] == "stream":
                    if output_cell["name"] == "stderr":
                        print(
                            output_cell["text"],
                            file=sys.stderr,
                            flush=True,
                            end="",
                        )
                if output_cell["output_type"] == "error":
                    print("\n".join(output_cell["traceback"]))
                add_log(task_root, output_cell)
            elif msg_type == "status":
                # The kernel going idle after our execute means the task
                # coroutine has finished (or raised).
                if content["execution_state"] == "idle":
                    break
            else:
                logger.debug(f"msg_type {msg_type} not handled")
    except KeyboardInterrupt:
        logger.debug("Interrupted by user.")
    finally:
        # Always tear the kernel down, even on error or interrupt.
        kernel_client.stop_channels()
        await kernel_manager.shutdown_kernel()
        logger.debug("Kernel shutdown")


if __name__ == "__main__":
    import fire

    def main(task_root: str):
        """CLI entry point: run the worker for the given task directory."""
        asyncio.run(worker_in_cli(task_root))

    def restore(task_root: str):
        """Replace task_root with the contents of its .bak snapshot."""
        shutil.rmtree(task_root)
        shutil.copytree(f"{task_root}.bak", task_root)

    def backup(task_root: str):
        """Snapshot task_root into task_root.bak, overwriting any old copy."""
        shutil.copytree(task_root, f"{task_root}.bak", dirs_exist_ok=True)

    fire.Fire({"run": main, "restore": restore, "backup": backup})
