import asyncio
import os
import shutil
import subprocess
from concurrent.futures import ThreadPoolExecutor

from loguru import logger

from .api import (
    DependencyTaskError,
    NoMoreFileException,
    ServerHelper,
    TaskSpec,
    TaskStatus,
)


def file_name_for_param(index: int):
    """Return the canonical on-disk name for parameter *index*.

    Names are zero-padded to eight digits (e.g. ``param_00000042``) so that
    a plain lexicographic sort yields numeric order.
    """
    return "param_" + str(index).zfill(8)


class WorkerManager:
    """Pulls ready tasks from the server and executes each one in a worker
    subprocess (``python -m rr.worker run <task_root>``).

    For every task it materializes a working directory under
    ``~/task_space/<task_hash>`` containing the cloned repo (``_root``) and
    the ``_params`` / ``_outputs`` / ``_logs`` / ``_work_dir`` exchange
    directories, streams parameter files down from the server, and uploads
    whatever the worker writes back.
    """

    # Per-task working directories live under $HOME/task_space/<task_hash>.
    # expanduser() falls back to the pwd database when HOME is unset, where
    # the old `os.getenv("HOME") + ...` raised TypeError.
    _root_dir = os.path.join(os.path.expanduser("~"), "task_space")

    def __init__(self, server_helper: ServerHelper, n_workers: int = 1):
        self.server_helper = server_helper
        # Maximum number of concurrently running tasks (see loop()).
        self.n_workers = n_workers
        # task_hash -> asyncio.Task driving run_task() for that task.
        self.coroutines: dict[str, asyncio.Task] = {}
        # Thread pool for offloading blocking work.
        # NOTE(review): currently unused — presumably intended for the
        # synchronous downloads in _file_get(); confirm before removing.
        self.thread_pool = ThreadPoolExecutor(max_workers=3)

    def _task_root(self, task_hash: str) -> str:
        """Return the working directory for *task_hash*."""
        return os.path.join(self._root_dir, task_hash)

    async def _arg_get(self, task_hash, root_dir):
        """Download the argument stream of upstream task *task_hash* into
        *root_dir*, one file per index, until the server reports the end.

        Writes a ``FINISHED`` sentinel on normal completion and a ``FAILED``
        sentinel (then re-raises) when the upstream task errored, so the
        worker process polling this directory knows when to stop waiting.
        """
        # _prepare_task() always wipes the task dir first, so this directory
        # must not exist yet — a leftover would mean stale parameter files.
        assert not os.path.exists(root_dir)
        os.makedirs(root_dir, exist_ok=True)
        index = 0
        try:
            while True:
                target_path = os.path.join(root_dir, file_name_for_param(index))
                try:
                    await self.server_helper.download_task_arg_async(
                        task_hash,
                        index,
                        target_path,
                    )
                    assert os.path.exists(target_path)
                except NoMoreFileException:
                    # All args delivered; signal completion to the worker.
                    with open(os.path.join(root_dir, "FINISHED"), "w") as f:
                        f.write("finished")
                    break
                except DependencyTaskError as e:
                    # Upstream task failed; record the reason and propagate.
                    with open(os.path.join(root_dir, "FAILED"), "w") as f:
                        f.write(str(e))
                    raise
                index += 1
        except asyncio.CancelledError:
            logger.info(f"Cancelled _arg_get for {task_hash}")
            raise

    async def _file_get(self, file_id, root_dir):
        """Download the single server file *file_id* to ``root_dir/FILE``.

        On any failure a ``FAILED`` sentinel with the error text is written
        and the exception is re-raised.

        NOTE(review): download_file() is a blocking call inside an async
        method — consider routing it through self.thread_pool via
        run_in_executor.
        """
        try:
            os.makedirs(root_dir, exist_ok=True)
            target_path = os.path.join(root_dir, "FILE")
            self.server_helper.download_file(file_id, target_path)
            logger.info(f"Downloaded file {file_id} to {target_path}")
        except Exception as e:
            with open(os.path.join(root_dir, "FAILED"), "w") as f:
                f.write(str(e))
            raise

    def _prepare_task(self, task: TaskSpec) -> tuple[list, list]:
        """Create a fresh working directory for *task* and build its
        parameter-fetching coroutines.

        Returns ``(async_tasks, sync_tasks)``: streaming arg downloads to be
        run as background tasks, and single-file downloads to be awaited
        before the worker starts.

        Raises RuntimeError if the git clone/checkout of the task's code
        fails.
        """
        task_dir = self._task_root(task.task_hash)
        if os.path.exists(task_dir):
            logger.warning(f"Task {task.task_hash} already exists, deleting")
            shutil.rmtree(task_dir)
        os.makedirs(task_dir, exist_ok=True)
        with open(os.path.join(task_dir, "task.json"), "w") as f:
            f.write(task.model_dump_json())
        # Clone the task's repo into _root. List-form subprocess.run avoids
        # the shell injection the previous os.system f-strings allowed via
        # fetch_url / commit.
        clone = subprocess.run(
            ["git", "clone", "--no-checkout", task.fetch_url, f"{task_dir}/_root"]
        )
        if clone.returncode != 0:
            raise RuntimeError(
                f"Failed to clone repository from {task.fetch_url}"
            )
        checkout = subprocess.run(
            ["git", "checkout", task.commit], cwd=f"{task_dir}/_root"
        )
        if checkout.returncode != 0:
            raise RuntimeError(
                f"Failed to checkout commit {task.commit} in {task_dir}/_root"
            )
        # Exchange directories shared with the worker subprocess.
        for sub in ("_logs", "_outputs", "_params", "_work_dir"):
            os.makedirs(os.path.join(task_dir, sub), exist_ok=True)
        # Build the parameter download jobs.
        async_tasks = []
        sync_tasks = []
        for k, v in task.param.items():
            param_dir = os.path.join(task_dir, "_params", k)
            if isinstance(v, dict) and "__task_file" in v:
                # File produced by another task: its id is
                # "<task_hash>_<filename>", matching the upload side in
                # start_worker(). (Bug fix: the filename was previously a
                # hard-coded placeholder and v["__task_file"] went unused.)
                file_id = f"{v['__task_hash']}_{v['__task_file']}"
                sync_tasks.append(self._file_get(file_id, param_dir))
            elif isinstance(v, dict) and "__task_hash" in v:
                # Streamed args from an upstream task: background coroutine.
                async_tasks.append(self._arg_get(v["__task_hash"], param_dir))
            elif isinstance(v, dict) and "__file" in v:
                # Plain server file referenced directly by id.
                sync_tasks.append(self._file_get(v["__file"], param_dir))
        return async_tasks, sync_tasks

    def _upload_dir(
        self,
        task: TaskSpec,
        subdir: str,
        type_: str,
        content_type: str,
        *,
        with_file_id: bool = False,
    ) -> bool:
        """Upload and delete every non-sentinel file in ``<task_root>/<subdir>``.

        Returns True if anything was uploaded (i.e. the worker is still
        producing data). ``with_file_id`` registers each file under the id
        ``<task_hash>_<filename>`` so downstream tasks can fetch it.
        """
        uploaded = False
        dir_path = os.path.join(self._task_root(task.task_hash), subdir)
        for file in sorted(os.listdir(dir_path)):
            if file in ("FINISHED", "FAILED"):
                continue  # sentinels stay on disk for start_worker() to read
            file_path = os.path.join(dir_path, file)
            extra = {"file_id": f"{task.task_hash}_{file}"} if with_file_id else {}
            self.server_helper.add_file(
                file_path,
                task_hash=task.task_hash,
                type_=type_,
                content_type=content_type,
                **extra,
            )
            os.remove(file_path)
            uploaded = True
        return uploaded

    async def start_worker(self, task: TaskSpec):
        """Run the worker subprocess for *task* and pump its outputs, logs
        and work files up to the server until it exits.

        Always reports a terminal TaskStatus (COMPLETED / FAILED / CANCELLED)
        to the server and reaps the subprocess.
        """
        logger.info(f"Starting worker for task {task.task_hash} {task.task}")
        task_root = self._task_root(task.task_hash)
        # Run "python -m rr.worker" from the package's parent directory so
        # the module is importable regardless of our own cwd.
        cwd = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
        worker_process = await asyncio.create_subprocess_exec(
            "python",
            "-m",
            "rr.worker",
            "run",
            task_root,
            cwd=cwd,
        )
        status = TaskStatus.RUNNING
        try:
            while True:
                busy = self._upload_dir(
                    task, "_outputs", "output", "application/pickle"
                )
                busy |= self._upload_dir(task, "_logs", "log", "text/plain")
                busy |= self._upload_dir(
                    task, "_work_dir", "file", "application/raw",
                    with_file_id=True,
                )
                # Stop only once the process has exited AND a full pass
                # uploaded nothing — i.e. all directories are drained.
                if worker_process.returncode is not None and not busy:
                    break
                await asyncio.sleep(1)
            finished_file = os.path.join(task_root, "_outputs", "FINISHED")
            failed_file = os.path.join(task_root, "_outputs", "FAILED")
            # The worker always leaves exactly one sentinel behind.
            assert os.path.exists(finished_file) or os.path.exists(failed_file)
            if os.path.exists(failed_file):
                status = TaskStatus.FAILED
            else:
                status = TaskStatus.COMPLETED
        except asyncio.CancelledError:
            worker_process.kill()
            status = TaskStatus.CANCELLED
        finally:
            self.server_helper.set_task_status(task.task_hash, status)
            await worker_process.wait()

    async def run_task(self, task: TaskSpec):
        """Prepare, execute, and clean up one task end-to-end.

        Swallows cancellation and failures (they are logged and reflected in
        the task status by start_worker) and always removes the task from
        the in-flight registry.
        """
        try:
            async with asyncio.TaskGroup() as tg:
                async_tasks, sync_tasks = self._prepare_task(task)
                # Streaming arg downloads run for the task's whole lifetime.
                for coroutine in async_tasks:
                    tg.create_task(coroutine)
                # Single-file params must be fully present before the worker
                # starts.
                await asyncio.gather(*sync_tasks)
                tg.create_task(self.start_worker(task))
        except asyncio.CancelledError:
            logger.info(f"Task {task.task_hash} {task.task} cancelled")
        except Exception as e:
            logger.error(f"Task {task.task_hash} {task.task} failed: {e}")
            logger.exception(e)
        finally:
            # default=None: loop() clears the registry on cancellation, so
            # the key may already be gone.
            self.coroutines.pop(task.task_hash, None)

    async def loop(self):
        """Scheduling loop: poll the server for ready tasks and run up to
        ``n_workers`` of them concurrently until cancelled."""
        try:
            async with asyncio.TaskGroup() as tg:
                while True:
                    if len(self.coroutines) >= self.n_workers:
                        # All worker slots busy — wait for one to free up.
                        await asyncio.sleep(1)
                        continue
                    task = self.server_helper.acquire_ready_task()
                    if task is None:
                        # Nothing ready on the server yet.
                        await asyncio.sleep(1)
                        continue
                    logger.info(
                        f"Acquired task {task.task_hash} {task.task}"
                    )
                    self.coroutines[task.task_hash] = tg.create_task(
                        self.run_task(task)
                    )
        except asyncio.CancelledError:
            logger.info("Loop cancelled")
            self.coroutines.clear()
