"""Worker manager for executing tasks in isolated environments.

This module handles preparing workspace for tasks, starting worker processes,
and managing the lifecycle of task execution.
"""

import asyncio
import glob
import os
import shutil
import subprocess
from concurrent.futures import ThreadPoolExecutor
from typing import Dict, Optional, Type

from loguru import logger

from tasks.worker.constants import (
    DIR_ROOT, DIR_WORK, DIR_PARAMS, DIR_OUTPUTS, DIR_LOGS, DIR_STATIC,
    FILE_TASK_SPEC, FILE_PARAM, FILE_FAILED
)
from tasks.worker.interface import (
    WorkspaceSpec, WorkerInterface, ManagerInterface, key_tag_parse, create_workspace_structure
)
from tasks.worker.local_worker import LocalWorker

from ..model import (
    TaskSpec,
    TaskStatus,
)
from ..server_helper import ServerHelper


class TaskErrorException(Exception):
    """Raised when a task fails during execution (non-zero worker exit)."""


class WorkerManager(ManagerInterface):
    """Manager for task workers that handles workspace preparation and monitoring.

    The manager acquires ready tasks from the server, prepares an isolated
    workspace for each one (git checkout, parameter files, shared static
    cache), runs a worker on it, streams logs/outputs back to the server
    while the worker runs, and reports the final task status.
    """

    # Root directory under which every per-task workspace lives.
    # os.path.expanduser is used instead of os.getenv("HOME") so this does
    # not raise TypeError when $HOME is unset.
    _root_dir = os.path.join(os.path.expanduser("~"), "task_space")

    def __init__(
        self,
        server_helper: ServerHelper,
        n_workers: int = 1,
        cache_dir: Optional[str] = None,
        worker_class: Type[WorkerInterface] = LocalWorker,
    ):
        """Initialize the worker manager.

        Args:
            server_helper: Helper for interacting with the task server
            n_workers: Number of concurrent workers to run
            cache_dir: Directory for caching static files
                (defaults to ``~/task_cache``)
            worker_class: Worker class to use for executing tasks
        """
        self.server_helper = server_helper
        self.n_workers = n_workers
        self.worker_class = worker_class
        # task_hash -> asyncio.Task driving run_task for that task
        self.coroutines: Dict[str, asyncio.Task] = {}
        # task_hash -> live worker instance (removed when the task finishes)
        self.workers: Dict[str, WorkerInterface] = {}
        self.thread_pool = ThreadPoolExecutor(max_workers=3)
        if cache_dir is None:
            cache_dir = os.path.expanduser("~/task_cache")
        self.cache_dir = cache_dir

    def get_workspace(self, task_hash: str) -> WorkspaceSpec:
        """Get workspace specification for a task.

        Args:
            task_hash: Task hash

        Returns:
            WorkspaceSpec: Workspace specification rooted at ``_root_dir``
        """
        return WorkspaceSpec(task_hash=task_hash, root_dir=self._root_dir)

    def _file_get(self, task_hash: str, _type: str, target_dir: str) -> None:
        """Download all files of a given type for a task from the server.

        On any failure a ``FILE_FAILED`` marker containing the error text is
        written into ``target_dir`` before the exception is re-raised, so the
        workspace records why the download failed.

        Args:
            task_hash: Task hash
            _type: File type (e.g. "output")
            target_dir: Target directory (created if missing)
        """
        try:
            os.makedirs(target_dir, exist_ok=True)
            file_list = self.server_helper.list_files(task_hash, _type)
            for file in file_list:
                target_path = os.path.join(target_dir, os.path.split(file)[1])
                self.server_helper.download_file(file, target_path)
        except Exception as e:
            with open(os.path.join(target_dir, FILE_FAILED), "w") as f:
                f.write(str(e))
            raise

    def prepare_workspace(self, task: TaskSpec, workspace: WorkspaceSpec) -> None:
        """Prepare workspace for a task.

        Deletes any stale workspace, checks out the task's commit, links the
        shared static cache, and downloads all parameter files.

        Args:
            task: Task specification
            workspace: Workspace specification

        Raises:
            RuntimeError: If the git clone or checkout fails.
        """
        task_dir = workspace.task_dir
        if os.path.exists(task_dir):
            logger.warning(f"Task {task.task_hash} already exists, deleting")
            shutil.rmtree(task_dir)

        # Create the basic directory structure
        create_workspace_structure(workspace)

        # Write task specification
        with open(workspace.task_spec_path, "w") as f:
            f.write(task.model_dump_json())

        # Clone the git repository. Argument-list form (shell=False) avoids
        # shell injection through fetch_url/commit, which the previous
        # os.system f-string invocation was vulnerable to.
        clone = subprocess.run(
            ["git", "clone", "--no-checkout", task.fetch_url, workspace.repo_dir]
        )
        if clone.returncode != 0:
            raise RuntimeError(f"Failed to clone repository from {task.fetch_url}")

        checkout = subprocess.run(
            ["git", "checkout", task.commit], cwd=workspace.repo_dir
        )
        if checkout.returncode != 0:
            raise RuntimeError(
                f"Failed to checkout commit {task.commit} in {workspace.repo_dir}"
            )

        # Share the cache directory into the workspace via a symlink.
        os.makedirs(self.cache_dir, exist_ok=True)
        os.symlink(self.cache_dir, workspace.static_dir)

        # Output params: each value is the hash of a task whose output files
        # are pulled into a per-param subdirectory.
        for k, v in task.output_params.items():
            k, tags = key_tag_parse(k)
            self._file_get(v, "output", os.path.join(workspace.params_dir, k))

        # File params: "<task_hash>:<filename>" references a file produced by
        # another task; any other value is treated as a direct server path.
        for k, v in task.file_params.items():
            k, tags = key_tag_parse(k)
            param_dir = os.path.join(workspace.params_dir, k)
            os.makedirs(param_dir, exist_ok=True)

            if ":" in v:
                task_hash, filename = v.split(":", 1)
                # BUG FIX: the remote path previously embedded the literal
                # placeholder "(unknown)" instead of the parsed filename.
                self.server_helper.download_file(
                    f"{task_hash}/file/{filename}",
                    os.path.join(param_dir, FILE_PARAM),
                )
            else:
                self.server_helper.download_file(
                    v,
                    os.path.join(param_dir, FILE_PARAM),
                )

        logger.debug(f"Task {task.task_hash} {task.task} prepared")

    def _drain_directory(self, directory: str, upload, task_hash: str) -> None:
        """Upload every file in *directory* (sorted by name) and delete each
        local copy after a successful upload.

        Args:
            directory: Directory to scan
            upload: Callable ``upload(path, task_hash, name)`` doing the upload
            task_hash: Task hash passed through to the upload callable
        """
        for name in sorted(os.listdir(directory)):
            path = os.path.join(directory, name)
            upload(path, task_hash, name)
            os.remove(path)

    def _upload_remaining_files(self, task: TaskSpec, workspace: WorkspaceSpec) -> None:
        """Upload every file left in the work dir, preserving relative paths.

        Args:
            task: Task specification
            workspace: Workspace specification
        """
        prefix_len = len(workspace.work_dir) + 1
        for path in glob.glob(workspace.work_dir + "/**", recursive=True):
            if os.path.isdir(path):
                continue
            relative_path = path[prefix_len:]
            # Skip files in _static directory (should not exist after symlink
            # removal, but safety check).
            if relative_path.startswith(f"{DIR_STATIC}/"):
                continue
            self.server_helper.upload_task_file(path, task.task_hash, relative_path)
            os.remove(path)

    async def monitor_worker(self, task: TaskSpec, worker: WorkerInterface) -> TaskStatus:
        """Monitor a worker's execution and process outputs.

        Streams logs and outputs to the server once per second until the
        worker reaches a terminal state, then uploads all remaining files.
        Always reports the final status to the server and drops the worker
        reference, even on failure or cancellation.

        Args:
            task: Task specification
            worker: The worker instance to monitor

        Returns:
            TaskStatus: The final status reported to the server.
        """
        workspace = self.get_workspace(task.task_hash)
        status = TaskStatus.RUNNING

        try:
            # Start the worker
            await worker.start()

            # Poll until the worker reaches a terminal state, streaming logs
            # and outputs to the server as they appear.
            while True:
                self._drain_directory(
                    workspace.logs_dir, self.server_helper.upload_task_log, task.task_hash
                )
                self._drain_directory(
                    workspace.outputs_dir, self.server_helper.upload_task_output, task.task_hash
                )

                worker_status = await worker.get_status()
                if worker_status in (TaskStatus.COMPLETED, TaskStatus.FAILED, TaskStatus.CANCELLED):
                    status = worker_status
                    break

                await asyncio.sleep(1)

            # Wait for worker to finish
            exit_code = await worker.wait()
            logger.info(f"Worker finished with exit code {exit_code}")

            if exit_code != 0:
                raise TaskErrorException(f"Task {task.task_hash} {task.task} failed")

            # Remove the _static symlink before uploading so the shared cache
            # is neither uploaded nor deleted with the task's own files.
            if os.path.islink(workspace.static_dir):
                os.unlink(workspace.static_dir)
                logger.debug(f"Removed _static symlink for task {task.task_hash}")

            # Upload all generated files
            self._upload_remaining_files(task, workspace)

            status = TaskStatus.COMPLETED

        except asyncio.CancelledError:
            logger.info(f"Task {task.task_hash} {task.task} cancelled")
            await worker.cancel()
            status = TaskStatus.CANCELLED

        except TaskErrorException:
            logger.info("Caught TaskErrorException, setting task status to failed")
            status = TaskStatus.FAILED

        except Exception:
            # BUG FIX: the original `return` inside `finally` silently
            # swallowed unexpected exceptions (e.g. upload failures) and
            # reported the stale RUNNING status to the server.
            logger.exception(f"Unexpected error while monitoring task {task.task_hash}")
            status = TaskStatus.FAILED

        finally:
            # Update task status on the server
            self.server_helper.set_task_status(task.task_hash, status)
            # Clean up worker reference
            self.workers.pop(task.task_hash, None)

        # Returning here (not inside `finally`) preserves normal exception
        # semantics for anything the handlers above deliberately re-raise.
        return status

    async def run_task(self, task: TaskSpec, worker_class: Optional[Type[WorkerInterface]] = None) -> None:
        """Run a task using the specified worker class.

        Args:
            task: Task specification
            worker_class: Worker class to use for executing the task
                (defaults to ``self.worker_class``)
        """
        workspace = self.get_workspace(task.task_hash)

        try:
            # Prepare workspace
            self.prepare_workspace(task, workspace)

            # Create worker instance
            if worker_class is None:
                worker_class = self.worker_class

            worker = worker_class(workspace)
            self.workers[task.task_hash] = worker

            # monitor_worker handles worker-level failure/cancellation itself
            # and reports the final status to the server.
            await self.monitor_worker(task, worker)

        except asyncio.CancelledError:
            logger.info(f"Task {task.task_hash} {task.task} cancelled")
            if task.task_hash in self.workers:
                await self.workers[task.task_hash].cancel()

        except Exception as e:
            # Typically a workspace-preparation failure before a worker existed.
            logger.error(f"Task {task.task_hash} {task.task} failed: {e}")
            logger.exception(e)
            self.server_helper.set_task_status(task.task_hash, TaskStatus.FAILED)

        finally:
            self.coroutines.pop(task.task_hash, None)

        logger.info(f"Task {task.task_hash} {task.task} finished")

    async def loop(self, n_tasks=None):
        """Main loop for the worker manager.

        Polls the server for ready tasks and schedules up to
        ``self.n_workers`` concurrent ``run_task`` coroutines.

        Args:
            n_tasks: If given, stop scheduling after this many tasks have
                been issued; already-running tasks are awaited by the
                TaskGroup before returning.
        """
        issued_tasks = 0
        try:
            async with asyncio.TaskGroup() as tg:
                while True:
                    # At capacity: wait for a slot to free up.
                    if len(self.coroutines) >= self.n_workers:
                        await asyncio.sleep(1)
                        continue
                    try:
                        ready_tasks = self.server_helper.get_ready_tasks(1)
                        if len(ready_tasks) == 0:
                            await asyncio.sleep(1)
                            continue
                        # can choose the task according to some strategy
                        task = ready_tasks[0]
                        task = self.server_helper.acquire_task(task.task_hash)
                        if task is None:
                            # Another manager acquired it first.
                            await asyncio.sleep(1)
                            continue
                        self.coroutines[task.task_hash] = tg.create_task(
                            self.run_task(task)
                        )
                        issued_tasks += 1
                        if n_tasks is not None and issued_tasks >= n_tasks:
                            break
                    except Exception as e:
                        logger.error(f"Error acquiring task: {e}")
                        await asyncio.sleep(1)
                logger.info("Exiting loop")
        except asyncio.CancelledError:
            logger.info("Loop cancelled")
            # Cancel all running workers. Iterate over a snapshot:
            # monitor_worker removes entries from self.workers while we await.
            for worker in list(self.workers.values()):
                await worker.cancel()
            self.coroutines.clear()


if __name__ == "__main__":
    # Ad-hoc entry point: process a single task against the configured server.
    server = ServerHelper(base_url=os.getenv("API_URL", "http://localhost:9010"))
    manager = WorkerManager(server)
    asyncio.run(manager.loop(n_tasks=1))
