import asyncio
from datetime import datetime
import os
from typing import List, Optional, Union, Dict

from fastapi import FastAPI, HTTPException, Query, Request, Response, status
from loguru import logger
from peewee import IntegrityError
from peewee import Model as PeeweeModel
from playhouse.shortcuts import model_to_dict
from pydantic import BaseModel

from .minio_file_manager import MinioFileManager

from ..model import TaskSpec, TaskStatus
from .db import (
    TaskDependencyModel,
    TaskSpecModel,
    add_task_dependencies,
    create_tables,
    db,
    get_dependent_tasks,
    get_task_dependencies,
    task_spec_to_model,
    update_task_status_atomically,
)
from ..chunk_io import ChunkIO


def peewee_to_pydantic(
    peewee_model: PeeweeModel, pydantic_model: type[BaseModel]
) -> BaseModel:
    """Build a Pydantic instance from a Peewee row.

    The row is first flattened to a plain dict with ``model_to_dict``
    and the dict is then validated by the target Pydantic class.
    """
    row_dict = model_to_dict(peewee_model)
    return pydantic_model.model_validate(row_dict)


# FastAPI application exposing the task-spec and file endpoints defined below.
app = FastAPI(title="Task Spec API Server")
# MinIO-backed file store; connection settings come from the environment.
# NOTE(review): os.getenv returns None for unset variables — presumably
# MinioFileManager validates its arguments; confirm before relying on it.
file_manager = MinioFileManager(
    server=os.getenv("MINIO_SERVER"),
    access_key=os.getenv("MINIO_ACCESS_KEY"),
    secret_key=os.getenv("MINIO_SECRET_KEY"),
)


def extract_dependencies(task_spec: TaskSpec) -> set:
    """
    Collect every dependency task hash referenced by a TaskSpec.

    Hashes come from two places:
      * ``file_params`` values of the form ``<task_hash>:<...>``; values
        that start with ``:`` are user-uploaded files and are skipped
      * every value in ``output_params``

    Args:
        task_spec: The TaskSpec to extract dependencies from

    Returns:
        set: Set of dependency task hashes
    """
    deps = {
        value.split(":", 1)[0]
        for value in task_spec.file_params.values()
        if ":" in value and not value.startswith(":")
    }
    deps.update(task_spec.output_params.values())
    return deps


@app.get("/", response_model=dict)
async def root():
    """Landing endpoint; returns a fixed greeting payload."""
    return {"message": "Hello World"}


@app.post("/task_specs/", response_model=TaskSpec)
async def create_task_spec(task_spec: TaskSpec):
    """Create a new TaskSpec in the database.

    If the task spec already exists (same task_hash),
    returns 202 with the existing task spec.
    """
    try:
        with db.atomic():
            record = task_spec_to_model(task_spec)
            record.status = TaskStatus.PENDING.value
            record.save()

            # Persist the dependency edges extracted from the spec.
            dependent_task_hashes = extract_dependencies(task_spec)
            logger.info(f"Dependent task hashes: {dependent_task_hashes}")
            if dependent_task_hashes:
                # Bulk insertion keeps this a single round trip.
                add_task_dependencies(task_spec.task_hash, list(dependent_task_hashes))

            return task_spec
    except IntegrityError as e:
        # Duplicate task_hash: hand the stored spec back with 202.
        try:
            existing = TaskSpecModel.get(
                TaskSpecModel.task_hash == task_spec.task_hash
            )
        except TaskSpecModel.DoesNotExist:
            # A different integrity violation caused the IntegrityError.
            raise HTTPException(status_code=400, detail=str(e))
        return Response(
            content=peewee_to_pydantic(existing, TaskSpec).model_dump_json(),
            status_code=status.HTTP_202_ACCEPTED,
            media_type="application/json",
        )
    except Exception as e:
        logger.exception(e)
        raise HTTPException(status_code=400, detail=str(e))


@app.get("/task_specs/{task_hash}", response_model=TaskSpec)
async def get_task_spec(task_hash: str):
    """Look up a single TaskSpec by its task_hash; 404 if absent."""
    try:
        record = TaskSpecModel.get(TaskSpecModel.task_hash == task_hash)
    except TaskSpecModel.DoesNotExist:
        raise HTTPException(status_code=404, detail="TaskSpec not found")
    return peewee_to_pydantic(record, TaskSpec)


@app.get("/task_specs/", response_model=List[TaskSpec])
async def list_task_specs(
    offset: int = Query(default=0, ge=0), limit: int = Query(default=10, ge=1, le=100)
):
    """Return a page of TaskSpecs, newest first."""
    try:
        query = TaskSpecModel.select().order_by(TaskSpecModel.created_at.desc())
        page = query.offset(offset).limit(limit)
        return [peewee_to_pydantic(row, TaskSpec) for row in page]
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/named_task_specs/", response_model=List[TaskSpec])
async def list_named_task_specs(
    offset: int = Query(default=0, ge=0), limit: int = Query(default=10, ge=1, le=100)
):
    """Return a page of TaskSpecs with non-empty names, newest first."""
    try:
        query = (
            TaskSpecModel.select()
            .where(TaskSpecModel.name != "")
            .order_by(TaskSpecModel.created_at.desc())
        )
        page = query.offset(offset).limit(limit)
        return [peewee_to_pydantic(row, TaskSpec) for row in page]
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


@app.put("/task_status/{task_hash}", response_model=TaskStatus)
async def update_task_status(task_hash: str, status: TaskStatus):
    """Set the status of a task identified by task_hash; 404 if absent.

    Note: the `status` parameter intentionally shadows the fastapi
    `status` module inside this function (the module is not used here).
    """
    try:
        with db.atomic():
            record = TaskSpecModel.get(TaskSpecModel.task_hash == task_hash)
            record.status = status.value
            record.updated_at = datetime.now()
            record.save()
        return status
    except TaskSpecModel.DoesNotExist:
        raise HTTPException(status_code=404, detail="TaskSpec not found")
    except Exception as e:
        logger.exception(e)
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/task_ready", response_model=List[TaskSpec])
async def get_ready_task(limit: int = Query(default=1, ge=1, le=10)):
    """
    Find and return a list of tasks that are ready to run.

    A task is ready if:
    1. It is in PENDING status
    2. All its dependencies (in file_params and output_params) are in COMPLETED status

    If any dependency is in FAILED status, the task will be marked as FAILED too.

    Args:
        limit: Maximum number of ready tasks to return

    Returns:
        List of TaskSpec objects that are ready to run
    """
    try:
        return await _find_ready(limit)
    except Exception as e:
        logger.exception(e)
        raise HTTPException(status_code=500, detail=str(e))


async def _find_ready(limit: int) -> List[TaskSpec]:
    """
    Find up to ``limit`` tasks that are ready to run.

    Tasks already in READY status are returned first; if fewer than
    ``limit`` exist, PENDING tasks whose dependencies are all COMPLETED
    are promoted to READY and included. A PENDING task with a FAILED
    dependency is marked FAILED. ``__collect__`` placeholder tasks are
    marked COMPLETED directly instead of being returned.

    Args:
        limit: Maximum number of ready tasks to find

    Returns:
        List of TaskSpec objects that are ready to run
    """
    # Tasks that were already promoted to READY on an earlier call.
    ready_models = (
        TaskSpecModel.select()
        .where(TaskSpecModel.status == TaskStatus.READY.value)
        .limit(limit)
    )
    ready_tasks = [peewee_to_pydantic(task, TaskSpec) for task in ready_models]
    if len(ready_tasks) >= limit:
        return ready_tasks[:limit]

    # Top up from PENDING tasks whose dependencies are satisfied.
    pending_tasks = TaskSpecModel.select().where(
        TaskSpecModel.status == TaskStatus.PENDING.value
    )

    for task in pending_tasks:
        if len(ready_tasks) >= limit:
            break

        task_spec = peewee_to_pydantic(task, TaskSpec)
        is_ready, has_failed_dependency = await _check_task_dependencies(task_spec)

        # If any dependency has failed, mark this task as failed
        if has_failed_dependency:
            update_task_status_atomically(
                task_spec.task_hash,
                TaskStatus.PENDING.value,
                TaskStatus.FAILED.value,
            )

        # If all dependencies are satisfied, add to ready list
        if is_ready:
            if task.task == '__collect__':
                # Since collect task is not a real task, we directly mark it as completed
                update_task_status_atomically(
                    task_spec.task_hash,
                    TaskStatus.PENDING.value,
                    TaskStatus.COMPLETED.value,
                )
            else:
                # BUG FIX: previously the raw Peewee model was appended here,
                # producing a list that mixed Peewee and Pydantic objects and
                # broke response_model serialization; append the converted
                # TaskSpec so the result list is homogeneous.
                ready_tasks.append(task_spec)
                update_task_status_atomically(
                    task_spec.task_hash,
                    TaskStatus.PENDING.value,
                    TaskStatus.READY.value,
                )

    return ready_tasks


async def _check_task_dependencies(task_spec: TaskSpec) -> tuple[bool, bool]:
    """
    Check whether every dependency of a task is satisfied.

    Args:
        task_spec: The TaskSpec to check dependencies for

    Returns:
        Tuple of (is_ready, has_failed_dependency)
    """
    for dep_hash in get_task_dependencies(task_spec.task_hash):
        try:
            dep = TaskSpecModel.get(TaskSpecModel.task_hash == dep_hash)
        except TaskSpecModel.DoesNotExist:
            # Unknown dependency: not ready, but not failed either.
            return False, False

        if dep.status == TaskStatus.FAILED.value:
            # A failed dependency means this task must fail as well.
            return False, True
        if dep.status != TaskStatus.COMPLETED.value:
            # Something upstream is still pending/running.
            return False, False

    # Every dependency is COMPLETED (or there were none at all).
    return True, False


@app.post("/acquire_task/{task_hash}", response_model=Union[TaskSpec, None])
async def acquire_task(task_hash: str):
    """
    Acquire a task that is in READY status by setting its status to RUNNING.

    Args:
        task_hash: The hash of the task to acquire

    Returns:
        The TaskSpec if successfully acquired, otherwise None (including
        when the row vanished between the status update and the re-read).
    """
    updated = update_task_status_atomically(
        task_hash,
        TaskStatus.READY.value,
        TaskStatus.RUNNING.value,
    )
    if not updated:
        return None
    try:
        model = TaskSpecModel.get(TaskSpecModel.task_hash == task_hash)
    except TaskSpecModel.DoesNotExist:
        # Race: the task was deleted right after we acquired it.
        return None
    # BUG FIX: return the Pydantic TaskSpec (as every other endpoint does)
    # instead of the raw Peewee model, so response_model validation works.
    return peewee_to_pydantic(model, TaskSpec)


@app.delete("/task/{task_hash}")
async def delete_task(task_hash: str):
    """
    Delete a task and all tasks that depend on it.

    This deletes task records and dependency records from the database.
    Any files stored for these tasks should be removed separately.

    Args:
        task_hash: The hash of the task to delete

    Returns:
        Dict with deleted task hashes
    """
    try:
        with db.atomic():
            # Transitive dependents plus the task itself.
            doomed = set(get_dependent_tasks(task_hash))
            doomed.add(task_hash)

            # Remove dependency edges first, then the specs themselves.
            TaskDependencyModel.delete().where(
                TaskDependencyModel.task_hash.in_(doomed)
            ).execute()
            TaskSpecModel.delete().where(
                TaskSpecModel.task_hash.in_(doomed)
            ).execute()

            return {"deleted_tasks": list(doomed)}
    except Exception as e:
        logger.exception(e)
        raise HTTPException(status_code=500, detail=str(e))


@app.post("/task/{task_hash}/reset", response_model=List[str])
async def reset_task(task_hash: str):
    """
    Reset a task and all tasks that depend on it.

    This sets the status of the tasks back to PENDING.
    Any files stored for these tasks should be removed separately.
    The task records and dependency records are not removed.

    Args:
        task_hash: The hash of the task to reset

    Returns:
        List of the task hashes that were reset
    """
    try:
        with db.atomic():
            # The task itself plus everything downstream of it.
            targets = set(get_dependent_tasks(task_hash))
            targets.add(task_hash)

            TaskSpecModel.update(
                status=TaskStatus.PENDING.value, updated_at=datetime.now()
            ).where(TaskSpecModel.task_hash.in_(targets)).execute()

            return list(targets)
    except Exception as e:
        logger.exception(e)
        raise HTTPException(status_code=500, detail=str(e))


@app.get("/task/{task_hash}/dependencies", response_model=List[str])
async def get_dependencies(task_hash: str):
    """
    Get direct dependencies of a task.

    Args:
        task_hash: The hash of the task to get dependencies for

    Returns:
        List of task hashes that this task directly depends on
    """
    try:
        rows = (
            TaskDependencyModel.select(TaskDependencyModel.dependency_hash)
            .where(TaskDependencyModel.task_hash == task_hash)
            .execute()
        )
        return [row.dependency_hash for row in rows]
    except Exception as e:
        logger.exception(e)
        raise HTTPException(status_code=500, detail=str(e))


# In-flight uploads: path -> (ChunkIO buffer, background writer task).
# NOTE(review): put_file stores (ChunkIO, asyncio.Task) tuples here, so the
# annotation reflects the tuple rather than a bare ChunkIO.
temp_io_storage: Dict[str, tuple] = {}
# Serializes creation/lookup of entries in temp_io_storage.
temp_storage_operate_lock = asyncio.Lock()


@app.put("/file/{path:path}")
async def put_file(
    path: str,
    start_bytes: int,
    end_bytes: int,
    request: Request,
):
    """
    Stream one chunk of a file into object storage.

    The first chunk seen for a path creates a ChunkIO buffer plus a
    background task that writes the buffer via file_manager; later chunks
    feed the same buffer. An empty chunk (start_bytes == end_bytes)
    signals EOF: the writer task is awaited and the entry is discarded.

    Args:
        path: The path where the file will be stored
        start_bytes: The starting byte position of the chunk
        end_bytes: The ending byte position of the chunk
        request: The request object containing the chunk body

    Raises:
        HTTPException: 400 if the body length does not match the declared
            byte range.
    """
    async with temp_storage_operate_lock:
        if path not in temp_io_storage:
            logger.info(f"Start writing file {path} from {start_bytes} to {end_bytes}")
            chunk_io = ChunkIO()
            # get content type from request headers
            content_type = request.headers.get(
                "Content-Type", "application/octet-stream"
            )

            task = asyncio.create_task(
                file_manager.write_file(chunk_io, path, content_type)
            )
            temp_io_storage[path] = (chunk_io, task)
        else:
            chunk_io, task = temp_io_storage[path]
            logger.info(f"File {path} already in temp storage")
    data = await request.body()
    # BUG FIX: this was a bare `assert`, which is stripped under `python -O`
    # and otherwise surfaces as a 500; reject mismatched chunks with a 400.
    if len(data) != end_bytes - start_bytes:
        raise HTTPException(
            status_code=400,
            detail=(
                f"Body length {len(data)} does not match byte range "
                f"[{start_bytes}, {end_bytes})"
            ),
        )
    logger.info(f"Writing {len(data)} bytes to {path} from {start_bytes} to {end_bytes}")
    if len(data) == 0:
        data = None
    chunk_io.write(start_bytes, end_bytes, data)
    if data is None:
        logger.info(f"File {path} is EOF, waiting for task to complete")
        await task
        logger.info(f"File {path} task completed")
        # Remove the entry under the same lock that guards its creation so
        # a concurrent first chunk for this path cannot race the teardown.
        async with temp_storage_operate_lock:
            temp_io_storage.pop(path, None)

@app.get("/file_list")
async def get_file_list(
    task_hash: str | None = None,
    _type: str | None = None,
) -> List[str]:
    """
    List stored files.

    With no arguments, lists the top level non-recursively; with a
    task_hash, lists everything under it; with both, narrows the listing
    to the given type subfolder.
    """
    if task_hash is None:
        return file_manager.list_files("", recursive=False)
    prefix = f"{task_hash}/" if _type is None else f"{task_hash}/{_type}/"
    return file_manager.list_files(prefix, recursive=True)


@app.get("/file/{offset}/{length}/{path:path}")
async def get_file(offset: int, length: int, path: str) -> Response:
    """Return `length` bytes of the stored object at `path`, starting at `offset`."""
    payload = file_manager.read_chunk(path, offset, length)
    return Response(content=payload, media_type="application/octet-stream")


@app.delete("/file/{path:path}")
async def delete_file(path: str):
    """Remove every stored object under the given path prefix."""
    file_manager.delete_prefix(path)


# Ensure the database tables exist at import time, before requests arrive.
create_tables()

# if __name__ == "__main__":
#     import uvicorn

#     uvicorn.run(app, host="0.0.0.0", port=8000)
