import os
from collections import deque
from typing import List, Set, Union

from peewee import SQL, AutoField, CharField, DateTimeField, IntegerField, Model, fn
from playhouse.postgres_ext import JSONField, PostgresqlExtDatabase

from ..model import TaskSpec, TaskStatus

# Database connection settings
db_name = os.getenv("DB_NAME", "tasks_db")
db_user = os.getenv("DB_USER", "postgres")
db_password = os.getenv("DB_PASSWORD", "postgres")
db_host = os.getenv("DB_HOST", "localhost")
db_port = int(os.getenv("DB_PORT", "5432"))

# Initialize database connection
db = PostgresqlExtDatabase(
    db_name, user=db_user, password=db_password, host=db_host, port=db_port
)


class BaseModel(Model):
    """Base model class that sets the database."""

    class Meta:
        database = db


class TaskSpecModel(BaseModel):
    """
    Peewee model for storing TaskSpec objects in the database.

    Fields:
    - name: the name of the task, defined by the user, not unique.
    - commit: the commit hash of the code to run
    - fetch_url: the url to fetch the code from
    - task: the code to run (python function path)
    - task_hash: the hash of the task (for tracking code changes)
    - file_params: parameters passed as file paths (stored as JSON)
    - value_params: parameters passed as values (stored as JSON)
    - output_params: parameters passed as outputs from other tasks (stored as JSON)
    - status: the current status of the task
    - created_at: when the task was created
    - updated_at: when the task was last updated
    """

    id = AutoField()
    name = CharField(default='')
    commit = CharField()
    fetch_url = CharField()
    task = CharField()
    task_hash = CharField(unique=True, index=True)
    file_params = JSONField(default={})
    value_params = JSONField(default={})
    output_params = JSONField(default={})

    # Additional fields for task tracking
    status = CharField(
        choices=[(status.value, status.value) for status in TaskStatus],
        default=TaskStatus.PENDING.value,
    )
    created_at = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])
    updated_at = DateTimeField(constraints=[SQL("DEFAULT CURRENT_TIMESTAMP")])

    class Meta:
        table_name = "task_specs"
        indexes = (
            # Create an index on task_hash for faster lookups
            (("task_hash",), True),
        )


class TaskDependencyModel(BaseModel):
    """
    Peewee model for storing task dependencies.
    
    Fields:
    - task_hash: the hash of the task
    - dependency_hash: the hash of the dependency task
    """
    id = AutoField()
    task_hash = CharField(index=True)
    dependency_hash = CharField(index=True)
    
    class Meta:
        table_name = "task_dependencies"
        indexes = (
            # Create indexes on both fields for faster lookups
            (("task_hash",), False),
            (("dependency_hash",), False),
            # Create a unique compound index to prevent duplicate dependencies
            (("task_hash", "dependency_hash"), True),
        )


def create_tables() -> None:
    """Create database tables if they don't exist."""
    # List of all model classes that need tables
    model_classes = [TaskSpecModel, TaskDependencyModel]
    with db:
        for model_class in model_classes:
            if not model_class.table_exists():
                db.create_tables([model_class])
                print(f"Created {model_class.__name__} table")


def task_spec_to_model(task_spec: TaskSpec) -> TaskSpecModel:
    """Convert a TaskSpec Pydantic model to a TaskSpecModel Peewee model"""
    return TaskSpecModel(
        name=task_spec.name,
        commit=task_spec.commit,
        fetch_url=task_spec.fetch_url,
        task=task_spec.task,
        task_hash=task_spec.task_hash,
        file_params=task_spec.file_params,
        value_params=task_spec.value_params,
        output_params=task_spec.output_params,
    )


def update_task_status_atomically(
    task_hash: str, prev_status: Union[str, List[str], Set[str]], target_status: str
) -> bool:
    """
    Atomically update the status of a task if its current status matches the prev_status condition.

    This function ensures atomicity by using database features to handle concurrent updates.

    Args:
        task_hash: The hash of the task to update
        prev_status: A string or collection of strings representing allowed previous statuses
        target_status: The new status to set

    Returns:
        bool: True if the update was successful, False if the task was not found or
              the current status didn't match prev_status
    """
    if isinstance(prev_status, str):
        prev_status = [prev_status]

    with db.atomic():
        # The UPDATE query with a WHERE condition handles atomicity
        query = TaskSpecModel.update(status=target_status, updated_at=fn.now()).where(
            (TaskSpecModel.task_hash == task_hash)
            & (TaskSpecModel.status.in_(prev_status))
        )

        # Execute the query and get number of rows affected
        rows_updated = query.execute()

        # Return True if at least one row was updated
        return rows_updated > 0



def get_task_dependencies(task_hash: str) -> List[str]:
    """
    Get all dependencies for a given task using BFS traversal.
    
    Args:
        task_hash: The hash of the task to get dependencies for
        
    Returns:
        List[str]: List of all dependency task hashes (all layers)
    """
    # BFS to find all layers of dependencies
    all_dependencies = set()
    queue = [task_hash]
    visited = {task_hash}
    
    while queue:
        current_task = queue.pop(0)
        
        # Get direct dependencies
        query = TaskDependencyModel.select(
            TaskDependencyModel.dependency_hash
        ).where(
            TaskDependencyModel.task_hash == current_task
        )
        
        direct_deps = [dep.dependency_hash for dep in query]
        for dep in direct_deps:
            if dep not in visited:
                all_dependencies.add(dep)
                visited.add(dep)
                queue.append(dep)
                
    return list(all_dependencies)


def get_dependent_tasks(dependency_hash: str) -> List[str]:
    """
    Get all tasks that depend on a given task using BFS traversal.
    
    Args:
        dependency_hash: The hash of the dependency task
        
    Returns:
        List[str]: List of all task hashes that depend on this task (all layers)
    """
    # BFS to find all layers of dependent tasks
    all_dependents = set()
    queue = [dependency_hash]
    visited = {dependency_hash}
    
    while queue:
        current_dep = queue.pop(0)
        
        # Get direct dependent tasks
        query = TaskDependencyModel.select(
            TaskDependencyModel.task_hash
        ).where(
            TaskDependencyModel.dependency_hash == current_dep
        )
        
        direct_deps = [dep.task_hash for dep in query]
        for dep in direct_deps:
            if dep not in visited:
                all_dependents.add(dep)
                visited.add(dep)
                queue.append(dep)
                
    return list(all_dependents)


def add_task_dependencies(task_hash: str, dependencies: List[str]) -> bool:
    """
    Add a task and its dependencies to the database.
    
    Note: This function should be called within an existing db.atomic() context
    for transaction support.
    
    Args:
        task_hash: The hash of the task
        dependencies: List of dependency task hashes to add
        
    Returns:
        bool: True if all dependencies were added successfully, False otherwise
    """
    if not dependencies:
        return True
    
    try:
        # Prepare data for bulk insertion
        dependency_data = [
            {'task_hash': task_hash, 'dependency_hash': dep_hash}
            for dep_hash in dependencies
        ]
        
        # Use bulk insert for better performance
        TaskDependencyModel.insert_many(dependency_data).execute()
        
        return True
    except Exception as e:
        print(f"Error adding task dependencies: {e}")
        # Let the caller handle transaction rollback
        return False


def delete_task_dependencies(task_hash: str) -> bool:
    """
    Delete a task and all its dependencies from the database with transaction support.
    
    Args:
        task_hash: The hash of the task to delete
        
    Returns:
        bool: True if all dependencies were deleted successfully, False otherwise
    """
    try:
        with db.atomic():
            # Get all dependencies (all layers) to be deleted
            dependencies = get_task_dependencies(task_hash)
            
            # Delete dependency records for the task itself
            TaskDependencyModel.delete().where(
                (TaskDependencyModel.task_hash == task_hash) |
                (TaskDependencyModel.dependency_hash == task_hash)
            ).execute()
            
            # Delete dependency records for all dependent tasks
            if dependencies:
                TaskDependencyModel.delete().where(
                    (TaskDependencyModel.task_hash.in_(dependencies)) |
                    (TaskDependencyModel.dependency_hash.in_(dependencies))
                ).execute()
            
            return True
    except Exception as e:
        print(f"Error deleting task dependencies: {e}")
        # Transaction will be automatically rolled back on exception
        return False


# Create the tables when this module is executed directly as a script
# (the __main__ guard below means nothing runs on import).
if __name__ == "__main__":
    create_tables()
