"""
SQLAlchemy models for CodeMCP storage layer.

Defines database models for call trees, nodes, analysis results, and related entities.
"""

import json
from datetime import datetime
from typing import Dict, Any, List, Optional
from sqlalchemy import (
    Column, Integer, String, Text, Float, Boolean, DateTime, 
    JSON, ForeignKey, Index, UniqueConstraint
)
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import relationship, sessionmaker
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.dialects.postgresql import UUID
import uuid

try:
    from ..core.error_handler import log_debug
except ImportError:
    # Fallback for testing: when the package-relative import is unavailable
    # (e.g. this module is imported standalone), substitute a no-op logger
    # so module import never fails on the missing package context.
    def log_debug(msg): pass

# Shared declarative base: every model below registers its table on this
# Base's MetaData, so `Base.metadata.create_all(...)` creates the full schema.
Base = declarative_base()


class CallTreeModel(Base):
    """Database model for call trees.

    One row per analyzed call tree. Nodes and analysis results are attached
    via cascading relationships, so deleting a tree removes its children.
    """

    __tablename__ = "call_trees"

    # Primary key: UUID string, portable across database backends.
    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))

    # Tree metadata. root_node_id uses SET NULL so the tree row survives if
    # its root node row is deleted independently.
    root_node_id = Column(String, ForeignKey("call_tree_nodes.id", ondelete="SET NULL"), nullable=True)
    analysis_type = Column(String, nullable=False, default="unknown")
    language = Column(String, nullable=False, default="unknown")
    project_path = Column(String, nullable=True)

    # Tree statistics (denormalized counters maintained by the caller).
    total_nodes = Column(Integer, default=0)
    max_depth = Column(Integer, default=0)

    # Timestamps. NOTE(review): datetime.utcnow produces naive UTC datetimes
    # and is deprecated in Python 3.12+; switching to timezone-aware values
    # would change stored data, so it is kept as-is here.
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

    # Additional metadata stored as JSON. Named `extra_metadata` because
    # `metadata` is reserved on SQLAlchemy declarative classes.
    extra_metadata = Column(JSON, default=dict)

    # Relationships. post_update=True breaks the circular dependency between
    # call_trees.root_node_id and call_tree_nodes.call_tree_id by issuing a
    # separate UPDATE for the root pointer after the INSERTs.
    nodes = relationship("CallTreeNodeModel", back_populates="call_tree", cascade="all, delete-orphan")
    root_node = relationship("CallTreeNodeModel", foreign_keys=[root_node_id], post_update=True)
    analysis_results = relationship("AnalysisResultModel", back_populates="call_tree", cascade="all, delete-orphan")

    # Indexes for better query performance on common lookup columns.
    __table_args__ = (
        Index("idx_call_trees_language", "language"),
        Index("idx_call_trees_created_at", "created_at"),
        Index("idx_call_trees_project_path", "project_path"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to a JSON-serializable dictionary.

        Datetimes are rendered as ISO-8601 strings (or None when unset).
        """
        return {
            "id": self.id,
            "root_node_id": self.root_node_id,
            "analysis_type": self.analysis_type,
            "language": self.language,
            "project_path": self.project_path,
            "total_nodes": self.total_nodes,
            "max_depth": self.max_depth,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
            # BUG FIX: was `self.metadata`, which on a declarative model is the
            # SQLAlchemy MetaData registry, not this row's JSON column.
            "metadata": self.extra_metadata or {}
        }


class CallTreeNodeModel(Base):
    """Database model for call tree nodes.

    Each row is one function/method/class/section discovered during analysis,
    owned by a single call tree (CASCADE delete).
    """

    __tablename__ = "call_tree_nodes"

    # Primary key: UUID string, portable across database backends.
    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))

    # Foreign key to the owning call tree; rows are removed with the tree.
    call_tree_id = Column(String, ForeignKey("call_trees.id", ondelete="CASCADE"), nullable=False)

    # Node identification (file/line pinpoints the definition site).
    name = Column(String, nullable=False)
    file_path = Column(String, nullable=False)
    line_number = Column(Integer, nullable=False)
    language = Column(String, nullable=False)
    node_type = Column(String, nullable=False, default="function")  # function, method, class, section, etc.

    # Function/method details.
    signature = Column(Text, nullable=True)
    documentation = Column(Text, nullable=True)

    # Call relationships (stored as JSON arrays of node IDs rather than a
    # join table; the arrays are opaque to the database).
    caller_node_ids = Column(JSON, default=list)
    callee_node_ids = Column(JSON, default=list)

    # Analysis metadata (LLM-generated descriptions and scoring).
    call_frequency = Column(Integer, default=0)
    call_context = Column(Text, default="")
    short_description = Column(Text, default="")
    llm_explanation = Column(Text, default="")
    semantic_tags = Column(JSON, default=list)
    complexity_score = Column(Float, default=0.0)

    # Timestamps (naive UTC; see note on CallTreeModel).
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

    # Additional metadata stored as JSON.
    # BUG FIX: the attribute was named `metadata`, which is reserved on
    # SQLAlchemy declarative classes and raises InvalidRequestError at class
    # definition time (the module could not even import). Renamed to
    # `extra_metadata`, consistent with CallTreeModel.
    extra_metadata = Column(JSON, default=dict)

    # Relationships
    call_tree = relationship("CallTreeModel", back_populates="nodes")

    # Indexes and constraints; a node is unique per (tree, file, line, name).
    __table_args__ = (
        Index("idx_nodes_call_tree_id", "call_tree_id"),
        Index("idx_nodes_name", "name"),
        Index("idx_nodes_file_path", "file_path"),
        Index("idx_nodes_line_number", "line_number"),
        Index("idx_nodes_language", "language"),
        Index("idx_nodes_node_type", "node_type"),
        UniqueConstraint("call_tree_id", "file_path", "line_number", "name", name="uq_node_location"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to a JSON-serializable dictionary.

        JSON-array columns default to [] and datetimes are rendered as
        ISO-8601 strings (or None when unset).
        """
        return {
            "id": self.id,
            "call_tree_id": self.call_tree_id,
            "name": self.name,
            "file_path": self.file_path,
            "line_number": self.line_number,
            "language": self.language,
            "node_type": self.node_type,
            "signature": self.signature,
            "documentation": self.documentation,
            "caller_node_ids": self.caller_node_ids or [],
            "callee_node_ids": self.callee_node_ids or [],
            "call_frequency": self.call_frequency,
            "call_context": self.call_context,
            "short_description": self.short_description,
            "llm_explanation": self.llm_explanation,
            "semantic_tags": self.semantic_tags or [],
            "complexity_score": self.complexity_score,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "updated_at": self.updated_at.isoformat() if self.updated_at else None,
            # External key stays "metadata" so consumers are unaffected.
            "metadata": self.extra_metadata or {}
        }


class AnalysisResultModel(Base):
    """Database model for analysis results.

    Stores the JSON output of an analysis run, optionally tied to a call
    tree, with cache-invalidation hashes and TTL-style expiration fields.
    """

    __tablename__ = "analysis_results"

    # Primary key: UUID string, portable across database backends.
    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))

    # Foreign key to call tree (optional; CASCADE when the tree is deleted).
    call_tree_id = Column(String, ForeignKey("call_trees.id", ondelete="CASCADE"), nullable=True)

    # Analysis identification
    analysis_type = Column(String, nullable=False)  # "call_tree", "dependency", "complexity", etc.
    target_path = Column(String, nullable=False)  # File or directory path analyzed
    language = Column(String, nullable=True)

    # Analysis results (stored as JSON)
    results = Column(JSON, nullable=False)

    # Analysis metadata
    success = Column(Boolean, default=True)
    error_message = Column(Text, nullable=True)
    processing_time_ms = Column(Float, nullable=True)

    # Cache and invalidation
    cache_key = Column(String, nullable=True)
    file_hash = Column(String, nullable=True)  # For cache invalidation
    dependencies_hash = Column(String, nullable=True)  # For dependency-based invalidation

    # Timestamps (naive UTC; see note on CallTreeModel)
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    expires_at = Column(DateTime, nullable=True)  # For TTL-based cache expiration

    # Additional metadata stored as JSON.
    # BUG FIX: the attribute was named `metadata`, which is reserved on
    # SQLAlchemy declarative classes and raises InvalidRequestError at class
    # definition time. Renamed to `extra_metadata`, consistent with
    # CallTreeModel.
    extra_metadata = Column(JSON, default=dict)

    # Relationships
    call_tree = relationship("CallTreeModel", back_populates="analysis_results")

    # Indexes and constraints. cache_key is both indexed and unique; NULLs
    # are permitted by the unique constraint on most backends.
    __table_args__ = (
        Index("idx_analysis_results_type", "analysis_type"),
        Index("idx_analysis_results_target_path", "target_path"),
        Index("idx_analysis_results_language", "language"),
        Index("idx_analysis_results_cache_key", "cache_key"),
        Index("idx_analysis_results_file_hash", "file_hash"),
        Index("idx_analysis_results_created_at", "created_at"),
        Index("idx_analysis_results_expires_at", "expires_at"),
        UniqueConstraint("cache_key", name="uq_analysis_cache_key"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Convert model to a JSON-serializable dictionary.

        Includes the full `results` payload; datetimes are rendered as
        ISO-8601 strings (or None when unset).
        """
        return {
            "id": self.id,
            "call_tree_id": self.call_tree_id,
            "analysis_type": self.analysis_type,
            "target_path": self.target_path,
            "language": self.language,
            "results": self.results,
            "success": self.success,
            "error_message": self.error_message,
            "processing_time_ms": self.processing_time_ms,
            "cache_key": self.cache_key,
            "file_hash": self.file_hash,
            "dependencies_hash": self.dependencies_hash,
            "created_at": self.created_at.isoformat() if self.created_at else None,
            "expires_at": self.expires_at.isoformat() if self.expires_at else None,
            # External key stays "metadata" so consumers are unaffected.
            "metadata": self.extra_metadata or {}
        }


class FileChangeModel(Base):
    """Database model for tracking file changes for cache invalidation.

    One row per file path (enforced by a unique constraint); the stored hash,
    size, and mtime let callers detect whether a file has changed since the
    last analysis.
    """

    __tablename__ = "file_changes"

    # Primary key: random UUID string generated per row.
    id = Column(String, primary_key=True, default=lambda: str(uuid.uuid4()))

    # Snapshot of the file's identity and content fingerprint.
    file_path = Column(String, nullable=False)
    file_hash = Column(String, nullable=False)
    file_size = Column(Integer, nullable=False)
    file_mtime = Column(DateTime, nullable=False)

    # Optional project context for scoping queries.
    project_path = Column(String, nullable=True)
    language = Column(String, nullable=True)

    # Row timestamps; updated_at refreshes automatically on UPDATE.
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    updated_at = Column(DateTime, default=datetime.utcnow, onupdate=datetime.utcnow, nullable=False)

    # Lookup indexes plus the one-row-per-path uniqueness guarantee.
    __table_args__ = (
        Index("idx_file_changes_file_path", "file_path"),
        Index("idx_file_changes_project_path", "project_path"),
        Index("idx_file_changes_language", "language"),
        Index("idx_file_changes_updated_at", "updated_at"),
        UniqueConstraint("file_path", name="uq_file_path"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize this record to a plain dictionary (datetimes as ISO-8601)."""
        def _iso(dt):
            # Render a datetime as ISO-8601, passing None through unchanged.
            return dt.isoformat() if dt else None

        return {
            "id": self.id,
            "file_path": self.file_path,
            "file_hash": self.file_hash,
            "file_size": self.file_size,
            "file_mtime": _iso(self.file_mtime),
            "project_path": self.project_path,
            "language": self.language,
            "created_at": _iso(self.created_at),
            "updated_at": _iso(self.updated_at)
        }


class CacheEntryModel(Base):
    """Database model for cache entries when using database-backed caching.

    The cache key itself is the primary key; payloads are stored serialized
    in `cache_data` alongside TTL, access, and size bookkeeping.
    """

    __tablename__ = "cache_entries"

    # Primary key: the cache key string itself.
    cache_key = Column(String, primary_key=True)

    # Serialized payload plus the codec used to produce it ("json", "pickle", ...).
    cache_data = Column(Text, nullable=False)
    data_type = Column(String, nullable=False)

    # Grouping and invalidation: namespace scoping and tag-based purges.
    cache_namespace = Column(String, nullable=False, default="default")
    cache_tags = Column(JSON, default=list)

    # TTL/expiration plus access bookkeeping for eviction policies.
    created_at = Column(DateTime, default=datetime.utcnow, nullable=False)
    expires_at = Column(DateTime, nullable=True)
    access_count = Column(Integer, default=0)
    last_accessed = Column(DateTime, default=datetime.utcnow, nullable=False)

    # Payload size, for cache-size accounting.
    data_size_bytes = Column(Integer, nullable=False)

    # Indexes supporting namespace queries, expiry sweeps, and LRU eviction.
    __table_args__ = (
        Index("idx_cache_entries_namespace", "cache_namespace"),
        Index("idx_cache_entries_expires_at", "expires_at"),
        Index("idx_cache_entries_last_accessed", "last_accessed"),
        Index("idx_cache_entries_created_at", "created_at"),
    )

    def to_dict(self) -> Dict[str, Any]:
        """Serialize cache-entry bookkeeping to a dictionary.

        Note: the serialized payload (`cache_data`) is intentionally omitted;
        only metadata about the entry is returned.
        """
        created = self.created_at.isoformat() if self.created_at else None
        expires = self.expires_at.isoformat() if self.expires_at else None
        accessed = self.last_accessed.isoformat() if self.last_accessed else None

        return {
            "cache_key": self.cache_key,
            "data_type": self.data_type,
            "cache_namespace": self.cache_namespace,
            "cache_tags": self.cache_tags or [],
            "created_at": created,
            "expires_at": expires,
            "access_count": self.access_count,
            "last_accessed": accessed,
            "data_size_bytes": self.data_size_bytes
        }