"""
Optimized Directory Tree Service
Uses directory_id foreign key for efficient queries
"""

from typing import Dict, List, Optional, Any
from collections import defaultdict
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy import select, func, and_, or_, case
import logging

from app.models.extractor import Metric, MetricOwner
from app.models.business import TeamFileAssignment, Team, Directory, MetricTeamAssignment

logger = logging.getLogger(__name__)


class DirectoryNodeOptimized:
    """A node in the directory tree, backed by a Directory database row.

    Carries identity/hierarchy fields plus aggregate statistics that are
    filled in later from database queries (all counters start at zero).
    """

    def __init__(
        self,
        directory_id: int,
        name: str,
        path: str,
        level: int = 0,
        parent_id: Optional[int] = None
    ):
        # Identity and position within the hierarchy.
        self.directory_id = directory_id
        self.name = name
        self.path = path
        self.level = level
        self.parent_id = parent_id
        self.children: List['DirectoryNodeOptimized'] = []

        # Aggregate statistics, populated from the database after creation.
        self.metric_count = 0
        self.analyzed_count = 0
        self.not_analyzed_count = 0
        self.not_needed_count = 0
        self.needs_collection_count = 0
        self.team_count = 0  # count only; detailed assignments are not stored
        self.team_assignments: List[str] = []  # retained (empty) for API compatibility
        self.metric_types: Dict[str, int] = defaultdict(int)

    def add_child(self, child: 'DirectoryNodeOptimized'):
        """Attach *child* as a subdirectory of this node."""
        self.children.append(child)

    def to_dict(self, max_depth: Optional[int] = None) -> Dict[str, Any]:
        """Serialize this node (and, depth permitting, its subtree) to a dict."""
        payload: Dict[str, Any] = {
            "id": self.directory_id,
            "name": self.name,
            "path": self.path,
            "level": self.level,
            "metric_count": self.metric_count,
            "analyzed_count": self.analyzed_count,
            "not_analyzed_count": self.not_analyzed_count,
            "not_needed_count": self.not_needed_count,
            "needs_collection_count": self.needs_collection_count,
            "team_count": self.team_count,  # team count only, for performance
            "metric_types": dict(self.metric_types),
            "analysis_progress": self._calculate_analysis_progress(),
        }

        # Recurse into children only while this node sits above the depth limit.
        within_depth = max_depth is None or self.level < max_depth
        if self.children and within_depth:
            payload["children"] = [c.to_dict(max_depth) for c in self.children]

        return payload

    def _calculate_analysis_progress(self) -> Dict[str, Any]:
        """Return analysis-progress percentages; all zeros when no metrics."""
        total = self.metric_count
        if not total:
            return {
                "analyzed_percentage": 0,
                "not_analyzed_percentage": 0,
                "overall_progress": 0,
            }

        analyzed_pct = (self.analyzed_count / total) * 100
        pending_pct = (self.not_analyzed_count / total) * 100

        return {
            "analyzed_percentage": round(analyzed_pct, 1),
            "not_analyzed_percentage": round(pending_pct, 1),
            # analyzed already covers not_needed + needs_collection
            "overall_progress": round(analyzed_pct, 1),
        }


class DirectoryTreeServiceOptimized:
    """Optimized directory tree service using directory_id foreign key.

    Builds a hierarchical view of directories with per-directory metric
    counts, analysis-status totals and team counts.  Metric rows live in the
    extractor database and team/directory rows in the business database, so
    most entry points take both sessions.

    NOTE(review): some queries join Directory with Metric inside a single
    session (e.g. _populate_team_counts runs against business_db while
    get_directory_statistics runs a similar join against extractor_db) —
    this assumes both tables are reachable from either session; confirm
    against the actual schema/bind configuration.
    """

    def __init__(self):
        # Stateless service: all state lives in the per-call DB sessions.
        pass

    async def get_directory_statistics(
        self,
        extractor_db: AsyncSession,
        business_db: AsyncSession,
        directory_path: Optional[str] = None
    ) -> Dict[str, Any]:
        """
        Get directory statistics using optimized JOIN queries.

        Args:
            extractor_db: Session for the metrics (extractor) database.
            business_db: Session for the teams/business database.
            directory_path: Optional path-prefix filter; None means all.

        Returns:
            Dict with "success", "data" (list of root-node dicts),
            "stats" (global totals) and "total_directories".

        Raises:
            Re-raises any database error after logging it.
        """
        try:
            logger.info(f"🔍 [Optimized] Getting directory statistics for: {directory_path or 'all'}")

            # Per-directory metric counts; only histogram and ukm_event
            # metric types participate in the statistics.
            query = select(
                Directory.id,
                Directory.path,
                Directory.name,
                Directory.level,
                Directory.parent_id,
                func.count(Metric.id).label('total_metrics')
            ).outerjoin(
                Metric, and_(
                    Directory.id == Metric.directory_id,
                    Metric.type.in_(['histogram', 'ukm_event'])
                )
            ).group_by(
                Directory.id, Directory.path, Directory.name, Directory.level, Directory.parent_id
            )

            # Optional subtree filter (prefix match on the path).
            if directory_path:
                query = query.where(Directory.path.like(f"{directory_path}%"))

            result = await extractor_db.execute(query)
            directories_data = result.fetchall()

            logger.info(f"📊 [Optimized] Retrieved {len(directories_data)} directories")

            # Build directory tree
            root_nodes = await self._build_directory_tree(directories_data, business_db, extractor_db)

            # Calculate global statistics
            stats = self._calculate_global_statistics(directories_data)

            response = {
                "success": True,
                "data": root_nodes,
                "stats": stats,
                "total_directories": len(directories_data)
            }

            return response

        except Exception as e:
            logger.error(f"❌ [Optimized] Failed to get directory statistics: {str(e)}")
            raise

    async def _build_directory_tree(
        self,
        directories_data: List,
        business_db: AsyncSession,
        extractor_db: AsyncSession
    ) -> List[Dict[str, Any]]:
        """Build the directory tree from query rows and return root dicts.

        Rows must expose id/name/path/level/parent_id/total_metrics.
        Parent-child links are resolved in-memory via parent_id; analysis
        and team statistics are populated by the helper methods below.
        """

        # Create directory nodes
        directory_nodes = {}
        root_nodes = []

        for dir_data in directories_data:
            node = DirectoryNodeOptimized(
                directory_id=dir_data.id,
                name=dir_data.name,
                path=dir_data.path,
                level=dir_data.level,
                parent_id=dir_data.parent_id
            )

            # Basic metric count from the grouped query; analysis counters
            # start at zero and are filled in by _populate_analysis_statistics.
            node.metric_count = dir_data.total_metrics or 0
            node.not_needed_count = 0
            node.needs_collection_count = 0
            node.not_analyzed_count = 0
            node.analyzed_count = 0

            directory_nodes[dir_data.id] = node

            # Track root nodes (no parent)
            if dir_data.parent_id is None:
                root_nodes.append(node)

        # Build parent-child relationships (parents missing from the result
        # set — e.g. filtered out — simply leave the child unattached).
        for node in directory_nodes.values():
            if node.parent_id and node.parent_id in directory_nodes:
                parent = directory_nodes[node.parent_id]
                parent.add_child(node)

        # Team counts only (not detailed assignments) for performance.
        await self._populate_team_counts(directory_nodes, business_db, extractor_db)

        # Analysis status statistics from the unified database.
        await self._populate_analysis_statistics(directory_nodes, extractor_db)

        # Convert to dict format
        return [node.to_dict() for node in root_nodes]

    async def _populate_team_counts(
        self,
        directory_nodes: Dict[int, DirectoryNodeOptimized],
        business_db: AsyncSession,
        extractor_db: AsyncSession
    ):
        """Populate distinct-team counts per directory (performance-optimized).

        Uses Metric.team_id directly instead of joining MetricTeamAssignment.
        On failure, team info is zeroed out on every node rather than left
        stale.
        """
        try:
            # Get all directory IDs
            directory_ids = list(directory_nodes.keys())
            if not directory_ids:
                return

            logger.info(f"🔍 [TeamCounts] Populating team counts for {len(directory_ids)} directories")

            # One grouped query: distinct team ids per directory, restricted
            # to the metric types that participate in statistics.
            team_counts_query = select(
                Directory.id,
                func.count(func.distinct(Metric.team_id)).label('team_count')
            ).outerjoin(
                Metric, and_(
                    Directory.id == Metric.directory_id,
                    Metric.type.in_(['histogram', 'ukm_event'])
                )
            ).where(
                Directory.id.in_(directory_ids)
            ).group_by(
                Directory.id
            )

            result = await business_db.execute(team_counts_query)
            team_counts_data = result.fetchall()

            # Update node team counts
            for directory_id, team_count in team_counts_data:
                if directory_id in directory_nodes:
                    node = directory_nodes[directory_id]
                    node.team_count = team_count or 0
                    logger.debug(f"📊 [TeamCounts] Directory {directory_id}: {team_count} teams")

            # Every node already has team_count=0 from __init__, so only the
            # (kept empty) compatibility field needs refreshing here.
            for node in directory_nodes.values():
                node.team_assignments = []  # Keep empty for compatibility

        except Exception as e:
            logger.error(f"❌ Failed to populate team counts: {e}")
            # Fallback: zero out team info on all nodes.  (FIX: previously
            # only team_assignments was cleared, leaving team_count stale.)
            for node in directory_nodes.values():
                node.team_count = 0
                node.team_assignments = []

    async def _populate_team_assignments(
        self,
        directory_nodes: Dict[int, DirectoryNodeOptimized],
        business_db: AsyncSession,
        extractor_db: AsyncSession
    ):
        """Populate team assignments for directories from metric-level data.

        For each directory: collect its metric ids from extractor_db, look
        up their MetricTeamAssignment rows in business_db, and fall back to
        the directory-level Directory.team_id when no metric has a team.
        """
        try:
            # Get all directory IDs
            directory_ids = list(directory_nodes.keys())
            if not directory_ids:
                return

            logger.info(f"🔍 [TeamAssignments] Populating team assignments for {len(directory_ids)} directories")

            # Get directory paths for each directory ID from business_db
            directory_query = select(Directory.id, Directory.path).where(Directory.id.in_(directory_ids))
            directory_result = await business_db.execute(directory_query)
            directory_paths = {row.id: row.path for row in directory_result.fetchall()}

            # For each directory, get team assignments from metric_team_assignments
            for directory_id, directory_path in directory_paths.items():
                if directory_id not in directory_nodes:
                    continue

                node = directory_nodes[directory_id]

                # Metrics live in extractor_db, assignments in business_db,
                # so the lookup is split across the two sessions.
                metrics_query = select(Metric.id).where(
                    and_(
                        Metric.directory_id == directory_id,
                        Metric.type.in_(['histogram', 'ukm_event'])
                    )
                )
                metrics_result = await extractor_db.execute(metrics_query)
                metric_ids = [row.id for row in metrics_result.fetchall()]

                if not metric_ids:
                    logger.debug(f"📂 [TeamAssignments] No metrics found for directory {directory_path}")
                    continue

                # Team assignments for these metrics, largest team first.
                team_assignments_query = select(
                    MetricTeamAssignment.team_name,
                    func.count(MetricTeamAssignment.metric_id).label('metric_count')
                ).where(
                    MetricTeamAssignment.metric_id.in_(metric_ids)
                ).group_by(
                    MetricTeamAssignment.team_name
                ).order_by(
                    func.count(MetricTeamAssignment.metric_id).desc()
                )

                team_result = await business_db.execute(team_assignments_query)
                team_assignments = team_result.fetchall()

                # Populate team assignments and count total assigned metrics
                total_assigned_metrics = 0
                for team_assignment in team_assignments:
                    team_name = team_assignment.team_name
                    metric_count = team_assignment.metric_count

                    if team_name:
                        node.team_assignments.append(team_name)
                        total_assigned_metrics += metric_count
                        logger.debug(f"📊 [TeamAssignments] Directory {directory_path}: Team {team_name} has {metric_count} metrics")

                # Directory-level team assignment as a fallback.
                # FIX: the original outer-joined Team onto a select that was
                # already FROM Team; join Directory to Team instead.
                if not node.team_assignments:
                    dir_team_query = select(Team.name).join(
                        Directory, Directory.team_id == Team.id
                    ).where(Directory.id == directory_id).limit(1)

                    dir_team_result = await business_db.execute(dir_team_query)
                    dir_team = dir_team_result.scalar_one_or_none()

                    if dir_team:
                        node.team_assignments.append(dir_team)
                        logger.debug(f"🏢 [TeamAssignments] Directory {directory_path}: Using directory-level team {dir_team}")

                logger.info(f"✅ [TeamAssignments] Directory {directory_path}: Found {len(node.team_assignments)} teams, {total_assigned_metrics} total assigned metrics")

        except Exception as e:
            logger.error(f"❌ Failed to populate team assignments: {e}")
            # Fallback: try to get directory-level assignments for every node.
            try:
                for directory_id, node in directory_nodes.items():
                    dir_team_query = select(Team.name).join(
                        Directory, Directory.team_id == Team.id
                    ).where(Directory.id == directory_id).limit(1)

                    dir_team_result = await business_db.execute(dir_team_query)
                    dir_team = dir_team_result.scalar_one_or_none()

                    if dir_team and dir_team not in node.team_assignments:
                        node.team_assignments.append(dir_team)
            except Exception as fallback_e:
                logger.warning(f"⚠️ Fallback team assignment also failed: {fallback_e}")

    async def _populate_analysis_statistics(
        self,
        directory_nodes: Dict[int, DirectoryNodeOptimized],
        db: AsyncSession
    ):
        """Populate analysis-status statistics for directories.

        FIX: a single grouped query now covers all directories at once; the
        previous implementation issued one query per directory (N+1).
        """
        try:
            # Get all directory IDs
            directory_ids = list(directory_nodes.keys())
            if not directory_ids:
                return

            logger.info(f"🔍 [AnalysisStats] Populating analysis statistics for {len(directory_ids)} directories")

            # Counts per (directory, analysis_status), restricted to the
            # metric types that participate in statistics.
            analysis_query = select(
                Metric.directory_id,
                Metric.analysis_status,
                func.count(Metric.id).label('count')
            ).where(
                and_(
                    Metric.directory_id.in_(directory_ids),
                    Metric.type.in_(['histogram', 'ukm_event'])
                )
            ).group_by(
                Metric.directory_id,
                Metric.analysis_status
            )

            result = await db.execute(analysis_query)
            stats_by_directory: Dict[int, Dict[Any, int]] = defaultdict(dict)
            for row in result.fetchall():
                stats_by_directory[row.directory_id][row.analysis_status] = row.count

            for directory_id in directory_ids:
                node = directory_nodes[directory_id]
                analysis_stats = stats_by_directory.get(directory_id, {})

                # "Analyzed" means a decision was recorded: either no data is
                # needed or collection is required.  Anything else (including
                # NULL/NOT_ANALYZED) counts as not yet analyzed.
                analyzed_count = 0
                not_needed_count = 0
                needs_collection_count = 0
                not_analyzed_count = 0

                for status, count in analysis_stats.items():
                    if status == 'NOT_NEEDED':
                        analyzed_count += count
                        not_needed_count += count
                    elif status == 'NEEDS_COLLECTION':
                        analyzed_count += count
                        needs_collection_count += count
                    else:  # NOT_ANALYZED or null
                        not_analyzed_count += count

                # Update node statistics
                node.analyzed_count = analyzed_count
                node.not_needed_count = not_needed_count
                node.needs_collection_count = needs_collection_count
                node.not_analyzed_count = not_analyzed_count

                logger.debug(f"📊 [AnalysisStats] Directory {directory_id}: "
                            f"total={node.metric_count}, analyzed={analyzed_count}, "
                            f"not_needed={not_needed_count}, needs_collection={needs_collection_count}, "
                            f"not_analyzed={not_analyzed_count}")

        except Exception as e:
            logger.error(f"❌ Failed to populate analysis statistics: {e}")
            # Fallback: treat every metric as not yet analyzed.
            for node in directory_nodes.values():
                node.not_analyzed_count = node.metric_count

    def _calculate_global_statistics(self, directories_data: List) -> Dict[str, Any]:
        """Calculate global statistics across all directories.

        Only total metric counts are available at this point, so the
        analyzed/not-analyzed split is a deliberate placeholder (0 / total).
        """
        total_metrics = sum(d.total_metrics or 0 for d in directories_data)
        total_analyzed = 0
        total_not_analyzed = total_metrics

        return {
            "total_metrics": total_metrics,
            "total_analyzed": total_analyzed,
            "total_not_analyzed": total_not_analyzed,
            "analyzed_percentage": round((total_analyzed / total_metrics * 100) if total_metrics > 0 else 0, 1),
            "total_directories": len(directories_data)
        }

    async def get_directory_details(
        self,
        extractor_db: AsyncSession,
        business_db: AsyncSession,
        directory_path: str,
        include_metrics: bool = False
    ) -> Dict[str, Any]:
        """Get detailed information for a specific directory.

        Args:
            extractor_db: Session for the metrics database.
            business_db: Session for the teams database.
            directory_path: Exact directory path to look up.
            include_metrics: When True, include the full metric list.

        Returns:
            Dict with "success" plus either directory/metrics data or an
            "error" message; never raises.
        """
        try:
            # Get directory information.
            # NOTE(review): Directory is queried from extractor_db here but
            # from business_db elsewhere — confirm which session owns it.
            query = select(Directory).where(Directory.path == directory_path)
            result = await extractor_db.execute(query)
            directory = result.scalar_one_or_none()

            if not directory:
                return {
                    "success": False,
                    "error": f"Directory not found: {directory_path}"
                }

            # Metrics for this directory.  NOTE(review): the MetricOwner
            # outer join can duplicate a metric that has multiple owners —
            # verify owner cardinality against the schema.
            metrics_query = select(
                Metric.id,
                Metric.name,
                Metric.type,
                Metric.summary,
                Metric.units,
                Metric.component,
                Metric.filename,
                Metric.line_number,
                Metric.analysis_status,
                MetricOwner.owner_email.label('owner')
            ).outerjoin(
                MetricOwner, Metric.id == MetricOwner.metric_id
            ).where(
                Metric.directory_id == directory.id
            )

            # FIX: metrics_count is now assigned on both branches; previously
            # an empty metrics_data with include_metrics=True hit a NameError.
            if include_metrics:
                result = await extractor_db.execute(metrics_query)
                metrics_data = result.fetchall()
                metrics_count = len(metrics_data)
            else:
                # Just get count
                count_query = select(func.count(Metric.id)).where(Metric.directory_id == directory.id)
                result = await extractor_db.execute(count_query)
                metrics_count = result.scalar() or 0
                metrics_data = None

            # Get team assignment
            team_name = None
            if directory.team_id:
                team_query = select(Team.name).where(Team.id == directory.team_id)
                result = await business_db.execute(team_query)
                team_name = result.scalar_one_or_none()

            return {
                "success": True,
                "directory": {
                    "id": directory.id,
                    "path": directory.path,
                    "name": directory.name,
                    "level": directory.level,
                    "team_name": team_name,
                    "assignment_confidence": directory.assignment_confidence
                },
                "metrics": [
                    {
                        "id": m.id,
                        "name": m.name,
                        "type": m.type,
                        "summary": m.summary,
                        "units": m.units,
                        "component": m.component,
                        "filename": m.filename,
                        "line_number": m.line_number,
                        "analysis_status": m.analysis_status,
                        "owner": m.owner
                    }
                    for m in metrics_data
                ] if include_metrics else None,
                "metrics_count": metrics_count
            }

        except Exception as e:
            logger.error(f"❌ Failed to get directory details: {e}")
            return {
                "success": False,
                "error": str(e)
            }


# Helper function for CASE statements
def case(condition, value):
    """Build a SQL ``CASE WHEN condition THEN value END`` expression.

    FIX: the previous body called ``func.case(...)``, which renders a generic
    function call literally named "case" instead of a real CASE expression.
    Note this module-level def also shadows the ``case`` imported from
    sqlalchemy at the top of the file, so the genuine construct is re-imported
    locally under an alias.
    """
    from sqlalchemy import case as sqla_case  # local alias: this def shadows the import
    return sqla_case((condition, value))


# Module-level singleton: the service holds no state of its own (all state
# lives in the per-call DB sessions), so one shared instance is safe to import.
directory_tree_service_optimized = DirectoryTreeServiceOptimized()