"""
Semantic search functionality with categorization support.
"""

import asyncio
from typing import List, Optional, Dict, Any
from dataclasses import dataclass

from .config import get_config, SemanticRagConfig
from .embedders import EmbedderFactory, DocumentType, TaskType
from .vector_store import QdrantVectorStore, VectorStoreSearchResult


@dataclass
class SearchResult:
    """Individual search result.

    One matching chunk returned from the vector store, with its
    location in the source file and its similarity score.
    """
    file_path: str  # path of the file the chunk was extracted from
    content: str  # the matched chunk text (stored as `codeChunk` in the vector store payload)
    start_line: int  # first line of the chunk within the file
    end_line: int  # last line of the chunk within the file
    score: float  # similarity score from the vector store (higher = more relevant)
    doc_type: DocumentType  # document category; in practice the string literals "code"/"text" are passed — presumably DocumentType is a str-compatible type, confirm in .embedders

@dataclass
class CategorizedSearchResults:
    """Search results split by document type.

    Attributes:
        text_results: Hits that came from text documents.
        code_results: Hits that came from code documents.
        combined_results: All hits merged together, ranked by score.
    """
    text_results: List[SearchResult]
    code_results: List[SearchResult]
    combined_results: List[SearchResult]

    @property
    def total_count(self) -> int:
        """Number of text and code results combined."""
        return sum(len(bucket) for bucket in (self.text_results, self.code_results))


class SemanticSearcher:
    """Main semantic search interface.

    Holds a single embedder (shared across all queries) and lazily creates
    one ``QdrantVectorStore`` per workspace path.
    """

    def __init__(self, config: Optional[SemanticRagConfig] = None):
        """Initialize searcher with configuration.

        Args:
            config: Optional configuration; when omitted the global
                configuration from ``get_config()`` is used.
        """
        self.config = config or get_config()

        # One embedder instance, reused for every query.
        embedder_config = self.config.get_embedder_config()
        self.embedder = EmbedderFactory.create_embedder(embedder_config)

        # Vector stores are created lazily, one per workspace path.
        self._vector_stores: Dict[str, QdrantVectorStore] = {}

    def _get_vector_store(self, workspace_path: str) -> QdrantVectorStore:
        """Get or create the cached vector store for a workspace."""
        if workspace_path not in self._vector_stores:
            vector_store_config = self.config.get_vector_store_config()
            vector_dimensions = EmbedderFactory.get_vector_dimensions()

            self._vector_stores[workspace_path] = QdrantVectorStore(
                workspace_path=workspace_path,
                url=vector_store_config.url,
                vector_dimensions=vector_dimensions,
                api_key=vector_store_config.api_key
            )

        return self._vector_stores[workspace_path]

    async def initialize_workspace(self, workspace_path: str) -> bool:
        """Initialize the vector store for a workspace.

        Returns:
            True if initialization succeeded.
        """
        vector_store = self._get_vector_store(workspace_path)
        return await vector_store.initialize()

    async def validate_configuration(self) -> tuple[bool, Optional[str]]:
        """Validate the search configuration.

        Returns:
            Tuple of (is_valid, error_message); error_message is None
            when validation succeeds.
        """
        # Validate the embedder first, then the static configuration.
        embedder_valid, embedder_error = await self.embedder.validate_configuration()
        if not embedder_valid:
            return False, f"Embedder validation failed: {embedder_error}"

        config_valid, config_error = self.config.validate_embedder_config()
        if not config_valid:
            return False, f"Configuration validation failed: {config_error}"

        return True, None

    @staticmethod
    def _to_search_results(
        raw_results: List[VectorStoreSearchResult],
        doc_type: DocumentType,
    ) -> List[SearchResult]:
        """Convert raw vector-store hits into ``SearchResult`` objects.

        Hits without a payload are skipped.
        """
        return [
            SearchResult(
                file_path=result.payload.filePath,
                content=result.payload.codeChunk,
                start_line=result.payload.startLine,
                end_line=result.payload.endLine,
                score=result.score,
                doc_type=doc_type
            )
            for result in raw_results
            if result.payload
        ]

    async def search(
        self,
        query: str,
        workspace_path: str,
        max_results_code: int = 2,
        max_results_text: int = 4,
        text_min_score: float = 0.5,
        code_min_score: float = 0.3,
        directory_prefix: Optional[str] = None
    ) -> CategorizedSearchResults:
        """
        Perform categorized semantic search.

        Args:
            query: Search query
            workspace_path: Path to the workspace being searched
            max_results_code: Maximum code file results
            max_results_text: Maximum text file results
            text_min_score: Minimum score for text files
            code_min_score: Minimum score for code files
            directory_prefix: Optional directory filter

        Returns:
            Categorized search results

        Raises:
            RuntimeError: If query embeddings could not be generated.
        """
        # Embed the query for both document types concurrently —
        # the two calls are independent, so there is no reason to
        # await them sequentially.
        query_embedding_code, query_embedding_text = await asyncio.gather(
            self.embedder.create_embeddings(
                [query], doc_type="code", task="retrieval_query"
            ),
            self.embedder.create_embeddings(
                [query], doc_type="text", task="retrieval_query"
            ),
        )

        if not query_embedding_code.embeddings or not query_embedding_text.embeddings:
            raise RuntimeError("Failed to generate query embeddings")

        query_vector_code = query_embedding_code.embeddings[0]
        query_vector_text = query_embedding_text.embeddings[0]

        vector_store = self._get_vector_store(workspace_path)

        # Search both collections concurrently.
        code_results, text_results = await asyncio.gather(
            vector_store.search(
                query_vector=query_vector_code,
                doc_type="code",
                directory_prefix=directory_prefix,
                min_score=code_min_score,
                max_results=max_results_code
            ),
            vector_store.search(
                query_vector=query_vector_text,
                doc_type="text",
                directory_prefix=directory_prefix,
                min_score=text_min_score,
                max_results=max_results_text
            ),
        )

        # Convert to our result format.
        code_search_results = self._to_search_results(code_results, "code")
        text_search_results = self._to_search_results(text_results, "text")

        # Combine both categories and rank by descending score.
        combined_results = sorted(
            code_search_results + text_search_results,
            key=lambda x: x.score,
            reverse=True
        )

        return CategorizedSearchResults(
            text_results=text_search_results,
            code_results=code_search_results,
            combined_results=combined_results
        )

    async def search_with_stats(
        self,
        query: str,
        workspace_path: str,
        max_results_code: int = 2,
        max_results_text: int = 4,
        text_min_score: float = 0.5,
        code_min_score: float = 0.3,
        directory_prefix: Optional[str] = None
    ) -> tuple[CategorizedSearchResults, Dict[str, Any]]:
        """
        Perform search with additional statistics.

        Takes the same arguments as :meth:`search`.

        Returns:
            Tuple of (search_results, statistics), where statistics maps
            "code"/"text" to total chunk counts and the number of chunks
            that cleared the score threshold.
        """
        results = await self.search(
            query=query,
            workspace_path=workspace_path,
            max_results_code=max_results_code,
            max_results_text=max_results_text,
            text_min_score=text_min_score,
            code_min_score=code_min_score,
            directory_prefix=directory_prefix
        )

        vector_store = self._get_vector_store(workspace_path)

        # The two stats lookups are independent; fetch them concurrently.
        code_stats, text_stats = await asyncio.gather(
            vector_store.get_collection_stats("code"),
            vector_store.get_collection_stats("text"),
        )

        stats = {
            "code": {
                "totalChunks": code_stats["totalPoints"] if code_stats else 0,
                "chunksAboveThreshold": len(results.code_results),
            },
            "text": {
                "totalChunks": text_stats["totalPoints"] if text_stats else 0,
                "chunksAboveThreshold": len(results.text_results),
            }
        }

        return results, stats


# Convenience function for simple usage
async def semantic_search(
    query: str,
    workspace_path: str,
    max_results_code: int = 2,
    max_results_text: int = 4,
    text_min_score: float = 0.5,
    code_min_score: float = 0.3,
    directory_prefix: Optional[str] = None,
    config: Optional[SemanticRagConfig] = None
) -> CategorizedSearchResults:
    """
    Perform semantic search with default configuration.

    Convenience wrapper: builds a throwaway searcher, prepares the
    workspace's vector store, and runs a single categorized search.
    """
    searcher = SemanticSearcher(config)
    await searcher.initialize_workspace(workspace_path)

    search_kwargs = dict(
        query=query,
        workspace_path=workspace_path,
        max_results_code=max_results_code,
        max_results_text=max_results_text,
        text_min_score=text_min_score,
        code_min_score=code_min_score,
        directory_prefix=directory_prefix,
    )
    return await searcher.search(**search_kwargs)