"""
Qdrant vector store client implementation.
"""

import hashlib
import asyncio
from typing import List, Optional, Dict, Any, Union
from dataclasses import dataclass
from urllib.parse import urlparse
from pathlib import Path

try:
    from qdrant_client import QdrantClient
    from qdrant_client.http import models
    from qdrant_client.http.models import Distance, VectorParams
except ImportError:
    raise ImportError("qdrant-client package is required. Install with: pip install qdrant-client")

from ..embedders.base import DocumentType

# Default search tuning: cap on the number of returned hits, and the
# minimum similarity score a hit needs to be included in results.
DEFAULT_MAX_SEARCH_RESULTS = 20
DEFAULT_SEARCH_MIN_SCORE = 0.7


@dataclass
class PointStruct:
    """A vector point to be upserted into Qdrant."""
    # Unique point identifier.
    id: str
    # Embedding vector for the chunk.
    vector: List[float]
    # Arbitrary metadata; must contain a "filePath" key (read by
    # QdrantVectorStore.upsert_points to build path-segment indexes).
    payload: Dict[str, Any]


@dataclass
class Payload:
    """Typed view of a search hit's payload.

    Field names are camelCase on purpose: they mirror the keys stored in
    the Qdrant payload ("filePath", "codeChunk", ...), not Python naming
    convention.
    """
    filePath: str   # source file the chunk came from
    codeChunk: str  # the chunk text itself
    startLine: int  # first line of the chunk within the file
    endLine: int    # last line of the chunk within the file


@dataclass
class VectorStoreSearchResult:
    """A single search hit returned by the vector store."""
    # Qdrant point ID (Qdrant permits both string and integer IDs).
    id: Union[str, int]
    # Similarity score reported by Qdrant for this hit.
    score: float
    # Parsed payload, when the stored payload had all required fields.
    payload: Optional[Payload] = None


class QdrantVectorStore:
    """Qdrant-backed vector store scoped to a single workspace.

    Collections are named ``ws-<sha256-prefix>_<doc_type>`` so multiple
    workspaces can share one Qdrant server without colliding.  The
    underlying ``qdrant_client`` is synchronous; every blocking call is
    pushed onto the default thread-pool executor (see :meth:`_run`) so the
    async API never blocks the running event loop.
    """

    def __init__(
        self, 
        workspace_path: str, 
        url: str, 
        vector_dimensions: Dict[str, int],
        api_key: Optional[str] = None
    ):
        """Create a store for *workspace_path* backed by the Qdrant at *url*.

        Args:
            workspace_path: Path identifying the workspace; only its hash
                is used, the path is never touched on disk.
            url: Qdrant endpoint.  May be empty (falls back to
                ``http://localhost:6333``), a bare ``host``/``host:port``,
                or a full URL.
            vector_dimensions: Mapping of document type (e.g. ``"code"``,
                ``"text"``) to embedding dimension.
            api_key: Optional Qdrant API key.
        """
        self.qdrant_url = self._parse_qdrant_url(url)
        self.vector_dimensions = vector_dimensions
        self.distance_metric = Distance.COSINE

        # Deterministic workspace ID: first 16 hex chars of the path's SHA-256.
        digest = hashlib.sha256(workspace_path.encode()).hexdigest()
        self.workspace_id = f"ws-{digest[:16]}"

        try:
            parsed = urlparse(self.qdrant_url)
            # Default port: 443 for https, Qdrant's standard 6333 otherwise.
            port = parsed.port or (443 if parsed.scheme == "https" else 6333)

            self.client = QdrantClient(
                host=parsed.hostname,
                port=port,
                https=parsed.scheme == "https",
                api_key=api_key,
                timeout=30.0
            )
        except Exception:
            # Host/port construction failed (e.g. unparsable URL); let the
            # client library interpret the raw URL itself.
            self.client = QdrantClient(
                url=self.qdrant_url,
                api_key=api_key,
                timeout=30.0
            )

    async def _run(self, fn):
        """Run the blocking zero-argument callable *fn* on the default executor.

        Uses ``asyncio.get_running_loop()`` — ``get_event_loop()`` is
        deprecated inside coroutines on modern Python.
        """
        return await asyncio.get_running_loop().run_in_executor(None, fn)

    def _parse_qdrant_url(self, url: Optional[str]) -> str:
        """Normalize *url* to a full Qdrant URL (default ``http://localhost:6333``)."""
        if not url or url.strip() == "":
            return "http://localhost:6333"

        url = url.strip()

        # Bare hostname with no scheme at all: build a URL around it.
        if not url.startswith(("http://", "https://")) and "://" not in url:
            return self._parse_hostname(url)

        try:
            urlparse(url)
            return url
        except Exception:
            return self._parse_hostname(url)

    def _parse_hostname(self, hostname: str) -> str:
        """Turn a bare ``host`` or ``host:port`` into an http:// URL."""
        if ":" in hostname:
            return f"http://{hostname}"
        else:
            return f"http://{hostname}:6333"

    def _get_collection_name(self, doc_type: DocumentType) -> str:
        """Return the per-workspace collection name for *doc_type*."""
        return f"{self.workspace_id}_{doc_type}"

    async def _get_collection_info(self, doc_type: DocumentType) -> Optional[models.CollectionInfo]:
        """Fetch collection info, or ``None`` if it is missing or the call fails."""
        try:
            collection_name = self._get_collection_name(doc_type)
            return await self._run(lambda: self.client.get_collection(collection_name))
        except Exception:
            # Most commonly "collection not found" — treated as absence.
            return None

    async def initialize(self) -> bool:
        """Ensure the "code" and "text" collections exist.

        Returns:
            True if at least one collection was newly created or recreated.
        """
        code_created = await self._initialize_collection("code")
        text_created = await self._initialize_collection("text")
        return code_created or text_created

    async def _initialize_collection(self, doc_type: DocumentType) -> bool:
        """Create or validate one collection; return True if it was (re)created.

        Raises:
            RuntimeError: If Qdrant cannot be reached or setup fails; the
                original exception is chained as the cause.
        """
        collection_name = self._get_collection_name(doc_type)
        # Fall back to 4096 dims when the embedder's dimension is unknown.
        vector_size = self.vector_dimensions.get(doc_type, 4096)

        try:
            collection_info = await self._get_collection_info(doc_type)

            if collection_info is None:
                # No collection yet: create it with the expected dimensions.
                await self._run(lambda: self.client.create_collection(
                    collection_name=collection_name,
                    vectors_config=VectorParams(
                        size=vector_size,
                        distance=self.distance_metric
                    )
                ))
                await self._create_payload_indexes(doc_type)
                return True

            # Existing collection: if its dimensions don't match the current
            # embedder, the stored vectors are unusable — recreate.
            existing_size = collection_info.config.params.vectors.size
            if existing_size != vector_size:
                return await self._recreate_collection_with_new_dimension(
                    doc_type, existing_size
                )

            # Index creation is idempotent; ensure older collections have them.
            await self._create_payload_indexes(doc_type)
            return False

        except Exception as error:
            print(f"[Qdrant] Failed to initialize collection '{collection_name}': {error}")
            # Chain the cause so callers can inspect the underlying failure.
            raise RuntimeError(f"Failed to connect to Qdrant at {self.qdrant_url}") from error

    async def _recreate_collection_with_new_dimension(
        self, doc_type: DocumentType, existing_size: int
    ) -> bool:
        """Drop and recreate a collection whose vector size is stale.

        All previously indexed points are lost; callers are expected to
        re-index afterwards.
        """
        collection_name = self._get_collection_name(doc_type)
        vector_size = self.vector_dimensions.get(doc_type, 4096)

        print(f"[Qdrant] Collection {collection_name} exists with size {existing_size}, "
              f"but expected {vector_size}. Recreating.")

        await self._run(lambda: self.client.delete_collection(collection_name))

        # Brief pause to let the deletion settle before recreating.
        await asyncio.sleep(0.1)

        await self._run(lambda: self.client.create_collection(
            collection_name=collection_name,
            vectors_config=VectorParams(
                size=vector_size,
                distance=self.distance_metric
            )
        ))

        return True

    async def _create_payload_indexes(self, doc_type: DocumentType) -> None:
        """Create keyword indexes on ``pathSegments.0`` .. ``pathSegments.4``.

        These back the directory-prefix filter used by :meth:`search`.
        "Already exists" errors are expected and silently ignored; other
        failures are logged but non-fatal.
        """
        collection_name = self._get_collection_name(doc_type)

        for i in range(5):
            try:
                # idx=i binds the current value into the lambda's default.
                await self._run(lambda idx=i: self.client.create_payload_index(
                    collection_name=collection_name,
                    field_name=f"pathSegments.{idx}",
                    field_schema=models.PayloadSchemaType.KEYWORD
                ))
            except Exception as error:
                if "already exists" not in str(error):
                    print(f"[Qdrant] Could not create payload index for {collection_name}: {error}")

    async def upsert_points(self, points: List[PointStruct], doc_type: DocumentType) -> None:
        """Upsert *points*, augmenting each payload with ``pathSegments``.

        ``pathSegments`` maps segment index (as a string key) to the
        corresponding component of the payload's ``filePath``, which is
        what the directory-prefix filter in :meth:`search` matches on.

        Raises:
            Exception: Re-raises any upsert failure after logging it.
        """
        collection_name = self._get_collection_name(doc_type)

        processed_points = []
        for point in points:
            segments = Path(point.payload["filePath"]).parts
            path_segments = {str(i): segment for i, segment in enumerate(segments)}
            processed_points.append(models.PointStruct(
                id=point.id,
                vector=point.vector,
                payload={
                    **point.payload,
                    "pathSegments": path_segments
                }
            ))

        try:
            # wait=True: block until the write is durable on the server.
            await self._run(lambda: self.client.upsert(
                collection_name=collection_name,
                points=processed_points,
                wait=True
            ))
        except Exception as error:
            print(f"[Qdrant] Upsert failed for {collection_name}: {error}")
            raise

    def _is_payload_valid(self, payload: Optional[Dict[str, Any]]) -> bool:
        """Return True if *payload* contains every field :class:`Payload` needs."""
        if not payload:
            return False

        required_fields = ["filePath", "codeChunk", "startLine", "endLine"]
        return all(field in payload for field in required_fields)

    async def search(
        self,
        query_vector: List[float],
        doc_type: DocumentType,
        directory_prefix: Optional[str] = None,
        min_score: Optional[float] = None,
        max_results: Optional[int] = None
    ) -> List[VectorStoreSearchResult]:
        """Search *doc_type*'s collection for vectors similar to *query_vector*.

        Args:
            query_vector: Embedding to search with.
            doc_type: Which collection to search ("code" or "text").
            directory_prefix: If given, restrict hits to files under this
                directory, matched segment-by-segment against the indexed
                ``pathSegments`` payload.
            min_score: Minimum similarity score; ``None`` means
                DEFAULT_SEARCH_MIN_SCORE.  An explicit 0.0 disables the
                threshold.
            max_results: Result cap; ``None`` means DEFAULT_MAX_SEARCH_RESULTS.

        Returns:
            Hits whose payloads contain all required fields, best first.

        Raises:
            Exception: Re-raises any search failure after logging it.
        """
        collection_name = self._get_collection_name(doc_type)

        # Explicit None checks so falsy caller values (0, 0.0) are honored
        # instead of being silently replaced by the defaults.
        threshold = DEFAULT_SEARCH_MIN_SCORE if min_score is None else min_score
        limit = DEFAULT_MAX_SEARCH_RESULTS if max_results is None else max_results

        # AND together one condition per path segment of the prefix.
        query_filter = None
        if directory_prefix:
            conditions = [
                models.FieldCondition(
                    key=f"pathSegments.{i}",
                    match=models.MatchValue(value=segment)
                )
                for i, segment in enumerate(Path(directory_prefix).parts)
            ]
            if conditions:
                query_filter = models.Filter(must=conditions)

        try:
            results = await self._run(lambda: self.client.search(
                collection_name=collection_name,
                query_vector=query_vector,
                query_filter=query_filter,
                score_threshold=threshold,
                limit=limit,
                with_payload=True
            ))

            # Convert raw hits, dropping any with incomplete payloads.
            search_results = []
            for result in results:
                if self._is_payload_valid(result.payload):
                    search_results.append(VectorStoreSearchResult(
                        id=result.id,
                        score=result.score,
                        payload=Payload(
                            filePath=result.payload["filePath"],
                            codeChunk=result.payload["codeChunk"],
                            startLine=result.payload["startLine"],
                            endLine=result.payload["endLine"]
                        )
                    ))

            return search_results

        except Exception as error:
            print(f"[Qdrant] Search failed for {collection_name}: {error}")
            raise

    async def search_with_stats(
        self,
        query_vector: List[float],
        doc_type: DocumentType,
        directory_prefix: Optional[str] = None,
        min_score: Optional[float] = None,
        max_results: Optional[int] = None
    ) -> Dict[str, Any]:
        """Search and additionally report collection-level statistics.

        Returns:
            Dict with "results" (the search hits), "totalChunks" (points in
            the collection, 0 if stats are unavailable) and
            "chunksAboveThreshold" (hits at or above the score threshold).
        """
        results = await self.search(
            query_vector, doc_type, directory_prefix, min_score, max_results
        )

        stats = await self.get_collection_stats(doc_type)
        total_chunks = stats["totalPoints"] if stats else 0

        # NOTE: search() already applies this threshold server-side, so this
        # count normally equals len(results); kept for explicitness.
        threshold = DEFAULT_SEARCH_MIN_SCORE if min_score is None else min_score
        chunks_above_threshold = len([r for r in results if r.score >= threshold])

        return {
            "results": results,
            "totalChunks": total_chunks,
            "chunksAboveThreshold": chunks_above_threshold
        }

    async def get_collection_stats(self, doc_type: DocumentType) -> Optional[Dict[str, int]]:
        """Return point/vector counts for the collection, or None on failure."""
        try:
            collection_info = await self._get_collection_info(doc_type)
            if not collection_info:
                return None

            return {
                "totalPoints": collection_info.points_count or 0,
                "vectorCount": collection_info.vectors_count or 0
            }
        except Exception as error:
            print(f"[Qdrant] Failed to get collection stats for {doc_type}: {error}")
            return None

    async def delete_points_by_file_paths(
        self, file_paths: List[str], doc_type: DocumentType
    ) -> None:
        """Delete all points whose ``filePath`` matches ANY of *file_paths*."""
        if not file_paths:
            return

        collection_name = self._get_collection_name(doc_type)

        conditions = [
            models.FieldCondition(
                key="filePath",
                match=models.MatchValue(value=fp)
            )
            for fp in file_paths
        ]

        # "should" makes this an OR across the listed file paths.
        await self._run(lambda: self.client.delete(
            collection_name=collection_name,
            points_selector=models.FilterSelector(
                filter=models.Filter(should=conditions)
            ),
            wait=True
        ))

    async def delete_collection(self) -> None:
        """Delete both of this workspace's collections ("code" and "text")."""
        await self._delete_collection("code")
        await self._delete_collection("text")

    async def _delete_collection(self, doc_type: DocumentType) -> None:
        """Delete a single collection if it exists; no-op otherwise."""
        collection_name = self._get_collection_name(doc_type)

        if await self.collection_exists(doc_type):
            await self._run(lambda: self.client.delete_collection(collection_name))

    async def collection_exists(self, doc_type: DocumentType) -> bool:
        """Return True if the collection for *doc_type* exists."""
        collection_info = await self._get_collection_info(doc_type)
        return collection_info is not None