import hashlib
import json
import logging  # No need to reconfigure logging
import os
from pathlib import Path
from typing import Dict, List, Optional, Union

import numpy as np
from pymilvus import (
    connections,
    utility,
    FieldSchema,
    CollectionSchema,
    DataType,
    Collection,
    MilvusClient,
)

from .base import BaseCoinDatabase

class MilvusCoinDatabase(BaseCoinDatabase):
    """Milvus-backed coin feature database.

    Stores one FLOAT_VECTOR record per coin side ("front"/"back") in a single
    collection keyed by a deterministic 63-bit integer ID, with a JSON
    metadata field linking each record back to its original coin ID.
    Similarity search uses inner product (IP) over an IVF_FLAT index.
    """

    def __init__(self):
        # Single collection holding one record per coin side.
        self.collection_name = "coin_features"
        # ResNet50 global feature dimensionality.
        self.dim = 2048

        # Establish the connection eagerly so failures surface at construction.
        self._connect()

    def _connect(self):
        """Open a connection to the local Milvus server.

        Raises:
            Exception: re-raised after logging if the connection fails.
        """
        try:
            connections.connect(
                alias="default",
                host="127.0.0.1",  # local Milvus service address
                port="19530",      # Milvus default port
            )
            self.client = MilvusClient()
            logging.info("Connected to Milvus")
        except Exception as e:
            logging.error(f"Error connecting to Milvus: {e}")
            raise

    def __del__(self):
        """Best-effort close of the Milvus client when the object is collected."""
        try:
            # hasattr guard: _connect may have failed before self.client existed.
            if hasattr(self, 'client'):
                self.client.close()
        except Exception as e:
            # __del__ must never raise; log (was print) and move on.
            logging.error("Error closing Milvus connection: %s", e)

    def ensure_collection(self):
        """Ensure the collection exists, is indexed, and is loaded into memory.

        Creates the collection with an id/vector/metadata schema and an
        IP / IVF_FLAT index when missing, then loads it for searching.

        Raises:
            Exception: re-raised after logging on any Milvus failure.
        """
        try:
            if not utility.has_collection(self.collection_name):
                # Schema: explicit INT64 primary key (auto_id=False — we
                # compute deterministic IDs ourselves), the feature vector,
                # and a JSON metadata payload.
                fields = [
                    FieldSchema(name="id", dtype=DataType.INT64, is_primary=True, auto_id=False),
                    FieldSchema(name="vector", dtype=DataType.FLOAT_VECTOR, dim=self.dim),
                    FieldSchema(name="metadata", dtype=DataType.JSON),
                ]
                schema = CollectionSchema(fields=fields, description="Coin features collection")
                collection = Collection(name=self.collection_name, schema=schema)

                # Inner-product metric matches the IP search_params used below.
                index_params = {
                    "metric_type": "IP",
                    "index_type": "IVF_FLAT",
                    "params": {"nlist": 128},
                }
                collection.create_index(field_name="vector", index_params=index_params)
                logging.info("Created new collection: %s", self.collection_name)
            else:
                logging.info("Collection %s already exists", self.collection_name)

            # Load the collection so it is searchable.
            collection = Collection(self.collection_name)
            collection.load()
            logging.info("Collection %s is loaded and ready", self.collection_name)

        except Exception as e:
            logging.error(f"Error ensuring collection: {e}")
            raise

    @staticmethod
    def _stable_id(key: str) -> int:
        """Return a deterministic non-negative 63-bit INT64 id for *key*.

        The original code used builtin hash(), which is salted per process
        (PYTHONHASHSEED) — IDs were not reproducible across runs, so re-inserts
        duplicated records. MD5 gives a stable mapping; the modulus keeps the
        value within Milvus's signed INT64 range.
        """
        digest = hashlib.md5(key.encode("utf-8")).hexdigest()
        return int(digest, 16) % (2 ** 63)

    def add_coin(self, coin_id: str, front_features: np.ndarray, back_features: np.ndarray,
                front_path: Path, back_path: Path, annotations: Optional[dict] = None):
        """Insert the front and back feature vectors of one coin.

        Args:
            coin_id: caller-supplied coin identifier, stored in metadata.
            front_features / back_features: 1-D feature vectors (dim=2048).
            front_path / back_path: image paths.
            annotations: optional annotation dict.

        NOTE(review): front_path, back_path and annotations are accepted but
        not persisted by this backend — confirm whether they belong in the
        metadata JSON or in save_to_file's auxiliary store.
        """
        for side, features in (("front", front_features), ("back", back_features)):
            self.client.insert(
                collection_name=self.collection_name,
                data=[{
                    "id": self._stable_id(f"{coin_id}_{side}"),
                    "vector": features.tolist(),
                    "metadata": json.dumps({
                        "side": side,
                        "original_id": coin_id,
                    }),
                }],
            )

    def _search_side(self, features: np.ndarray, top_k: int):
        """Run an IP similarity search for one query vector; return its hit list."""
        results = self.client.search(
            collection_name=self.collection_name,
            data=[features.tolist()],
            limit=top_k,
            output_fields=["metadata"],
            search_params={
                "metric_type": "IP",
                "params": {"nprobe": 10},
            },
        )
        # One query vector in, so only the first result set is relevant.
        return results[0]

    @staticmethod
    def _hit_metadata(hit) -> dict:
        """Decode the JSON metadata payload stored with a search hit."""
        return json.loads(hit['entity']['metadata'])

    def _add_single_side(self, hits, side: str, matched: set, results: list, top_k: int):
        """Append single-side fallback results for *side* until top_k is reached.

        Skips coins already present in *results* (tracked via *matched*) —
        the original code could emit the same coin both as a two-sided match
        and as a `*_only` fallback.
        """
        for hit in hits:
            if len(results) >= top_k:
                break
            try:
                metadata = self._hit_metadata(hit)
                if metadata.get('side') != side:
                    continue
                coin_id = metadata.get("original_id")
                if not coin_id or coin_id in matched:
                    continue

                similarity = float(hit['distance'])
                results.append({
                    'coin_id': coin_id,
                    'front_similarity': similarity if side == 'front' else None,
                    'back_similarity': similarity if side == 'back' else None,
                    # A single side contributes half of a full two-sided score.
                    'combined_similarity': similarity / 2,
                    'match_type': f'{side}_only',
                })
                matched.add(coin_id)
            except Exception as e:
                logging.error("Error processing %s-only hit: %s", side, e)
                continue

    def find_similar_coins(self, query_front_features: np.ndarray, query_back_features: np.ndarray,
                         top_k: int = 5) -> List[Dict[str, Union[str, float, dict]]]:
        """Find the coins most similar to the query on both faces.

        Searches front and back independently, pairs hits belonging to the
        same coin, and tops up with single-side matches when fewer than
        *top_k* coins match on both faces.

        Returns:
            Up to *top_k* dicts sorted by 'combined_similarity' (descending),
            each with 'coin_id', 'front_similarity', 'back_similarity',
            'combined_similarity' and, for fallback entries, 'match_type'.
            Returns [] on search failure.
        """
        try:
            front_hits = self._search_side(query_front_features, top_k)
            back_hits = self._search_side(query_back_features, top_k)

            results = []
            matched = set()  # coin ids already added, to prevent duplicates

            # Pair each front hit with the same coin's back hit.
            for front_hit in front_hits:
                try:
                    metadata = self._hit_metadata(front_hit)
                    if metadata.get('side') != 'front':
                        continue
                    coin_id = metadata.get("original_id")
                    if not coin_id:
                        continue

                    back_hit = next(
                        (hit for hit in back_hits
                         if self._hit_metadata(hit).get("original_id") == coin_id
                         and self._hit_metadata(hit).get("side") == "back"),
                        None,
                    )
                    if back_hit is None:
                        continue

                    # 'distance' holds the IP similarity score, not 'score'.
                    front_sim = float(front_hit['distance'])
                    back_sim = float(back_hit['distance'])
                    results.append({
                        'coin_id': coin_id,
                        'front_similarity': front_sim,
                        'back_similarity': back_sim,
                        'combined_similarity': (front_sim + back_sim) / 2,
                    })
                    matched.add(coin_id)
                except Exception as e:
                    logging.error(f"Error processing hit: {e}")
                    continue

            # If two-sided matches fall short, fill with the best single-side
            # hits (front first, then back), never duplicating a coin.
            if len(results) < top_k:
                self._add_single_side(front_hits, 'front', matched, results, top_k)
            if len(results) < top_k:
                self._add_single_side(back_hits, 'back', matched, results, top_k)

            # Best combined similarity first.
            results.sort(key=lambda r: r['combined_similarity'], reverse=True)
            return results[:top_k]

        except Exception as e:
            logging.error(f"Search error: {e}")
            return []

    def save_to_file(self, file_path: str):
        """Persist auxiliary data (image paths and annotations). Not implemented."""
        logging.warning("save_to_file is not implemented for the Milvus backend")

    def load_from_file(self, file_path: str):
        """Load auxiliary data (image paths and annotations). Not implemented."""
        logging.warning("load_from_file is not implemented for the Milvus backend")