from sentence_transformers import SentenceTransformer
import faiss
import numpy as np

class SemanticSearch:
    """Semantic text search backed by sentence embeddings and a FAISS index.

    Usage: call build_index(texts) once, then search(query, top_k) to get
    the closest indexed texts by L2 distance in embedding space.
    """

    def __init__(self):
        # Lightweight multilingual embedding model.
        self.model = SentenceTransformer('paraphrase-multilingual-MiniLM-L12-v2')
        self.index = None  # FAISS index; created lazily by build_index()
        self.data = []     # indexed texts, aligned with FAISS row order

    def build_index(self, texts):
        """Build the semantic index over *texts*.

        Args:
            texts: non-empty sequence of strings to index.

        Raises:
            ValueError: if *texts* is empty (encode/shape would otherwise
                fail with an opaque error).
        """
        if not texts:
            raise ValueError("texts must be a non-empty sequence")
        # FAISS requires float32 input; convert once here.
        embeddings = np.asarray(
            self.model.encode(texts, convert_to_tensor=False),
            dtype='float32',
        )
        self.index = faiss.IndexFlatL2(embeddings.shape[1])
        self.index.add(embeddings)
        # Copy so later caller-side mutation can't desync text <-> vector rows.
        self.data = list(texts)

    def search(self, query, top_k=5):
        """Return up to *top_k* (text, L2 distance) pairs closest to *query*.

        Args:
            query: the query string.
            top_k: maximum number of results (default 5); silently capped
                at the number of indexed texts.

        Returns:
            List of (text, distance) tuples, closest first.

        Raises:
            RuntimeError: if build_index() has not been called yet.
        """
        if self.index is None:
            raise RuntimeError("index not built; call build_index() first")
        # Cap k: asking FAISS for more neighbors than indexed vectors makes
        # it pad the result with index -1, and self.data[-1] would then
        # silently return the wrong text.
        k = min(top_k, len(self.data))
        query_embedding = np.asarray(self.model.encode([query]), dtype='float32')
        distances, indices = self.index.search(query_embedding, k)
        return [
            (self.data[i], float(distances[0][j]))
            for j, i in enumerate(indices[0])
            if i >= 0  # defensive: skip FAISS "no result" sentinel entries
        ]