from langchain.tools import BaseTool
from pydantic import BaseModel, Field
from typing import Type,Optional
from langchain_community.document_loaders import DirectoryLoader
from langchain_community.vectorstores import Chroma
from langchain_ollama import OllamaEmbeddings
from tools.log_handler import logger
import yaml
from config.PathConfig import OLLAMA_CONFIG

# Input schema for the local search tool
class LocalSearchInput(BaseModel):
    """Arguments accepted by LocalSearchTool: a single free-text query string."""

    query: str = Field(..., description="本地文档搜索的查询内容")

# Load tool configuration from the YAML file at import time; the parsed
# mapping is shared module-wide (LocalSearchTool reads the "ollama" section).
with open(OLLAMA_CONFIG, mode="r", encoding="utf-8") as config_file:
    config = yaml.safe_load(config_file)

# Local document search tool
class LocalSearchTool(BaseTool):
    """LangChain tool that answers queries from a local document knowledge base.

    On construction it embeds every matching file under ``docs_dir`` with an
    Ollama embedding model (model/host taken from the module-level YAML
    ``config``) and indexes them in an in-memory Chroma vector store.
    ``_run`` then performs a top-k similarity search against that store.
    """

    name: str = "local_search"
    description: str = "从本地文档知识库检索信息，适用于需要本地数据的查询"
    args_schema: Type[BaseModel] = LocalSearchInput

    # Explicitly declared instance attributes so pydantic (BaseTool) accepts
    # assignment in __init__; both stay None until initialization succeeds.
    embeddings: Optional[OllamaEmbeddings] = None
    vectorstore: Optional[Chroma] = None

    def __init__(self, docs_dir: str = "data/docs", glob: str = "*.txt"):
        """Build the embedding client and vector index over local documents.

        Args:
            docs_dir: Directory holding the documents to index.
                Defaults to "data/docs" (the original hard-coded path).
            glob: Filename pattern selecting which files to load.
                Defaults to "*.txt" (the original hard-coded pattern).

        Raises:
            Exception: any failure during embedding setup, document loading,
                or index construction is logged and re-raised so callers
                never receive a half-initialized tool.
        """
        super().__init__()
        try:
            # Embedding model and Ollama server address come from the shared YAML config.
            self.embeddings = OllamaEmbeddings(model=config["ollama"]["EMBED_MODEL"],
                                               base_url=config["ollama"]["host"])

            loader = DirectoryLoader(docs_dir, glob=glob)
            docs = loader.load()
            self.vectorstore = Chroma.from_documents(docs, self.embeddings)
            logger.info("LocalSearchTool 初始化完成，文档加载成功")
        except Exception as e:
            logger.error(f"LocalSearchTool 初始化失败: {e}")
            raise

    def _run(self, query: str) -> str:
        """Run a top-3 similarity search and return the matched passages.

        Args:
            query: Natural-language search text.

        Returns:
            The page contents of the top matches joined by newlines
            (empty string when nothing matches), or the fixed error
            string "检索出错" if the search itself fails.
        """
        try:
            results = self.vectorstore.similarity_search(query, k=3)
            logger.info(f"文档检索结果：{results}")
            return "\n".join([doc.page_content for doc in results])
        except Exception as e:
            # Best-effort tool: log the failure and return a sentinel string
            # instead of raising, so the agent loop keeps running.
            logger.error(f"文档检索失败: {e}")
            return "检索出错"