"""
研究核心模块依赖注入
管理研究核心模块的依赖关系，减少循环导入
"""

from typing import Optional, Any
from functools import lru_cache

# Component caches.
# NOTE(review): _llm_instance, _search_tool_instance and _kg_tool_instance are
# never read or assigned in this module — the @lru_cache decorators below do
# the caching instead. They are kept because external code may import them;
# confirm before removing.
_llm_instance = None
_search_tool_instance = None
_kg_tool_instance = None
# Cache of prompt templates keyed by prompt name; populated lazily by get_prompt().
_prompts = {}

@lru_cache(maxsize=1)
def get_llm():
    """Return the shared LLM instance, building it on first call and caching it."""
    # Import locally (and under an alias, to avoid shadowing this function's
    # own name) so module load does not trigger a circular import.
    from src.research_core.model import get_llm as _build_llm
    return _build_llm()

@lru_cache(maxsize=1)
def get_search_tool():
    """Return the shared multimodal search tool, created once and cached."""
    # Local import defers loading the tool package until first use,
    # which keeps module import free of circular-dependency risk.
    from src.tools.search_tool import MultimodalSearchTool
    tool = MultimodalSearchTool()
    return tool

@lru_cache(maxsize=1)
def get_kg_tool():
    """Return the knowledge-graph query callable (not called here), cached after first lookup."""
    # Note: this returns the function object itself; callers invoke it.
    from src.tools.knowledge_graph_tool import query_knowledge_graph as _kg_query
    return _kg_query

def get_prompt(prompt_name: str):
    """Return the prompt template registered under *prompt_name*.

    Templates are loaded lazily from ``src.research_core.prompts`` on first
    request and memoized in the module-level ``_prompts`` cache.

    Args:
        prompt_name: One of ``"search"``, ``"reflection"``, ``"final_answer"``.

    Raises:
        ValueError: If *prompt_name* is not a known template name.
    """
    if prompt_name in _prompts:
        return _prompts[prompt_name]

    # Dispatch table replaces the previous if/elif chain that triplicated
    # the import/cache/return logic for each template.
    attr_by_name = {
        "search": "SEARCH_PROMPT",
        "reflection": "REFLECTION_PROMPT",
        "final_answer": "FINAL_ANSWER_PROMPT",
    }
    if prompt_name not in attr_by_name:
        raise ValueError(f"未知的提示模板: {prompt_name}")

    # Local import avoids a circular import at module load time.
    from src.research_core import prompts as _prompt_module
    prompt = getattr(_prompt_module, attr_by_name[prompt_name])
    _prompts[prompt_name] = prompt
    return prompt

def get_research_dependencies():
    """Assemble and return every dependency of the research module as a dict.

    Keys: ``llm``, ``search_tool``, ``kg_tool``, ``search_prompt``,
    ``reflection_prompt``, ``final_answer_prompt``.
    """
    deps = {
        "llm": get_llm(),
        "search_tool": get_search_tool(),
        "kg_tool": get_kg_tool(),
    }
    # Prompt entries follow the "<name>_prompt" key convention.
    for name in ("search", "reflection", "final_answer"):
        deps[f"{name}_prompt"] = get_prompt(name)
    return deps