from typing import TypedDict
from pydantic import BaseModel, Field
from langgraph.graph import StateGraph
from src.common.logger import getLogger
from langgraph.constants import START, END
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.output_parsers import StrOutputParser

# Module-level logger shared by every node/method in this file.
logger = getLogger()

class SearchState(TypedDict):
    """Shared LangGraph state flowing through the execute -> generate pipeline."""
    query: str     # the user's input question (set by the caller of invoke)
    document: str  # raw search-tool output (written by execute_node)
    answer: str    # LLM-generated answer text (written by generate_node)

class SearchAgent:
    """Two-step LangGraph agent: search for context, then answer with an LLM.

    Graph topology: START -> execute (search tool) -> generate (LLM) -> END.
    """

    def __init__(self, llm_model, tool):
        """
        Args:
            llm_model: LangChain-compatible chat model used to generate the answer.
            tool: LangChain tool whose ``.func`` callable takes the query string
                and returns the retrieved document text.
        """
        self.llm_model = llm_model
        self.tool = tool
        # Lazily-built compiled graph; building it is deterministic, so we
        # compile once instead of recompiling on every invoke() call.
        self._workflow = None

    def execute_node(self, state: SearchState) -> dict:
        """Run the search tool on the query and return it as the ``document`` state key."""
        logger.info("SearchAgent execute_node start")
        query = state["query"]
        result = self.tool.func(query)
        logger.info(f"SearchAgent execute_node result len: {len(result)}")
        return { "document": result }

    def generate_node(self, state: SearchState) -> dict:
        """Generate an answer from the retrieved document and return it as ``answer``."""
        logger.info("SearchAgent generate_node start")
        # Prompt instructs the model (in Chinese) to answer the question from
        # the given context, in Chinese, in detail. Runtime string kept verbatim.
        generate_template = """
            你是一位问题回答高手，根据提供的上下文和用户的输入问题回答问题。
            
            上下文：{context}
            用户输入的问题：{question}
            
            必须用中文详尽的回答。
        """
        generate_prompt = ChatPromptTemplate.from_template(generate_template)
        generate_chain = generate_prompt | self.llm_model | StrOutputParser()
        generate_result = generate_chain.invoke({ "context": state["document"], "question": state["query"] })
        logger.info(f"SearchAgent generate_node generate_result len: {len(generate_result)}")
        return { "answer": generate_result }

    def build_graph(self):
        """Build and compile the linear two-node graph over SearchState."""
        logger.info("SearchAgent build_graph start")
        graph = StateGraph(SearchState)
        graph.add_node("execute", self.execute_node)
        graph.add_node("generate", self.generate_node)

        graph.add_edge(START, "execute")
        graph.add_edge("execute", "generate")
        graph.add_edge("generate", END)
        return graph.compile()

    def invoke(self, query):
        """Run the full search-then-answer pipeline for ``query``.

        Returns:
            dict with keys ``document`` (search-tool output) and ``answer``
            (LLM answer); either is None if the corresponding node set nothing.
        """
        logger.info(f"SearchAgent invoke query: {query}")
        # Reuse the compiled workflow instead of rebuilding it per call.
        if self._workflow is None:
            self._workflow = self.build_graph()
        response = self._workflow.invoke({ "query": query })
        logger.info(f"SearchAgent invoke response len: {len(response)}")
        # BUG FIX: the state key is "document" (see SearchState / execute_node),
        # not "report" — the old lookup always returned None for the document.
        return { "document": response.get("document", None), "answer": response.get("answer", None) }
