from typing import List, Dict, Union, Any, Optional

from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain_core.callbacks import CallbackManagerForChainRun
from langchain_core.messages import AIMessage
from langchain_core.runnables import RunnableConfig

from ai_engine.car_wrap.prompts import CHAT_COMPLETION_PROMPT
from ai_engine.common.ai_common import trace_context
from ai_engine.core.base.base_chain import BaseChain
from ai_engine.core.model.chat import CompletionRequest
from ai_engine.core.vectorstore.vector_data.knowledge.knowledge_vs import KnowledgeVs


def get_ai_message(ai_message: AIMessage) -> Union[str, List[Union[str, Dict]]]:
    """Extract the content payload from an ``AIMessage``.

    Raises:
        TypeError: if the argument is not an ``AIMessage`` instance.
    """
    # Guard clause: reject anything that is not an AIMessage up front.
    if not isinstance(ai_message, AIMessage):
        raise TypeError("响应参数不是AIMessage类型")
    return ai_message.content


def modify_prompt_flg(prompt: str, is_modify_prompt: bool) -> bool:
    """Return whether the prompt-rewrite chain should be used.

    Args:
        prompt: The user question (currently unused; kept for interface
            stability with existing callers).
        is_modify_prompt: Request flag enabling the rewrite chain.

    Returns:
        True when the rewrite chain should run, False otherwise.
    """
    # Replace the redundant ``if x: return True / return False`` pattern;
    # bool() also normalizes truthy non-bool inputs to a strict bool,
    # matching the declared return type.
    return bool(is_modify_prompt)


def check_prompt_keyword(prompt: str, keywords: List[str]) -> bool:
    """Return True if the prompt contains any of the given keywords.

    Args:
        prompt: The user question text to scan.
        keywords: Substrings to look for; an empty list yields False.

    Returns:
        True if at least one keyword occurs in ``prompt``.
    """
    # Fixed invalid annotation: ``keywords: []`` was a list literal, not a
    # type — use List[str]. any() already returns a bool, so the previous
    # ``if ...: return True / return False`` wrapper was redundant.
    return any(keyword in prompt for keyword in keywords)


class ChatCompletionChain(BaseChain):
    """Chain that answers a question using documents retrieved from the knowledge base."""

    # Knowledge-base vector-store accessor used for similarity search
    knowledge_vs: KnowledgeVs
    # Incoming completion request (carries request_id used for tracing)
    request: CompletionRequest

    def _call(
            self,
            inputs: Dict[str, Any],
            run_manager: Optional[CallbackManagerForChainRun] = None,
    ) -> Dict[str, Any]:
        """Retrieve relevant documents for the question and run the stuff-documents QA chain.

        Args:
            inputs: Chain inputs; must contain a "question" key.
            run_manager: Optional callback manager; a no-op manager is used when absent.

        Returns:
            A dict mapping ``self.output_key`` to the model's answer.
        """
        manager = run_manager or CallbackManagerForChainRun.get_noop_manager()

        # Propagate the request id into the trace context before any downstream call.
        if self.request.request_id:
            trace_context.set(self.request.request_id)

        question = inputs["question"]
        docs = self.knowledge_vs.similarity_search(question, self.request, manager)

        # Feed the retrieved documents into the prompt as its "context" variable.
        chain_inputs = dict(inputs)
        chain_inputs["context"] = docs

        qa_chain = create_stuff_documents_chain(self.llm, CHAT_COMPLETION_PROMPT.PROMPT)
        answer = qa_chain.invoke(
            input=chain_inputs,
            config=RunnableConfig(callbacks=manager.get_child()),
        )
        return {self.output_key: answer}
