"""Chain for question-answering against a vector database."""
from __future__ import annotations

import inspect
import warnings
from abc import abstractmethod
from typing import Any, Dict, List, Optional

from langchain.chains.retrieval_qa.base import RetrievalQA, BaseRetrievalQA
from langchain_core.callbacks import (
    AsyncCallbackManagerForChainRun,
    CallbackManagerForChainRun,
    Callbacks,
)
from langchain_core.documents import Document
from langchain_core.language_models import BaseLanguageModel
from langchain_core.prompts import PromptTemplate
from langchain_core.pydantic_v1 import Extra, Field, root_validator
from langchain_core.retrievers import BaseRetriever
from langchain_core.runnables import RunnableConfig
from langchain_core.vectorstores import VectorStore

from langchain.chains.base import Chain
from langchain.chains.combine_documents.base import BaseCombineDocumentsChain
from langchain.chains.combine_documents.stuff import StuffDocumentsChain
from langchain.chains.llm import LLMChain
from langchain.chains.question_answering import load_qa_chain
from langchain.chains.question_answering.stuff_prompt import PROMPT_SELECTOR
# Default prompt used when the caller does not supply one to
# RetrievalQAChain.from_llm(). The template text is user-facing (Chinese)
# and is intentionally kept verbatim.
PROMPT = PromptTemplate.from_template("""
        你是一个对用户友好的bot，汇总上下文信息回答用户的提问，如果涉及内容较多，可以使用数字列表清晰化表达。
        上下文信息：{context}
        用户提问：{question}
        """)
# The prompt template actually in use; assigned by RetrievalQAChain.from_llm().
# NOTE: this holds a PromptTemplate (its ``.template`` attribute is read in
# RetrievalQAChain._call/_acall), not a str — the previous ``str`` annotation
# was incorrect.
current_template: Optional[PromptTemplate] = None


class RetrievalQAChain(RetrievalQA):
    """RetrievalQA subclass whose output is JSON-serializable.

    Overrides the parent ``_call``/``_acall`` so that ``source_documents``
    is returned as a list of plain page-content strings instead of
    ``Document`` objects, and adds the text of the prompt template that was
    actually used under the ``template`` key.
    """

    def _serialize_response(self, response: Dict[str, Any]) -> Dict[str, Any]:
        """Flatten Documents to strings and attach the active template text.

        Shared post-processing for both the sync and async call paths.
        """
        # ``source_documents`` is only present when the chain was built with
        # return_source_documents=True — guard against KeyError.
        if 'source_documents' in response:
            response['source_documents'] = [
                doc.page_content for doc in response['source_documents']
            ]
        global current_template
        # None-safe: ``current_template`` is only set once from_llm() has run.
        response['template'] = getattr(current_template, 'template', None)
        return response

    def _call(
            self,
            inputs: Dict[str, Any],
            run_manager: Optional[CallbackManagerForChainRun] = None,
    ) -> Dict[str, Any]:
        """Run the chain synchronously and serialize the response."""
        response = super()._call(inputs, run_manager=run_manager)
        return self._serialize_response(response)

    async def _acall(
            self,
            inputs: Dict[str, Any],
            run_manager: Optional[AsyncCallbackManagerForChainRun] = None,
    ) -> Dict[str, Any]:
        """Run the chain asynchronously and serialize the response."""
        # BUG FIX: the parent coroutine must be awaited — the original called
        # super()._acall(...) without ``await``, so ``response`` was a
        # coroutine object and the subsequent subscripting raised TypeError.
        response = await super()._acall(inputs, run_manager=run_manager)
        return self._serialize_response(response)

    @classmethod
    def from_llm(cls, llm: BaseLanguageModel, prompt: Optional[PromptTemplate] = None, callbacks: Callbacks = None,
                 llm_chain_kwargs: Optional[dict] = None, **kwargs: Any) -> BaseRetrievalQA:
        """Build the chain, recording which prompt template is in effect.

        Falls back to the module-level default ``PROMPT`` when no ``prompt``
        is supplied. The chosen template is stored in the module global
        ``current_template`` so ``_call``/``_acall`` can report it.
        """
        global current_template
        current_template = prompt or PROMPT

        return super().from_llm(llm=llm, prompt=current_template, callbacks=callbacks,
                                llm_chain_kwargs=llm_chain_kwargs,
                                **kwargs)