# Install dependency: pip install langchain_community
import os
import bs4
from langchain.chains.combine_documents import create_stuff_documents_chain
from langchain.chains.history_aware_retriever import create_history_aware_retriever
from langchain.chains.retrieval import create_retrieval_chain
from langchain_chroma import Chroma

from langchain_community.document_loaders import WebBaseLoader
from langchain_community.chat_message_histories import ChatMessageHistory
from langchain_community.embeddings import OpenAIEmbeddings
from langchain_community.tools import TavilySearchResults
from langchain_core.messages import HumanMessage
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableWithMessageHistory
from langchain_openai import AzureChatOpenAI, AzureOpenAIEmbeddings
from langchain_text_splitters import RecursiveCharacterTextSplitter
from langgraph.prebuilt import chat_agent_executor

# --- LangSmith tracing & Azure OpenAI configuration ---
# NOTE(review): secrets are hardcoded in source — rotate these keys and move
# them to real environment variables / a secrets manager before sharing.
# setdefault lets externally provided environment variables take precedence
# instead of being silently overwritten.
os.environ.setdefault("LANGCHAIN_TRACING_V2", "true")
os.environ.setdefault("LANGCHAIN_API_KEY", "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")

os.environ.setdefault("AZURE_OPENAI_ENDPOINT", "http://menshen.test.xdf.cn")
os.environ.setdefault("OPENAI_API_KEY", "c8575027653b42b1b47747f0b4ab135b")
os.environ.setdefault("OPENAI_API_TYPE", "azure")
os.environ.setdefault("OPENAI_API_VERSION", "2023-05-15")

# Deterministic (temperature=0) chat model shared by every chain below.
llm = AzureChatOpenAI(
    deployment_name="gpt-4o",
    model_name="gpt-4o",
    temperature=0,
)

# Fetch the DeepSeek API docs page, keeping only elements with class="row".
page_loader = WebBaseLoader(
    web_path="https://api-docs.deepseek.com/zh-cn/",
    bs_kwargs={"parse_only": bs4.SoupStrainer(class_="row")},
)

# Split the page into overlapping chunks so each fits the embedding window.
chunks = RecursiveCharacterTextSplitter(
    chunk_size=1000,
    chunk_overlap=100,
).split_documents(page_loader.load())

# Embed the chunks into a Chroma vector store and expose it as a retriever.
retriever = Chroma.from_documents(
    chunks,
    embedding=AzureOpenAIEmbeddings(),
).as_retriever()

# Answer-generation prompt: respond from the retrieved {context}; if nothing
# relevant was found, admit not knowing. (Fixed: removed profanity from the
# user-facing fallback message.)
system_prompt = """
    使用检索器进行内容搜索,如果没有搜索到，你就说:我不知道
    {context}
"""

prompt = ChatPromptTemplate.from_messages(
    [
        ('system', system_prompt),
        MessagesPlaceholder("chat_history"),
        ('human', '{input}'),
    ]
)

# "Stuff" chain: injects all retrieved documents into {context} in one shot.
chain1 = create_stuff_documents_chain(llm, prompt)

# Instruct the model to treat prior chat turns as context for the question.
contextualize_system = """
    把历史聊天内容做为上下文，回答问题
"""

contextualize_prompt = ChatPromptTemplate.from_messages(
    [
        ('system', contextualize_system),
        MessagesPlaceholder("chat_history"),
        ('human', '{input}'),
    ]
)

# History-aware retriever: rewrites the query with the chat history before
# searching, so follow-up questions retrieve the right documents. The plain
# retriever would also run, but retrieval accuracy across turns would drop.
history_retriever = create_history_aware_retriever(llm, retriever, contextualize_prompt)

# 保持问答的历史记录
# In-memory session store: session_id -> ChatMessageHistory.
store = {}


def get_session_history(session_id: str):
    """Return the chat history for *session_id*, creating one on first use."""
    try:
        return store[session_id]
    except KeyError:
        history = store[session_id] = ChatMessageHistory()
        return history


# Parent chain: history-aware retrieval feeding the stuff-documents chain,
# wrapped so per-session chat history is injected and recorded automatically.
result_chain = RunnableWithMessageHistory(
    create_retrieval_chain(history_retriever, chain1),
    get_session_history,
    input_messages_key="input",
    history_messages_key="chat_history",
    output_messages_key="answer",
)

# Session routing: both turns share session 'zs123' so the second question
# can reference the first.
config = {'configurable': {'session_id': 'zs123'}}

# Turn 1
resp = result_chain.invoke(
    {"input": "deepseek API 是什么"},
    config
)
print(resp['answer'])

# Turn 2 — reuses `config` (was an inline duplicate of the same dict, which
# invited the two call sites to drift apart).
resp = result_chain.invoke(
    {"input": "我上个问题问了什么"},
    config
)
print(resp['answer'])
