import os

from langchain_community.chat_message_histories import SQLChatMessageHistory
from langchain_core.chat_history import InMemoryChatMessageHistory
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableWithMessageHistory, RunnablePassthrough
from langchain_openai import ChatOpenAI
import gradio as gr

# NOTE(review): hard-coding the API key here overwrites any real key already
# exported in the environment — consider removing this line and requiring the
# key to be provided externally (the check below would then be meaningful).
os.environ["DASHSCOPE_API_KEY"] = "sk-******"
api_key = os.getenv("DASHSCOPE_API_KEY")
if not api_key:
    raise ValueError("DASHSCOPE_API_KEY environment variable not set!")

# Qwen served through DashScope's OpenAI-compatible endpoint; the low
# temperature keeps answers close to deterministic.
llm = ChatOpenAI(model='qwen-plus',
                 api_key=os.getenv("DASHSCOPE_API_KEY"),
                 base_url="https://dashscope.aliyuncs.com/compatible-mode/v1",
                 temperature=0.1)

# Prompt for the main conversation chain. The system message is supplied
# dynamically at invoke time so a history summary can be folded into it.
prompt = ChatPromptTemplate.from_messages([
    # When the input is a dict combined with history, the system message must
    # be passed in dynamically:
    # ("system", "你是一个乐于助人的助手，尽你所能回答所有问题，提供的聊天历史包含与你对话用户的相关信息"),
    ("system", "{system_message}"),
    MessagesPlaceholder(variable_name="chat_history", optional=True),  # filled with the actual history messages at runtime
    # ("placeholder","chat_history"),
    ("user", "{input}")
])

chain = prompt | llm  # base chain: prompt -> model

# Chat history is persisted in a relational database (or Redis).

# In-memory per-session store (one entry per session key).
# NOTE(review): `store` is never read or written in this file — history is
# loaded from SQL via get_session_history; confirm whether this is dead code.
store = {}


def get_session_history(session_id: str):
    """Return the persisted message history for *session_id*.

    Messages are stored in MySQL via SQLChatMessageHistory, so the history
    survives process restarts.
    """
    mysql_dsn = "mysql+pymysql://root:******@localhost:13306/chat_history?charset=utf8mb4"
    history = SQLChatMessageHistory(
        session_id=session_id,
        connection_string=mysql_dsn,
        table_name="chat_history_message",
    )
    return history


# Wrap the base chain so each invocation automatically loads the session's
# stored messages into "chat_history" and appends the new turn afterwards.
# The session is selected via config={"configurable": {"session_id": ...}}.
chat_with_message_history = RunnableWithMessageHistory(
    chain,
    get_session_history,
    input_messages_key='input',
    history_messages_key='chat_history',
)


# 提取指定条数的消息列表
def summarize_history(current_input):
    """Split the session's stored history into recent messages plus a summary.

    The two most recent messages are kept verbatim; everything older is
    compressed into a single summary message by the LLM.

    Args:
        current_input: the chain input dict; must carry the session id at
            current_input["config"]["configurable"]["session_id"].

    Returns:
        dict with:
          - "original_messages": the messages kept verbatim (up to 2)
          - "summary": the AI-generated summary message, or None when there
            is nothing to summarize

    Raises:
        ValueError: when no session_id is present in the input's config.
    """
    session_id = current_input['config']['configurable']['session_id']
    if not session_id:
        raise ValueError("session_id is required in config.configurable")

    # All persisted messages for this session.
    stored_messages = get_session_history(session_id).messages

    # Keep the last two messages as-is; summarize the rest. Slicing is safe
    # for 0 or 1 stored messages — the original code left these names unbound
    # in that case and crashed on the first turn of a fresh session.
    kept_messages = stored_messages[-2:]
    messages_to_summarize = stored_messages[:-2]

    summary_message = None
    if messages_to_summarize:
        summarize_prompt = ChatPromptTemplate.from_messages([
            ("system", "请将以下对话历史压缩为一条保留关键信息的摘要信息"),
            # The keyword is lowercase `optional` — the original `Optional=True`
            # is not a valid MessagesPlaceholder argument.
            MessagesPlaceholder(variable_name="chat_history", optional=True),
            ("human", "请生成包含上述对话核心内容的摘要，保留重要事实和决策"),
        ])
        summarize_chain = summarize_prompt | llm
        summary_message = summarize_chain.invoke({'chat_history': messages_to_summarize})

    # Rewriting the database (clear, then re-add summary + kept messages) is
    # intentionally left disabled:
    # chat_history.clear()
    # chat_history.add_message(summary_message)
    # for msg in kept_messages:
    #     chat_history.add_message(msg)
    return {
        "original_messages": kept_messages,  # messages kept verbatim
        "summary": summary_message,  # LLM summary, or None when nothing to summarize
    }


# result = chat_with_message_history.invoke({'input': '你好，我是lp'}, config={"configurable": {"session_id": "user123"}})
# print(result)
#
# result1 = chat_with_message_history.invoke({'input': '我的名字叫什么'}, config={"configurable": {"session_id": "user123"}})
# print(result1)

# 将输入数据完整的传入到下一个节点，方法允许在保留原始输入的同时，通过指定键值对将额外的信息添加到传递的数据中
# final_chain = (RunnablePassthrough.assign(message_summarize=summarize_history) | chat_with_message_history)

# input:原来的，messages_summarized=summarize_messages 函数执行的结果
# final_chain pipeline:
#   1. assign message_summarized = summarize_history(...) -> recent messages + summary
#   2. rebuild the prompt variables: input, chat_history (the kept recent
#      messages) and a system message that embeds the summary when one exists
#   3. run the history-aware chat chain
final_chain = (
    RunnablePassthrough.assign(message_summarized=summarize_history)
    | RunnablePassthrough.assign(
        input=lambda x: x['input'],
        chat_history=lambda x: x['message_summarized']['original_messages'],
        # When no summary exists, fall back to the plain assistant persona.
        # The original fallback was the literal "无摘要" ("no summary"), which
        # replaced the entire system prompt and dropped the role description.
        system_message=lambda x: (
            f"你是一个乐于助人的助手，尽你所能回答所有问题，摘要：{x['message_summarized']['summary'].content}"
            if x['message_summarized'].get('summary')
            else "你是一个乐于助人的助手，尽你所能回答所有问题"
        ),
    )
    | chat_with_message_history
)

# Demo invocations. The session_id is duplicated inside the payload because
# summarize_history reads it from the input dict, while
# RunnableWithMessageHistory reads it from the config kwarg.
result = final_chain.invoke({'input': '你好，我是lp', "config": {"configurable": {"session_id": "user123"}}},
                            config={"configurable": {"session_id": "user123"}})
print(result)

result1 = final_chain.invoke({'input': '我的名字叫什么', "config": {"configurable": {"session_id": "user123"}}},
                             config={"configurable": {"session_id": "user123"}})
print(result1)


def add_message(chat_history, user_message):
    """Append the user's message to the chat log and clear the input box.

    Empty input is ignored. Returns (updated history, "") — the empty string
    is what resets the Gradio textbox.
    """
    if not user_message:
        return chat_history, ''
    chat_history.append({"role": "user", "content": user_message})
    return chat_history, ''


def execute_chain(chat_history):
    """Run the summarizing chat chain on the newest user message and append
    the assistant's reply to the Gradio chat log.

    Args:
        chat_history: Gradio messages-format list of
            {"role": ..., "content": ...} dicts; mutated in place.

    Returns:
        The updated chat_history including the assistant's reply.
    """
    # Pass the message text, not the whole {"role": ..., "content": ...}
    # dict — the prompt's {input} slot expects a string. (Also avoids
    # shadowing the builtin `input`.)
    latest_message = chat_history[-1]["content"]
    # NOTE(review): session_id is hard-coded, so all UI users share one
    # history — confirm whether per-user sessions are needed.
    result = final_chain.invoke(
        {'input': latest_message, "config": {"configurable": {"session_id": "user123"}}},
        config={'configurable': {"session_id": "user123"}},
    )
    chat_history.append({"role": "assistant", "content": result.content})
    return chat_history


# Gradio UI: a chat panel with a text input, a send button and a (currently
# unwired) microphone recorder.
with gr.Blocks(title="多聊天机器人", theme=gr.themes.Soft()) as block:
    chatbot = gr.Chatbot(type='messages', height=500, label="Chat with Bot")

    with gr.Row():
        # Text input area
        with gr.Column(scale=4):
            user_input = gr.Textbox(label="Message", placeholder="Enter your message here...", max_lines=5)

            submit_btn = gr.Button("Send", variant="primary")

        with gr.Column(scale=1):
            # NOTE(review): the recorded audio is never connected to a
            # handler — confirm whether speech input is still planned.
            audio_input = gr.Audio(sources=["microphone"], type="filepath", label="Record Audio", format='wav')

    chat_message = user_input.submit(add_message, [chatbot, user_input], [chatbot, user_input])  # user presses Enter to submit
    chat_message.then(execute_chain, chatbot, chatbot)  # then run the chat chain
    submit_btn.click(add_message, [chatbot, user_input], [chatbot, user_input])\
        .then(execute_chain, chatbot, chatbot)  # user clicks the button to submit


if __name__ == '__main__':
    block.launch()
