import json

import uvicorn
from fastapi import FastAPI
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.messages import BaseMessage, messages_from_dict, message_to_dict
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableWithMessageHistory, ConfigurableFieldSpec
from langchain_redis import RedisChatMessageHistory
from langserve import add_routes
from redis import Redis

from langChain.config import model, embedding

# Redis connection URL: empty password (":@"), localhost:6379, logical DB 12.
redis_url = "redis://:@127.0.0.1:6379/12"


class RedisChatMessageHistory(BaseChatMessageHistory):
    """Chat message history persisted in a Redis hash.

    Storage layout: one Redis hash per user (key = ``user_id``); each hash
    field is a ``session_id`` whose value is the JSON-serialized message list.

    NOTE(review): this class shadows the ``RedisChatMessageHistory`` imported
    from ``langchain_redis`` above — consider renaming one of them to avoid
    confusion about which implementation is in use.
    """

    def __init__(self, user_id: str, session_id: str) -> None:
        """Bind the history to one (user_id, session_id) pair."""
        self.user_id = user_id
        self.session_id = session_id
        # Connection is created lazily by redis-py; no I/O happens here.
        self.redis_client = Redis.from_url(redis_url)

    @property
    def messages(self) -> list[BaseMessage]:
        """Return all stored messages for this session, or [] if none."""
        store_data = self.redis_client.hget(self.user_id, self.session_id)
        if store_data is None:
            return []
        return messages_from_dict(json.loads(store_data))

    def add_message(self, message: BaseMessage) -> None:
        """Append a single message to the stored history."""
        self.add_messages([message])

    def add_messages(self, messages) -> None:
        """Append several messages with ONE read and ONE write.

        Overrides the base-class default, which calls ``add_message`` once per
        message — that would re-read and re-write the whole history for every
        message (O(n^2) Redis traffic for a batch).

        NOTE(review): the read-modify-write is not atomic; concurrent writers
        to the same session can lose messages. A Redis WATCH/MULTI transaction
        or a list-based layout would fix this.
        """
        history = self.messages
        history.extend(messages)
        payload = json.dumps([message_to_dict(m) for m in history])
        self.redis_client.hset(self.user_id, self.session_id, payload)

    def clear(self) -> None:
        """Delete this session's history from the user's hash."""
        self.redis_client.hdel(self.user_id, self.session_id)


# 1. Prompt template with a placeholder for prior conversation turns.
_prompt_messages = [
    ("system", "你是一个专业的人工智能助手"),
    MessagesPlaceholder(variable_name="history"),  # past messages injected here
    ("human", "{input}"),
]
prompt = ChatPromptTemplate.from_messages(_prompt_messages)

# 3. Core chain: prompt formatting piped into the chat model.
chain = prompt | model

# NOTE(review): unused — leftover from an in-memory history pattern; history is
# Redis-backed via get_session_history. Safe to delete once confirmed nothing
# imports this name.
store = {}


def get_session_history(user_id: str, session_id: str) -> BaseChatMessageHistory:
    """Return the Redis-backed history for one (user_id, session_id) pair.

    Invoked by RunnableWithMessageHistory with keyword arguments matching
    the ConfigurableFieldSpec ids declared in ``history_factory_config``.
    """
    history = RedisChatMessageHistory(user_id, session_id)
    return history


# 4. Wrap the chain so each invocation automatically loads history before the
# call and appends the new turn afterwards.
chain_with_history = RunnableWithMessageHistory(
    chain,
    get_session_history,
    input_messages_key="input",      # dict key holding the new user message
    history_messages_key="history",  # prompt variable that receives past messages
    history_factory_config=[
        # Both fields are read from the run config and passed through to
        # get_session_history(user_id=..., session_id=...).
        ConfigurableFieldSpec(id="user_id", annotation=str, name="user_id", description="用户id", default="",
                              is_shared=True, ),
        ConfigurableFieldSpec(id="session_id", annotation=str, name="session_id", description="用户会话id", default="",
                              is_shared=True, ),
    ]
)

# 5. Create the LangServe FastAPI application.
app = FastAPI(title="聊天机器人", version="1.0")

# Mount the chain under /chat, exposing invoke/batch/stream (streaming
# supported) plus config_hashes, with the feedback endpoint enabled.
add_routes(
    app,
    chain_with_history,
    path="/chat",
    enable_feedback_endpoint=True,
    enabled_endpoints=("invoke", "batch", "stream", "config_hashes"),
)

if __name__ == "__main__":
    # Serve on all interfaces; playground at http://localhost:8000/chat/playground
    uvicorn.run(app=app, host="0.0.0.0", port=8000)
