import uvicorn
from fastapi import FastAPI
from langchain_community.chat_models import ChatTongyi
from langchain_core.chat_history import BaseChatMessageHistory
from langchain_core.messages import message_to_dict, messages_from_dict
from langchain_core.output_parsers import StrOutputParser
from langserve import add_routes
import os
import json
from pathlib import Path
from langchain_core.prompts import ChatPromptTemplate, MessagesPlaceholder
from langchain_core.runnables import RunnableWithMessageHistory

# Directory where per-session chat histories are persisted, one JSON file
# per session id. Created eagerly at import time so handlers never race on it.
HISTORY_DIR = Path("chat_histories")
HISTORY_DIR.mkdir(exist_ok=True)


# File-backed chat message history: one JSON file per session under HISTORY_DIR.
class FileChatMessageHistory(BaseChatMessageHistory):
    """Chat history that persists messages as a JSON file per session.

    Fixes over the original:
    - all file I/O now uses ``encoding="utf-8"`` explicitly; the previous code
      relied on the platform default encoding, which can fail or corrupt
      Chinese message content on Windows (default GBK),
    - ``ensure_ascii=False`` keeps stored Chinese text human-readable instead
      of ``\\uXXXX`` escapes (JSON content is semantically unchanged),
    - ``__init__`` reuses ``_save_messages`` instead of a second, differently
      formatted ``json.dump`` call.
    """

    def __init__(self, session_id: str):
        # NOTE(review): session_id is interpolated into a filename; FastAPI
        # path params normally can't contain "/", but verify no other caller
        # can pass a traversal string like "../x".
        self.session_id = session_id
        self.file_path = HISTORY_DIR / f"{session_id}.json"
        # Create an empty history file on first use so later reads never fail.
        if not self.file_path.exists():
            self._save_messages([])

    @property
    def messages(self):
        """Load and deserialize the full message history from disk."""
        with open(self.file_path, "r", encoding="utf-8") as f:
            data = json.load(f)
        return messages_from_dict(data)

    def add_message(self, message):
        """Append *message* to the history and persist it to disk."""
        current_messages = self.messages
        current_messages.append(message)
        self._save_messages(current_messages)

    def clear(self):
        """Erase all stored messages for this session."""
        self._save_messages([])

    def _save_messages(self, messages):
        """Serialize *messages* to the session's JSON file."""
        with open(self.file_path, "w", encoding="utf-8") as f:
            json.dump(
                [message_to_dict(msg) for msg in messages],
                f,
                ensure_ascii=False,
                indent=2,
            )


# Chat model: Tongyi Qianwen via the LangChain community wrapper.
model = ChatTongyi(
    model_name="qwen-turbo",  # alternatives: "qwen-plus", "qwen-max", etc.
    temperature=0.7,  # controls generation diversity
    api_key=os.getenv("DASHSCOPE_API_KEY"),
    streaming=True  # enable streaming output
)

# Prompt template with a history placeholder so prior turns are injected
# into every request by RunnableWithMessageHistory.
prompt = ChatPromptTemplate.from_messages([
    ("system", "你是一个专业的人工智能助手"),
    MessagesPlaceholder(variable_name="history"),  # slot for prior messages
    ("human", "{input}")
])
# Compose prompt -> model into a runnable chain.
chain = prompt | model

store = {}


def get_session_history(session_id: str) -> BaseChatMessageHistory:
    """Return the file-backed message history for *session_id*.

    A new wrapper object is built per call; the underlying JSON file is
    created on first access and reused afterwards.
    """
    history = FileChatMessageHistory(session_id)
    return history


# Wrap the chain so each invocation loads and saves per-session history.
chain_with_history = RunnableWithMessageHistory(
    chain,
    get_session_history,
    input_messages_key="input",  # key carrying the user's new message
    history_messages_key="history"  # key filled from stored history
)

# LangServe application exposing the chain over HTTP.
app = FastAPI(title="聊天机器人", version="1.0")

# Mount the chain at /chat; the "stream" endpoint provides streaming output.
add_routes(
    app,
    chain_with_history,
    path="/chat",
    enable_feedback_endpoint=True,
    enabled_endpoints=("invoke", "batch", "stream", "config_hashes"),
)


# Admin endpoint: enumerate stored session IDs.
@app.get("/sessions", tags=["会话管理"], summary="列出所有会话ID")
def list_sessions():
    """Return the IDs of all sessions that have a stored history file.

    Fix: the pattern must be "*.json" — the original "\\*.json" escaped the
    wildcard, so the glob never matched any history file and the endpoint
    always returned an empty list.
    """
    return [f.stem for f in HISTORY_DIR.glob("*.json")]


@app.delete("/sessions/{session_id}", tags=["会话管理"], summary="删除会话以及历史记录")
def delete_session(session_id: str):
    """Remove the stored history file for the given session, if any."""
    target = HISTORY_DIR / f"{session_id}.json"
    # Guard clause: nothing on disk for this id.
    if not target.exists():
        return {"status": "not_found"}
    target.unlink()
    return {"status": "deleted", "session_id": session_id}


if __name__ == "__main__":
    # Start the server (playground UI at http://localhost:8000/chat/playground).
    uvicorn.run(app, host="0.0.0.0", port=8000)
