# api/chat.py
import asyncio
import json
import time
import uuid
from typing import AsyncGenerator, Generator

from fastapi import APIRouter, WebSocket, WebSocketDisconnect
from langchain.chains.summarize.map_reduce_prompt import prompt_template
from openai import OpenAI
from sqlalchemy import func
from sqlalchemy.ext.asyncio import async_session
from sqlmodel import select

from app.api.deps import SessionDep
from app.models.debate_models import DebateTurn
from app.models.user_models import User

router = APIRouter()
client = OpenAI(base_url="http://10.2.8.77:3000/v1", api_key="sk-93nWYhI8SrnXad5m9932CeBdDeDf4233B21d93D217095f22")

# 提示词模板
prompt_template = """
你是一个辩手。
你的任务是根据下面给定的已知信息与用户进行辩论。

已知信息：
{context}  # 检索到的原始文档

用户发言：
{query}  # 用户的提问/发言

辩论时请使用中文。
"""


def build_prompt(prompt_template, **kwargs):  # 提示词模板填充赋值
    # 创建空字典存储处理后的输入参数
    inputs = {}
    # 遍历所有关键字参数（k=参数名，v=参数值）
    for k, v in kwargs.items():
        # 检查当前参数值是否为纯字符串列表
        if isinstance(v, list) and all(isinstance(elem, str) for elem in v):
            # 如果是字符串列表，用双换行符连接列表元素（创建段落分隔效果）
            val = '\n\n'.join(v)
        else:
            # 否则，就保持其他类型值不变（数字、布尔值、对象等）
            val = v
        # 将处理后的值存入字典（保持原始参数名称作为键）
        inputs[k] = val
    # 使用字符串格式化方法，用处理后的参数替换模板中的占位符
    return prompt_template.format(**inputs)


async def llm_streamer(messages: list) -> Generator[str, None, None]:
    # print(messages)
    response = client.chat.completions.create(
        model="DeepSeek-R1",
        messages=messages,
        stream=True
    )

    for chunk in response:
        if chunk.choices[0].delta.content:
            yield chunk.choices[0].delta.content
            await asyncio.sleep(0.1)


@router.websocket("/stream/{session_id}")
async def websocket_chat(websocket: WebSocket, session_id: str, session: SessionDep):
    await websocket.accept()

    try:
        while True:
            data = await websocket.receive_text()
            message = json.loads(data)

            # 发送开始信号
            await websocket.send_json({
                "type": "start",
                "content": ""
            })
            ai_response = ""

            #根据session_id查询本次会话中的会话记录，并按顺序加入到messages中传入大模型
            stmt = select(DebateTurn).where(
                DebateTurn.session_id == uuid.UUID(session_id)
            ).order_by(DebateTurn.turn_index)
            result = session.exec(stmt)
            history_turns = result.all()

            messages = [{"role": "system", "content": "你是一个辩论教练，请对user内容进行深度点评，并结合上下文"}]
            for turn in history_turns:
                role = "assistant" if turn.speaker == "ai" else "user"
                messages.append({
                    "role": role,
                    "content": turn.content
                })
            #加入本轮用户的输入
            messages.append({
                "role": "user",
                "content": message["message"]
            })

            # 流式生成响应
            async for chunk in llm_streamer(messages):
                ai_response += chunk  # 收集完整回复
                await websocket.send_json({
                    "type": "stream",
                    "content": chunk
                })

            # 发送结束信号
            await websocket.send_json({
                "type": "end",
                "content": ""
            })
            stmt = select(func.count()).select_from(DebateTurn).where(DebateTurn.session_id == uuid.UUID(session_id))
            result = session.execute(stmt)
            turn_count = result.scalar_one()  # 当前已有的轮数
            next_index = turn_count  # 新的 user 是第 next_index 条

        # 保存聊天记录
            user_turn = DebateTurn(
                session_id=uuid.UUID(session_id),
                speaker="user",
                content=message["message"],
                turn_index=next_index
            )
            session.add(user_turn)

            ai_turn = DebateTurn(
                session_id=uuid.UUID(session_id),
                speaker="ai",
                content=ai_response,
                turn_index=next_index + 1
            )
            session.add(ai_turn)
            session.commit()

    except Exception as e:
        await websocket.send_json({
            "type": "error",
            "content": f"处理错误: {str(e)}"
        })
    finally:
        await websocket.close()




