import { NextRequest, NextResponse } from "next/server";
import { auth } from "@clerk/nextjs";
import { prisma } from "@/lib/prisma";
import OpenAI from "openai";
import { StreamResponseEventType } from "@/types";
import { DEFAULT_SYSTEM_PROMPT } from "@/lib/constants";

// Module-level OpenAI client shared by this route.
// NOTE(review): assumes OPENAI_API_KEY is present in the environment —
// confirm deployment config; the SDK cannot authenticate without it.
const openai = new OpenAI({
  apiKey: process.env.OPENAI_API_KEY,
});

// 向量相似度搜索函数（简化版，实际项目中需要实现向量搜索）
/**
 * Retrieves documents from a knowledge base to use as chat context.
 *
 * Placeholder implementation: a real version should perform vector
 * similarity search against `query`. For now it returns up to 3 documents
 * from the knowledge base and ignores `query` entirely.
 *
 * @param knowledgeBaseId - Knowledge base whose documents are searched.
 * @param query - The user's message (currently unused; kept for the
 *   eventual vector-search implementation).
 * @returns Up to 3 documents, each with its content truncated to 500 chars.
 */
async function searchRelevantDocuments(knowledgeBaseId: string, query: string) {
  const MAX_EXCERPT_LENGTH = 500;

  const documents = await prisma.document.findMany({
    where: {
      knowledgeBaseId,
    },
    take: 3, // cap the context at 3 documents
  });

  return documents.map((doc) => ({
    id: doc.id,
    name: doc.name,
    // Only append an ellipsis when the excerpt was actually truncated;
    // the original appended "..." unconditionally, even to short documents.
    content:
      doc.content.length > MAX_EXCERPT_LENGTH
        ? doc.content.slice(0, MAX_EXCERPT_LENGTH) + "..."
        : doc.content,
  }));
}

/**
 * Streaming chat completion endpoint (SSE).
 *
 * Flow: authenticate → validate body → load chat + recent history →
 * verify ownership → persist the user message → build RAG context →
 * stream the OpenAI completion back as server-sent events, persisting
 * the assistant's full reply once the stream finishes.
 *
 * Responses: 401 unauthenticated, 400 missing message, 404 unknown chat,
 * 403 not the owner, 500 unexpected failure; otherwise a text/event-stream.
 */
export async function POST(
  req: NextRequest,
  { params }: { params: { id: string } }
) {
  try {
    const { userId } = auth();

    // Require an authenticated user.
    if (!userId) {
      return NextResponse.json(
        { success: false, error: "未授权" },
        { status: 401 }
      );
    }

    const { id } = params;
    const body = await req.json();
    const { message } = body;

    // Validate the request payload.
    if (!message || typeof message !== "string") {
      return NextResponse.json(
        { success: false, error: "消息内容是必填项" },
        { status: 400 }
      );
    }

    // Load the chat with the 10 most RECENT messages. Fetching with
    // `desc` + `take` selects the latest rows (ordering `asc` with `take`
    // would return the oldest 10 instead); they are reversed back into
    // chronological order below when building the prompt.
    const chat = await prisma.chat.findUnique({
      where: { id },
      include: {
        knowledgeBase: true,
        messages: {
          orderBy: {
            createdAt: "desc",
          },
          take: 10,
        },
      },
    });

    if (!chat) {
      return NextResponse.json(
        { success: false, error: "聊天不存在" },
        { status: 404 }
      );
    }

    // Verify the chat belongs to the authenticated user.
    const user = await prisma.user.findUnique({
      where: { clerkId: userId },
    });

    if (!user || chat.knowledgeBase.userId !== user.id) {
      return NextResponse.json(
        { success: false, error: "无权访问此聊天" },
        { status: 403 }
      );
    }

    // Persist the user's message; its id is echoed in the Start event so
    // the client can correlate the stream with the stored conversation.
    const userMessage = await prisma.message.create({
      data: {
        content: message,
        role: "user",
        chatId: id,
      },
    });

    // Fetch knowledge-base documents to ground the answer.
    const relevantDocs = await searchRelevantDocuments(
      chat.knowledgeBaseId,
      message
    );

    // Flatten the documents into a single context string for the system prompt.
    const context = relevantDocs
      .map((doc) => `文档: ${doc.name}\n内容: ${doc.content}`)
      .join("\n\n");

    // Build chronological message history. `as const` keeps the role as the
    // literal union "user" | "assistant" (it would otherwise widen to
    // `string` and fail the OpenAI SDK's message type).
    const messageHistory = chat.messages
      .slice()
      .reverse()
      .map((msg) => ({
        role: msg.role === "user" ? ("user" as const) : ("assistant" as const),
        content: msg.content,
      }));

    // Append the new user message (it was saved after the history fetch,
    // so it is not part of `chat.messages`).
    messageHistory.push({
      role: "user" as const,
      content: message,
    });

    // Stream the completion back as server-sent events.
    const encoder = new TextEncoder();
    const customReadable = new ReadableStream({
      async start(controller) {
        try {
          // Start event — carries the persisted user message id.
          controller.enqueue(
            encoder.encode(
              `data: ${JSON.stringify({
                type: StreamResponseEventType.Start,
                messageId: userMessage.id,
              })}\n\n`
            )
          );

          // Kick off the OpenAI streaming completion with the RAG context
          // injected into the system prompt.
          const stream = await openai.chat.completions.create({
            model: "gpt-3.5-turbo",
            messages: [
              {
                role: "system",
                content: `${DEFAULT_SYSTEM_PROMPT}\n\n以下是知识库的相关内容:\n${context}`,
              },
              ...messageHistory,
            ],
            stream: true,
          });

          let assistantResponse = "";

          // Forward each token to the client as it arrives, accumulating
          // the full reply for persistence afterwards.
          for await (const chunk of stream) {
            const content = chunk.choices[0]?.delta?.content || "";
            if (content) {
              assistantResponse += content;
              controller.enqueue(
                encoder.encode(
                  `data: ${JSON.stringify({
                    type: StreamResponseEventType.Token,
                    token: content,
                  })}\n\n`
                )
              );
            }
          }

          // Persist the assistant's complete reply.
          await prisma.message.create({
            data: {
              content: assistantResponse,
              role: "assistant",
              chatId: id,
            },
          });

          // End event — signals the client that the stream is complete.
          controller.enqueue(
            encoder.encode(
              `data: ${JSON.stringify({
                type: StreamResponseEventType.End,
              })}\n\n`
            )
          );

          controller.close();
        } catch (error) {
          console.error("流式响应错误:", error);
          // Surface the failure to the client as an Error event rather
          // than dropping the connection silently.
          controller.enqueue(
            encoder.encode(
              `data: ${JSON.stringify({
                type: StreamResponseEventType.Error,
                error: "生成响应时出错",
              })}\n\n`
            )
          );
          controller.close();
        }
      },
    });

    // SSE response headers: keep the connection open and uncached.
    return new Response(customReadable, {
      headers: {
        "Content-Type": "text/event-stream",
        "Cache-Control": "no-cache",
        Connection: "keep-alive",
      },
    });
  } catch (error) {
    console.error("聊天完成请求失败:", error);
    return NextResponse.json(
      {
        success: false,
        error: "聊天完成请求失败",
        details: error instanceof Error ? error.message : String(error),
      },
      { status: 500 }
    );
  }
}