import { NextRequest, NextResponse } from "next/server";
import { getCurrentUserId } from "@/lib/auth";
import { prisma } from "@/lib/prisma";
import { getModelSettings } from "@/lib/model-settings";
import axios from "axios";
import { searchKnowledgeBase } from "@/lib/vector-search";

export const maxDuration = 300; // Next.js route segment config: max handler duration in seconds, raised for long streaming replies

// Streaming plain-text HTTP response (stand-in for the `ai` package's
// StreamingTextResponse): wraps a ReadableStream and forces a UTF-8
// text/plain content type while allowing extra headers / status override.
class CustomStreamingResponse extends Response {
  constructor(stream: ReadableStream, options?: { status?: number; headers?: HeadersInit }) {
    const opts = options ?? {};
    super(stream, {
      status: opts.status || 200,
      headers: {
        'Content-Type': 'text/plain; charset=utf-8',
        ...opts.headers,
      },
    });
  }
}

export async function POST(req: NextRequest) {
  try {
    const userId = getCurrentUserId();

    // 验证用户认证
    if (!userId) {
      return NextResponse.json(
        { success: false, error: "未授权" },
        { status: 401 }
      );
    }

    // 获取请求体
    const body = await req.json();
    const { message, chatId, knowledgeBaseId } = body;

    // 验证请求参数
    if (!message || typeof message !== "string") {
      return NextResponse.json(
        { success: false, error: "消息不能为空" },
        { status: 400 }
      );
    }

    if (!knowledgeBaseId) {
      return NextResponse.json(
        { success: false, error: "知识库ID不能为空" },
        { status: 400 }
      );
    }

    // 获取用户
    const user = await prisma.user.findUnique({
      where: { clerkId: userId },
    });

    if (!user) {
      return NextResponse.json(
        { success: false, error: "用户不存在" },
        { status: 404 }
      );
    }

    // 验证知识库是否属于该用户
    const knowledgeBase = await prisma.knowledgeBase.findFirst({
      where: {
        id: knowledgeBaseId,
        userId: user.id,
      },
    });

    if (!knowledgeBase) {
      return NextResponse.json(
        { success: false, error: "知识库不存在或无权访问" },
        { status: 404 }
      );
    }

    // 获取或创建聊天
    let chat;
    if (chatId) {
      // 验证聊天是否属于该知识库
      chat = await prisma.chat.findFirst({
        where: {
          id: chatId,
          knowledgeBaseId,
        },
        include: {
          messages: {
            orderBy: {
              createdAt: "asc",
            },
          },
        },
      });

      if (!chat) {
        return NextResponse.json(
          { success: false, error: "聊天不存在或无权访问" },
          { status: 404 }
        );
      }
    } else {
      // 创建新聊天
      chat = await prisma.chat.create({
        data: {
          knowledgeBaseId,
          title: message.slice(0, 50) + (message.length > 50 ? "..." : ""),
        },
        include: {
          messages: true,
        },
      });
    }

    // 保存用户消息
    const userMessage = await prisma.message.create({
      data: {
        content: message,
        role: "user",
        chatId: chat.id,
      },
    });

    // 获取模型设置
    const modelSettings = await getModelSettings();
    const { chatModel } = modelSettings;

    // 获取聊天历史
    const chatHistory = chat.messages.map((msg: any) => ({
      role: msg.role,
      content: msg.content,
    }));

    // 搜索知识库中的相关内容
    const searchResults = await searchKnowledgeBase(message, knowledgeBaseId);

    // 构建系统提示词
    let systemPrompt = `你是一个智能助手，能够根据提供的知识库内容回答问题。
只回答与知识库内容相关的问题。如果问题不在知识库范围内，请礼貌回复你没有相关信息。
始终用中文回答，除非用户明确要求使用其他语言。
回答时引用相关的知识库内容作为依据。`;

    // 添加知识库内容
    let context = "";
    if (searchResults.length > 0) {
      context = "以下是来自知识库的相关信息：\n\n";
      searchResults.forEach((result, index) => {
        context += `[${index + 1}] ${result.content}\n\n`;
      });
    } else {
      context = "知识库中没有找到与此问题相关的信息。";
    }

    // 创建完整的系统提示
    const fullSystemPrompt = `${systemPrompt}\n\n${context}`;

    // 构建消息数组
    const messages = [
      { role: "system", content: fullSystemPrompt },
      ...chatHistory,
      { role: "user", content: message },
    ];

    // 根据模型类型选择响应方式
    if (chatModel.type === "openai") {
      // 使用OpenAI流式输出
      const response = await fetch(chatModel.apiUrl, {
        method: "POST",
        headers: {
          "Content-Type": "application/json",
          Authorization: `Bearer ${chatModel.apiKey}`,
        },
        body: JSON.stringify({
          model: chatModel.model,
          messages,
          temperature: chatModel.temperature,
          top_p: chatModel.topP,
          max_tokens: chatModel.maxTokens,
          stream: true,
        }),
      });

      // 处理OpenAI流式响应
      const processStream = new TransformStream();
      const writer = processStream.writable.getWriter();
      
      // 创建响应流
      const responseStream = new CustomStreamingResponse(processStream.readable);
      
      // 处理流式数据并保存完整响应
      let fullText = '';
      
      // 启动异步处理流程
      (async () => {
        const reader = response.body?.getReader();
        const decoder = new TextDecoder();
        
        try {
          if (!reader) throw new Error("没有响应流");
          
          while (true) {
            const { done, value } = await reader.read();
            if (done) break;
            
            const chunk = decoder.decode(value, { stream: true });
            
            // 处理OpenAI的SSE格式
            const lines = chunk.split('\n').filter(line => line.trim() !== '');
            
            for (const line of lines) {
              if (line.startsWith('data: ')) {
                const data = line.slice(6);
                
                if (data === '[DONE]') continue;
                
                try {
                  const json = JSON.parse(data);
                  const content = json.choices[0]?.delta?.content || '';
                  
                  if (content) {
                    fullText += content;
                    await writer.write(new TextEncoder().encode(content));
                  }
                } catch (e) {
                  console.error('解析OpenAI流数据错误:', e);
                }
              }
            }
          }
        } catch (error) {
          console.error('处理OpenAI流错误:', error);
        } finally {
          // 关闭流并保存完整响应
          writer.close();
          
          try {
            if (fullText) {
              await prisma.message.create({
                data: {
                  content: fullText,
                  role: "assistant",
                  chatId: chat.id,
                },
              });
            }
          } catch (saveError) {
            console.error('保存响应错误:', saveError);
          }
        }
      })();
      
      return responseStream;
    } else {
      // 自定义模型请求
      const response = await axios.post(
        chatModel.apiUrl,
        {
          model: chatModel.model,
          messages,
          temperature: chatModel.temperature,
          top_p: chatModel.topP,
          max_tokens: chatModel.maxTokens,
        },
        {
          headers: {
            "Content-Type": "application/json",
            Authorization: `Bearer ${chatModel.apiKey}`,
          },
          responseType: "stream",
        }
      );

      // 创建自定义流
      const customStream = new ReadableStream({
        async start(controller) {
          let fullText = '';
          
          try {
            for await (const chunk of response.data) {
              const text = chunk.toString();
              fullText += text;
              controller.enqueue(new TextEncoder().encode(text));
            }
            
            controller.close();
            
            // 保存完整的响应
            await prisma.message.create({
              data: {
                content: fullText,
                role: "assistant",
                chatId: chat.id,
              },
            });
          } catch (error) {
            console.error("流处理或保存回复错误:", error);
            controller.close();
          }
        }
      });

      // 返回流式响应
      return new CustomStreamingResponse(customStream);
    }
  } catch (error) {
    console.error("聊天处理错误:", error);
    return NextResponse.json(
      { success: false, error: "处理聊天请求时出错" },
      { status: 500 }
    );
  }
} 