import { NextRequest, NextResponse } from "next/server";
import { readFileSync } from "fs";
import * as path from "path";

// Timeout for upstream requests to the Ollama API, in milliseconds (60 s).
const TIMEOUT = 60000;

// Default Ollama settings, used when settings.json is missing or incomplete.
const DEFAULT_API_ENDPOINT = "http://localhost:11434/api/chat";
const DEFAULT_MODEL = "deepseek-r1:1.5b";

/**
 * Normalizes a user-configured endpoint URL so it targets Ollama's
 * /api/chat route.
 *
 * - Strips a single trailing slash.
 * - Appends "/chat" to a bare ".../api" URL.
 * - Appends "/api/chat" to a bare host URL.
 * - Leaves URLs already containing some other "/api/..." path untouched,
 *   assuming the caller configured them deliberately.
 */
function normalizeApiEndpoint(raw: string): string {
  const endpoint = raw.endsWith("/") ? raw.slice(0, -1) : raw;
  if (endpoint.endsWith("/api/chat")) return endpoint;
  if (endpoint.endsWith("/api")) return `${endpoint}/chat`;
  if (endpoint.includes("/api/")) return endpoint;
  return `${endpoint}/api/chat`;
}

/**
 * Loads the API endpoint and model name from settings.json in the current
 * working directory, falling back to defaults if the file is absent,
 * unreadable, or not valid JSON.
 */
function loadApiSettings(): { apiEndpoint: string; model: string } {
  try {
    const settingsPath = path.resolve(process.cwd(), "settings.json");
    const settings = JSON.parse(readFileSync(settingsPath, "utf8"));
    return {
      apiEndpoint: normalizeApiEndpoint(settings.apiEndpoint || DEFAULT_API_ENDPOINT),
      model: settings.model || DEFAULT_MODEL,
    };
  } catch (error) {
    console.error("加载设置失败，使用默认值:", error);
    return { apiEndpoint: DEFAULT_API_ENDPOINT, model: DEFAULT_MODEL };
  }
}

/**
 * Forwards an upstream response body to the client verbatim. If reading the
 * upstream stream fails midway, a JSON error object is appended before the
 * stream closes — at that point response headers have already been sent, so
 * the HTTP status can no longer be changed.
 */
function forwardStream(upstream: ReadableStream<Uint8Array>): ReadableStream<Uint8Array> {
  const { readable, writable } = new TransformStream<Uint8Array, Uint8Array>();
  const writer = writable.getWriter();

  // Fire-and-forget pump; failures are reported in-band on the stream.
  void (async () => {
    const reader = upstream.getReader();
    try {
      while (true) {
        const { done, value } = await reader.read();
        if (done) {
          console.log("Ollama流结束");
          break;
        }
        // Pass each chunk through unmodified.
        await writer.write(value);
      }
    } catch (streamError) {
      console.error("流处理错误:", streamError);
      try {
        const errorMsg = JSON.stringify({
          error: streamError instanceof Error ? streamError.message : "处理流时出错",
        });
        await writer.write(new TextEncoder().encode(errorMsg));
      } catch (writeError) {
        console.error("写入错误响应失败:", writeError);
      }
    } finally {
      try {
        await writer.close();
      } catch (closeError) {
        console.error("关闭写入器错误:", closeError);
      }
    }
  })();

  return readable;
}

/**
 * Chat proxy endpoint: validates the incoming message list, forwards it to
 * the configured Ollama /api/chat endpoint, and returns either the complete
 * reply or a pass-through stream, depending on `streamingEnabled`.
 *
 * Request body: { messages: Message[], streamingEnabled?: boolean }
 * Responses:
 *   400 — malformed JSON body, or empty/invalid `messages`
 *   500 — upstream request failed before any bytes were sent to the client
 *   200 — upstream reply ({ message } JSON, or the raw Ollama stream)
 */
export async function POST(req: NextRequest) {
  try {
    // Bug fix: a malformed JSON body previously escaped to the outer catch
    // and produced a 500; it is a client error, so report 400 instead.
    let body;
    try {
      body = await req.json();
    } catch {
      return NextResponse.json({ error: "请求体不是有效的JSON" }, { status: 400 });
    }
    const { messages, streamingEnabled = true } = body;

    if (!messages || !Array.isArray(messages) || messages.length === 0) {
      return NextResponse.json({ error: "消息数组无效或为空" }, { status: 400 });
    }

    const { apiEndpoint, model } = loadApiSettings();
    console.log(`使用模型: ${model}，流式显示: ${streamingEnabled}`);
    console.log(`API端点: ${apiEndpoint}`);

    // `stream` already reflects streamingEnabled, so one request shape
    // serves both the streaming and non-streaming paths (the original
    // duplicated the fetch and redundantly re-spread `stream: false`).
    const ollamaRequest = {
      model,
      messages,
      stream: streamingEnabled,
      options: {
        temperature: 0.7,
        top_p: 0.9,
      },
    };

    const response = await fetch(apiEndpoint, {
      method: "POST",
      headers: { "Content-Type": "application/json" },
      body: JSON.stringify(ollamaRequest),
      signal: AbortSignal.timeout(TIMEOUT),
    });

    if (!response.ok) {
      throw new Error(`Ollama API错误: ${response.status} ${response.statusText}`);
    }

    if (!streamingEnabled) {
      // Non-streaming: relay Ollama's single JSON reply.
      const data = await response.json();
      return NextResponse.json({ message: data.message });
    }

    const responseBody = response.body;
    if (!responseBody) {
      throw new Error("响应体为空");
    }

    console.log("开始转发Ollama流式响应");
    return new NextResponse(forwardStream(responseBody), {
      headers: {
        "Content-Type": "application/json",
        "Cache-Control": "no-cache",
        "Connection": "keep-alive",
      },
    });
  } catch (error) {
    // Bug fix: upstream failures that occurred before streaming began used
    // to be written into a 200-status stream; no bytes have been sent to
    // the client at this point, so a real error status is returned instead.
    console.error("处理请求时出错:", error);
    return NextResponse.json(
      { error: error instanceof Error ? error.message : "处理请求时出错" },
      { status: 500 }
    );
  }
}