import { createParser } from "eventsource-parser";
import { NextRequest } from "next/server";
import { requestOpenai } from "../common";

/**
 * Forwards the chat request to OpenAI and converts the upstream SSE
 * response into a plain-text ReadableStream of content deltas.
 *
 * On a non-OK upstream response, returns a user-facing error string
 * (in Chinese, matching the client's expectations) instead of a stream.
 */
async function createStream(req: NextRequest) {
  const encoder = new TextEncoder();
  const decoder = new TextDecoder();

  const res = await requestOpenai(req);
  if (!res.ok) {
    const text = await res.text();
    // Ordered mapping of known OpenAI error markers to user-facing messages.
    // Order matters: the first matching marker wins.
    const errorMessages: Array<[string, string]> = [
      ["invalid_api_key", "错误代码180822：秘钥错误"],
      ["context_length_exceeded", "错误代码180822：上下文传输过长，清理记录后再提问"],
      ["rate_limit_reached", "错误代码180822：同时访问人数过多，请等待一会再提问"],
      ["access_terminated", "错误代码180822：秘钥被封禁"],
      ["no_api_key", "错误代码180822：秘钥错误"],
      ["insufficient_quota", "错误代码180822：秘钥余额不足"],
      ["account_deactivated", "错误代码180822：账户被禁用"],
      ["model_overloaded", "错误代码180822：OpenAI模型超负荷，请重新发起请求"],
    ];
    for (const [marker, message] of errorMessages) {
      if (text.includes(marker)) {
        return message;
      }
    }
    // An empty body usually means the upstream request timed out.
    if (!text) {
      return "错误代码180822：OpenAI服务器访问超时或未知类型错误";
    }
    return "错误代码180822：OpenAI服务器故障" + text;
  }

  const stream = new ReadableStream({
    async start(controller) {
      // Tracks whether the controller has been closed (via [DONE] or
      // upstream EOF) so we never enqueue into / close a closed controller.
      let closed = false;

      function onParse(event: { type: string; data?: string }) {
        if (event.type !== "event" || event.data === undefined || closed) {
          return;
        }
        const data = event.data;
        // https://beta.openai.com/docs/api-reference/completions/create#completions/create-stream
        if (data === "[DONE]") {
          closed = true;
          controller.close();
          return;
        }
        try {
          const json = JSON.parse(data);
          // Role-only deltas carry no `content`; emit nothing for them.
          const text = json.choices?.[0]?.delta?.content ?? "";
          controller.enqueue(encoder.encode(text));
        } catch (e) {
          console.log("[catch]", e);
          closed = true;
          controller.error(e);
        }
      }

      const parser = createParser(onParse);
      if (res.body === null) {
        controller.close();
        return;
      }
      for await (const chunk of res.body as any) {
        if (closed) break;
        // `stream: true` keeps multi-byte UTF-8 sequences that are split
        // across chunk boundaries intact (essential for CJK output).
        parser.feed(decoder.decode(chunk, { stream: true }));
      }
      // Close the stream even if the upstream ended without a [DONE]
      // event, so the consumer never hangs waiting for more data.
      if (!closed) {
        closed = true;
        controller.close();
      }
    },
  });
  return stream;
}

/**
 * Edge POST handler: streams the OpenAI completion back to the client.
 * On failure, responds with HTTP 500 instead of returning undefined.
 */
export async function POST(req: NextRequest) {
  // NOTE(security): do not log the raw token header — it is a secret.
  // Only record whether one was supplied.
  console.log("token present:", req.headers.get("token") !== null);
  try {
    const stream = await createStream(req);
    return new Response(stream);
  } catch (error) {
    console.error("[Chat Stream]", error);
    // Always return a Response so the client sees an HTTP error rather
    // than a dropped request.
    return new Response("错误代码180822：OpenAI服务器访问超时或未知类型错误", {
      status: 500,
    });
  }
}

// Run this route on the Edge runtime; Next.js reads this export statically,
// so keep it a plain object literal.
export const config = {
  runtime: "edge",
};
