import { OpenAIChatModels } from "@/utils/OpenAI";
import type { NextApiRequest, NextApiResponse } from "next";
import { OpenAIApi, Configuration } from "openai";

/**
 * API route: returns the model catalog for the chat UI.
 *
 * Response body (200):
 *  - `chatModels`: models usable with the chat completions API, each with
 *    `id`, a human-readable `name`, and `maxLimit` (context window in tokens).
 *  - `models`: flat list of model ids, derived from `chatModels` so the two
 *    lists can never drift apart.
 *
 * Errors: if an upstream-style error with a `response` is thrown, its status
 * and data are forwarded; otherwise a 500 with the error message is returned.
 */
export default async function handler(
  req: NextApiRequest,
  res: NextApiResponse
) {
  try {
    // Models that can interface with the chat completions API.
    // NOTE(review): this list is hard-coded rather than fetched from the
    // OpenAI models endpoint — keep it in sync with the models the app supports.
    const chatModels = [
      {
        id: "gpt-3.5-turbo-16k-0613",
        name: "GPT-3.5 Turbo (16k, 06/13)",
        maxLimit: 16384,
      },
      {
        id: "gpt-3.5-turbo",
        name: "GPT-3.5 Turbo",
        maxLimit: 4096,
      },
      {
        id: "gpt-4",
        name: "GPT-4",
        maxLimit: 8192,
      },
    ];

    // Derive the flat id list from chatModels instead of maintaining a
    // second hand-written array (the old one contained the invalid id
    // "gpt-3.5" and omitted "gpt-3.5-turbo").
    const models = chatModels.map((m) => m.id);

    return res.status(200).json({
      models,
      chatModels,
    });
  } catch (e: unknown) {
    // Narrow the unknown error: axios-style errors carry a `response`
    // whose status/data we forward verbatim.
    const err = e as {
      response?: { status: number; data: unknown };
      message?: string;
    };
    if (err.response) {
      return res.status(err.response.status).json({ error: err.response.data });
    }

    // Fall back to a generic 500; guard against non-Error throwables that
    // have no `message` so the client never sees `error: undefined`.
    return res
      .status(500)
      .json({ error: err.message ?? "Internal server error" });
  }
}
