/**
 * Model configuration for the supported chat-completion models.
 * NOTE(review): the original header said "七鱼平台" (Qiyu), but every endpoint
 * below is a Baidu Qianfan / Wenxin Workshop API — likely a typo for
 * "千帆" (Qianfan); confirm with the project owner.
 */

// 模型配置
const models = [
  {
    name: "通义千问",
    id: "qwen3-235b-a22b",
    endpoint: "https://qianfan.baidubce.com/v2/chat/completions",
    description: "通义千问",
  },
  {
    name: "ERNIE-Bot-turbo",
    id: "ernie-bot-turbo",
    endpoint:
      "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant",
    description: "百度自行研发的大语言模型轻量版，响应速度更快，适合对话类场景",
  },
  {
    name: "ERNIE-Bot-4",
    id: "ernie-bot-4",
    endpoint:
      "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions_pro",
    description:
      "百度最新的大语言模型，拥有更强的理解和创作能力，支持更长的上下文",
  },
  {
    name: "BLOOMZ-7B",
    id: "bloomz-7b",
    endpoint:
      "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/bloomz_7b1",
    description: "基于BLOOM架构的开源大语言模型，支持多语言",
  },
  {
    name: "Llama-2-7B-Chat",
    id: "llama-2-7b",
    endpoint:
      "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/llama_2_7b",
    description: "Meta发布的Llama2模型，适合对话场景",
  },
  {
    name: "Llama-2-13B-Chat",
    id: "llama-2-13b",
    endpoint:
      "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/llama_2_13b",
    description: "Meta发布的Llama2模型，参数量更大，理解和生成能力更强",
  },
  {
    name: "Llama-2-70B-Chat",
    id: "llama-2-70b",
    endpoint:
      "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/llama_2_70b",
    description: "Meta发布的Llama2最大规模模型，拥有强大的理解和生成能力",
  },
  {
    name: "Qianfan-BLOOMZ-7B-compressed",
    id: "qianfan-bloomz-7b-compressed",
    endpoint:
      "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/qianfan_bloomz_7b_compressed",
    description:
      "千帆平台优化的BLOOMZ模型压缩版，在保持效果的同时提升了推理速度",
  },
  {
    name: "ChatGLM2-6B-32K",
    id: "chatglm2-6b-32k",
    endpoint:
      "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/chatglm2_6b_32k",
    description: "支持32K上下文的ChatGLM2模型，适合长文本理解和生成",
  },
];

/**
 * 获取支持的模型列表
 * @returns {Array} 模型列表
 */
export function getSupportedModels() {
  return models.map((model) => ({
    name: model.name,
    id: model.id,
    description: model.description,
  }));
}

/**
 * 根据模型ID获取API端点
 * @param {string} modelId 模型ID
 * @returns {string} API端点URL
 */
export function getModelEndpoint(modelId) {
  const model = models.find((m) => m.id === modelId);
  if (!model) {
    throw new Error(`Unsupported model: ${modelId}`);
  }
  return model.endpoint;
}

/**
 * 检查模型是否支持
 * @param {string} modelId 模型ID
 * @returns {boolean} 是否支持
 */
export function isModelSupported(modelId) {
  return models.some((m) => m.id === modelId);
}

export default models;
