import { OpenAI } from "openai";

// SECURITY FIX: hard-coded fallback API keys (one active, two commented out)
// were removed from this file. Secrets must never be committed to source
// control — the previously embedded keys should be treated as compromised
// and rotated on the provider side.
//
// The key now comes from the environment only. If OPENAI_API_KEY is unset,
// the empty string is passed through and the request will fail with an
// authentication error, which `llm` already catches and maps to null.
const openaiApiKey = process.env.OPENAI_API_KEY ?? "";
// Non-secret default: Aliyun DashScope's OpenAI-compatible endpoint.
// `??` (not `||`) so an explicitly-set empty value is not silently clobbered.
const openaiApiBase =
  process.env.OPENAI_API_BASE ??
  "https://dashscope.aliyuncs.com/compatible-mode/v1";

/**
 * Options forwarded verbatim to `client.chat.completions.create`.
 *
 * NOTE(review): `messages: any[]` and the broad `[key: string]: any` index
 * signature disable type checking for callers. Consider narrowing `messages`
 * to the SDK's `ChatCompletionMessageParam[]` and dropping the index
 * signature — left unchanged here because tightening a public interface
 * breaks existing callers.
 */
export interface LLMOptions {
  /** Model identifier, e.g. a DashScope/OpenAI-compatible model name. */
  model: string;
  /** Chat history in OpenAI message format ({ role, content } objects). */
  messages: any[];
  /** When true, the completion is returned as a stream. */
  stream?: boolean;

  // Escape hatch: any extra provider-specific parameters (temperature,
  // max_tokens, …) pass through to the API untyped.
  [key: string]: any;
}

/**
 * Sends a chat-completion request to the configured OpenAI-compatible
 * endpoint.
 *
 * @param options - model, messages, and any extra provider parameters;
 *                  passed straight through to the SDK.
 * @returns The SDK response (a stream when `options.stream` is true), or
 *          `null` if anything throws — callers must null-check.
 */
export const llm = async (options: LLMOptions) => {
  try {
    // Client construction stays inside the try so that constructor failures
    // (e.g. a missing API key) are also mapped to null instead of escaping.
    const api = new OpenAI({
      apiKey: openaiApiKey,
      baseURL: openaiApiBase,
    });
    const response = await api.chat.completions.create(options);
    return response;
  } catch (error) {
    // Failures are logged and collapsed to null by design.
    console.error("调用LLM异常：", error);
    return null;
  }
};
