import { ChatOpenAI } from "@langchain/openai";

/**
 * Optional overrides for the model factory. Any field left undefined
 * falls back to the corresponding environment-variable / default value
 * inside `useModel`.
 */
interface IProps {
  /** Model identifier (e.g. "qwen-max"). */
  modelName?: string;
  /** Sampling temperature; lower values are more deterministic. */
  temperature?: number;
  /** Upper bound on generated tokens per completion. */
  maxTokens?: number;
  /** API key; falls back to DASHSCOPE_API_KEY when omitted. */
  apiKey?: string;
  /** OpenAI-compatible endpoint base URL. */
  baseURL?: string;
}
/**
 * Build a `ChatOpenAI` instance.
 *
 * Precedence for every setting: value passed by the caller → environment
 * variable → hard-coded default.
 *
 * @param props caller-supplied overrides (all optional)
 * @returns a configured `ChatOpenAI` model
 */
export const useModel = ({ temperature, modelName, maxTokens, apiKey, baseURL }: IProps) => {
  // Parse a numeric env var, falling back when unset or not a valid number
  // (previously parseFloat/parseInt could propagate NaN into the client).
  const numberFromEnv = (raw: string | undefined, fallback: number): number => {
    const parsed = raw === undefined || raw === "" ? NaN : Number(raw);
    return Number.isFinite(parsed) ? parsed : fallback;
  };
  const config = {
    modelName: process.env.MODEL_NAME || "qwen-max",
    temperature: numberFromEnv(process.env.TEMPERATURE, 0.7),
    // Token budget must be an integer.
    maxTokens: Math.trunc(numberFromEnv(process.env.MAX_TOKENS, 2000)),
    apiKey: process.env.DASHSCOPE_API_KEY || "",
    // BASE_URL was previously ignored despite the "env var as fallback"
    // contract; honor it before the hard-coded default.
    baseURL: process.env.BASE_URL || "https://dashscope.aliyuncs.com/compatible-mode/v1",
  };
  const model = new ChatOpenAI({
    modelName: modelName ?? config.modelName,
    // Lower temperature keeps the LLM closer to the facts, with less
    // free-form improvisation.
    temperature: temperature ?? config.temperature,
    maxTokens: maxTokens ?? config.maxTokens,
    apiKey: apiKey ?? config.apiKey,
    configuration: {
      baseURL: baseURL ?? config.baseURL,
    },
  });
  return model;
};
