import { ChatOpenAI } from "@langchain/openai";

// Base URL of the local Ollama server's OpenAI-compatible endpoint
// (used by `qwen` and `getModel` below).
const ollama = "http://127.0.0.1:11434/v1";

/**
 * Qwen 2.5 14B served by the local Ollama instance.
 *
 * Ollama does not validate API keys, but the OpenAI client requires a
 * non-empty one, so a placeholder is supplied.
 */
export const qwen = new ChatOpenAI({
    model: "qwen2.5:14b",
    temperature: 0,
    verbose: false,
    configuration: {
        baseURL: ollama,
        apiKey: "test",
    },
});

    export const gpt4 = new ChatOpenAI({
    model: "gpt-4o",
    configuration: {
        apiKey: "sk-SrZfBqhIlUE8BBbb0d974049B0Fa4f29943419653176F06c",
        baseURL: "https://pro.aiskt.com/v1",
    },
    temperature: 0,
});

/**
 * InternLM 2.5 20B chat model served by SiliconFlow.
 *
 * SECURITY: an API key was previously hard-coded here and committed to
 * source control — treat that key as compromised and rotate it. The key
 * is now read from the `SILICONFLOW_API_KEY` environment variable instead.
 */
export const v3 = new ChatOpenAI({
    model: "internlm/internlm2_5-20b-chat",
    temperature: 0,
    configuration: {
        // NOTE(review): set SILICONFLOW_API_KEY in the environment; never commit keys.
        apiKey: process.env.SILICONFLOW_API_KEY ?? "",
        baseURL: "https://api.siliconflow.cn/v1",
    },
});

/**
 * Builds a fresh local Ollama-backed chat model for one of the supported
 * model tags.
 *
 * @param name - Ollama model tag to use.
 * @returns A new `ChatOpenAI` instance pointed at the local Ollama server.
 */
export function getModel(name: 'qwen2.5:7b' | 'deepseek-r1:7b' | 'ishumilin/deepseek-r1-coder-tools:7b') {
    return new ChatOpenAI({
        temperature: 0,
        model: name,
        // BUG FIX: `verbose` is a ChatOpenAI option, not an OpenAI client
        // option — it was previously nested inside `configuration`, where
        // it was silently ignored. Moved to the top level (matching `qwen`).
        verbose: false,
        configuration: {
            apiKey: "test", // Ollama ignores the key, but the client requires one
            baseURL: ollama
        }
    })
}

/**
 * Builds a fresh DeepSeek chat model client.
 *
 * SECURITY: an API key was previously hard-coded here and committed to
 * source control — treat that key as compromised and rotate it. The key
 * is now read from the `DEEPSEEK_API_KEY` environment variable instead.
 *
 * @returns A new `ChatOpenAI` instance pointed at the DeepSeek API.
 */
export function getDeepseek() {
    return new ChatOpenAI({
        model: "deepseek-chat",
        temperature: 0,
        configuration: {
            // NOTE(review): set DEEPSEEK_API_KEY in the environment; never commit keys.
            apiKey: process.env.DEEPSEEK_API_KEY ?? "",
            baseURL: "https://api.deepseek.com/v1",
        },
    });
}