import { TavilySearchResults } from "@langchain/community/tools/tavily_search";
import  {ChatOpenAI} from "@langchain/openai";
import {MemorySaver} from "@langchain/langgraph";
import  {HumanMessage} from "@langchain/core/messages";
import { createReactAgent} from "@langchain/langgraph/prebuilt";

import fs from 'fs';

/**
 * Persist raw binary data to disk (e.g. an image fetched as an ArrayBuffer).
 *
 * @param {ArrayBuffer} arrayBuffer - Raw bytes to write.
 * @param {string} filename - Destination path; overwritten if it exists.
 */
function saveArrayBufferAsImage(arrayBuffer, filename) {
    // Buffer.from(ArrayBuffer) wraps the bytes; writeFileSync flushes them synchronously.
    fs.writeFileSync(filename, Buffer.from(arrayBuffer));
}


// Tavily web-search tool, capped at 3 results per query.
// SECURITY: the API key was previously hard-coded here and committed to source;
// that key must be rotated. Read it from the environment instead.
const agentTools = [
    new TavilySearchResults({
        maxResults: 3,
        apiKey: process.env.TAVILY_API_KEY,
    }),
];

// Chat model backed by a local Ollama server exposing an OpenAI-compatible API.
// NOTE: `verbose` is a ChatOpenAI option, not an OpenAI client option — it was
// previously nested inside `configuration`, where it is silently ignored.
const agentModel = new ChatOpenAI({
    temperature: 0,
    model: "qwen2.5",
    verbose: true,
    configuration: {
        // Ollama does not validate the key, but the OpenAI client requires one.
        apiKey: "test",
        baseURL: "http://192.168.3.94:11434/v1",
    },
});

// const agentModel = new ChatOpenAI({
//     model: "gpt-4o-mini",
//     configuration: {
//     apiKey: "<REDACTED — a real key was committed here; rotate it>",
//         baseURL: "https://pro.aiskt.com/v1",
//     },
//     temperature: 0,
// });

// const agentModel = new ChatOpenAI({
//     temperature: 0,
//     model: "qwen2.5",
//     configuration: {
//         apiKey: "test",
//         verbose: true,
//         baseURL: "http://192.168.3.94:11434/v1"
//     }
// })
//

// In-memory checkpointer: lets conversation state persist across invoke()
// calls that share the same thread_id.
const agentCheckpointer = new MemorySaver();

// ReAct-style agent wired to the model, the search tool, and the checkpointer.
const agent = createReactAgent({
    llm: agentModel,
    tools: agentTools,
    checkpointSaver: agentCheckpointer,
});



// Ask the agent a question; thread_id keys the checkpointed conversation so a
// follow-up invoke() with the same id would see this exchange as history.
const agentFinalState = await agent.invoke(
    { messages: [new HumanMessage("上海的天气怎么样？")] },
    { configurable: { thread_id: "142" } },
);

// Dump the full message history (question, tool calls, final answer).
console.log(agentFinalState.messages);
//
// const agentNextState = await agent.invoke({
//     messages: [new HumanMessage("那么南京呢？")],
// }, {
//     configurable: {
//         thread_id: "42"
//     }
// })
//
// console.log('~~~~~~~')
//
// console.log(agentNextState.messages)
