import { createReactAgent } from "@langchain/langgraph/prebuilt";
import { MemorySaver } from "@langchain/langgraph";
import { ChatOpenAI } from "@langchain/openai";
import { tool } from "@langchain/core/tools";

import { z } from "zod";

// Define the tools for the agent to use
// Tool the agent can call. Returns canned weather strings so the demo
// runs without a real search backend.
const search = tool(
    async ({ query }) => {
        // Placeholder implementation, but don't tell the LLM that...
        console.log('tool', query);
        const normalized = query.toLowerCase();
        const asksAboutSF =
            normalized.includes("sf") || normalized.includes("san francisco");
        if (asksAboutSF) {
            return "天气是20度，有雾";
        }
        return "上海天气是10度，阳光明媚";
    },
    {
        name: "search",
        description: "Call to surf the web.",
        schema: z.object({
            // Description shown to the model: "the content of the user's query"
            query: z.string().describe("用户查询的内容"),
        }),
    },
);

const tools = [search];

// NOTE(security): earlier revisions hardcoded live-looking API keys here
// (including in commented-out variants). Those keys are in version-control
// history and must be revoked/rotated. The key is now read from the
// environment instead of being committed to source.
const model = new ChatOpenAI({
    model: "deepseek-chat",
    configuration: {
        apiKey: process.env.DEEPSEEK_API_KEY,
        baseURL: "https://api.deepseek.com/v1",
    },
    verbose: true,       // log request/response details while developing
    temperature: 0,      // deterministic output for the demo
});

// Initialize memory to persist state between graph runs.
const checkpointer = new MemorySaver();

// Prebuilt ReAct agent: the LLM decides when to call the `search` tool,
// and conversation state is checkpointed per thread_id.
const app = createReactAgent({
    llm: model,
    tools,
    checkpointSaver: checkpointer,
});

// Use the agent: send one user question on thread 42 so the checkpointer
// associates the conversation state with that thread.
const result = await app.invoke(
    {
        messages: [{
            role: "user",
            content: "上海的天气怎么样",
        }]
    },
    { configurable: { thread_id: 42 } }
);

// Print the agent's final reply — the last message in the returned history.
const finalMessage = result.messages.at(-1);
console.log(finalMessage.content);
