package com.koicarp.agent.example.chatmemery;

import com.koicarp.agent.example.ChatModelInit;
import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.openai.OpenAiChatModelName;
import dev.langchain4j.model.openai.OpenAiTokenCountEstimator;
import dev.langchain4j.store.memory.chat.InMemoryChatMemoryStore;

/**
 * @Author: liuxia
 * @CreateTime: 2025/9/13 下午10:43
 * @Description: Demo of token-window chat memory: keeps the most recent messages
 *               whose combined size fits within a fixed token budget.
 */
public class TokenWindowChatMemoryTest {

    public static void main(String[] args) {
        ChatModel model = ChatModelInit.initOpenAi();

        // Estimator used by the memory to measure how many tokens each message costs.
        OpenAiTokenCountEstimator estimator = new OpenAiTokenCountEstimator(OpenAiChatModelName.GPT_4);

        // Sliding token window: once the stored messages exceed 1000 tokens,
        // the oldest messages are evicted so the newest ones always fit.
        TokenWindowChatMemory memory = TokenWindowChatMemory.builder()
                .maxTokens(1000, estimator)
                .chatMemoryStore(new InMemoryChatMemoryStore())   // in-memory store for the chat history
                .build();

        chatOnce(model, memory, "你好，你现在是我的朋友koi", "ai回答1： ");
        chatOnce(model, memory, "你是谁?", "ai回答2： ");
        chatOnce(model, memory, "你是叫koi吗", "ai回答3： ");
    }

    /**
     * Runs one round of conversation: stores the user message in memory, sends the
     * full remembered history to the model, stores the AI reply back into memory,
     * and prints the reply with the given label.
     *
     * <p>Storing the AI reply is essential — without it the memory would contain
     * only user messages, and later questions (e.g. "你是叫koi吗") could not be
     * answered from the model's own earlier responses.
     *
     * @param model    the chat model to query
     * @param memory   the token-window memory holding the conversation so far
     * @param userText the user's message for this round
     * @param label    prefix printed before the AI reply
     */
    private static void chatOnce(ChatModel model, TokenWindowChatMemory memory,
                                 String userText, String label) {
        memory.add(UserMessage.from(userText));
        AiMessage aiMessage = model.chat(memory.messages()).aiMessage();
        // Keep the AI's answer in memory so the next round has full context.
        memory.add(aiMessage);
        System.out.println(label + aiMessage.text());
    }
}
