package com.koicarp.agent.example.chatmodel;

import dev.langchain4j.data.message.AiMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.ollama.OllamaChatModel;

/**
 * @Author: liuxia
 * @CreateTime: 2025/9/12 下午9:43
 * @Description: Multi-turn chat example (replays conversation history manually)
 */
/**
 * Demonstrates a two-turn conversation against a locally running Ollama model.
 *
 * <p>{@code ChatModel} is stateless: to continue a conversation, the earlier
 * user and AI messages must be passed back explicitly on the next call.
 */
public class OllamaChatMultipleChatTest {
    public static void main(String[] args) {
        // Name of a model previously pulled with `ollama pull`.
        final String model = "deepseek-r1:7b";
        // Host:port exposed by the local Ollama server after startup.
        final String endpoint = "http://localhost:11434";

        final ChatModel chatModel = OllamaChatModel.builder()
                .baseUrl(endpoint)
                .modelName(model)
                .build();

        // Turn 1: send the opening message and capture the AI's reply.
        final UserMessage greeting = UserMessage.from("你好，你现在是我的朋友koi");
        final AiMessage greetingReply = chatModel.chat(greeting).aiMessage();
        System.out.println("ai答1：" + greetingReply);

        // Turn 2: replay the full history so the model keeps context.
        final UserMessage whoAreYou = UserMessage.from("你是谁?");
        final AiMessage whoAreYouReply =
                chatModel.chat(greeting, greetingReply, whoAreYou).aiMessage();
        System.out.println("ai答2：" + whoAreYouReply);
    }
}
