package com.koicarp.agent.example.chatmodel;

import dev.langchain4j.data.message.ChatMessage;
import dev.langchain4j.data.message.UserMessage;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;

import java.util.ArrayList;
import java.util.List;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CompletionException;
import java.util.concurrent.TimeUnit;

/**
 * @Author: liuxia
 * @CreateTime: 2025/9/12 9:44 PM
 * @Description: Streaming chat test against a local Ollama server.
 */
public class OllamaChatStreamChatTest {

    /** Upper bound on how long main() waits for the complete model response. */
    private static final long RESPONSE_TIMEOUT_SECONDS = 300;

    /**
     * Streams a chat reply from a locally running Ollama model, printing each
     * partial token as it arrives. A {@link CompletableFuture} bridges the
     * asynchronous handler callbacks back to the main thread.
     */
    public static void main(String[] args) {
        String modelName = "deepseek-r1:7b";          // model previously pulled via `ollama pull`
        String baseUrl = "http://localhost:11434";    // host:port of the running Ollama server
        StreamingChatModel model = OllamaStreamingChatModel.builder()
                .baseUrl(baseUrl)
                .modelName(modelName)
                .build();

        // Completed (normally or exceptionally) by the handler callbacks below.
        CompletableFuture<ChatResponse> futureResponse = new CompletableFuture<>();
        List<ChatMessage> messages = List.of(UserMessage.from("你好，你现在是我的朋友koi"));

        model.chat(messages, new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String partialResponse) {
                // Invoked repeatedly with each chunk of the model's reply.
                System.out.print(partialResponse);
            }

            @Override
            public void onCompleteResponse(ChatResponse completeResponse) {
                // Invoked once when the model has finished replying.
                futureResponse.complete(completeResponse);
            }

            @Override
            public void onError(Throwable error) {
                // Invoked if streaming fails (e.g. server unreachable).
                futureResponse.completeExceptionally(error);
            }
        });

        try {
            // Bound the wait: a bare join() would hang forever if the server
            // accepted the request but never finished (or errored) the stream.
            futureResponse.orTimeout(RESPONSE_TIMEOUT_SECONDS, TimeUnit.SECONDS).join();
            System.out.println(); // terminate the line built from partial output
        } catch (CompletionException e) {
            // join() wraps the handler's error (or the timeout) in CompletionException.
            System.err.println("Streaming chat failed: " + e.getCause());
        }
    }
}
