package com.zy;

import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;

import java.time.Duration;
import java.util.concurrent.CompletableFuture;

/**
 * @program: AI_langchain4j
 * @description: Streaming chat response demo (prints tokens as they arrive)
 * @author: zy
 * @create: 2025-06-28 15:38
 */
public class _07_Streaming {

    /**
     * Streams a chat completion from DeepSeek's OpenAI-compatible API,
     * printing each partial token to the console as it arrives, and blocks
     * until the full response has been received (or an error occurs).
     *
     * @param args unused command-line arguments
     * @throws IllegalStateException if the {@code OPEN_AI_KEY} environment
     *                               variable is missing or blank
     */
    public static void main(String[] args) {
        String apiKey = System.getenv("OPEN_AI_KEY");
        // Fail fast with a clear message instead of an opaque auth/HTTP error
        // from deep inside the client later on.
        if (apiKey == null || apiKey.isBlank()) {
            throw new IllegalStateException("Environment variable OPEN_AI_KEY is not set");
        }

        OpenAiStreamingChatModel model = OpenAiStreamingChatModel.builder()
                .apiKey(apiKey)
                .modelName("deepseek-chat")
                .baseUrl("https://api.deepseek.com") // DeepSeek's OpenAI-compatible endpoint
                .logRequests(true)
                .timeout(Duration.ofSeconds(60))
                .build();

        // Completed (or failed) by the streaming callbacks below so that
        // main() can wait for the asynchronous stream to finish.
        CompletableFuture<ChatResponse> responseFuture = new CompletableFuture<>();
        String prompt = "请告诉我什么是RAG"; // plain prompt string, no template

        model.chat(prompt, new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String token) {
                System.out.print(token); // echo each chunk to the console as it streams in
            }

            @Override
            public void onCompleteResponse(ChatResponse chatResponse) {
                System.out.println("Done streaming:" + chatResponse);
                responseFuture.complete(chatResponse);
            }

            @Override
            public void onError(Throwable throwable) {
                // Demo-level handling: report and propagate through the future
                // so the join() below surfaces the failure.
                System.out.println("Error streaming");
                throwable.printStackTrace();
                responseFuture.completeExceptionally(throwable);
            }
        });
        responseFuture.join(); // block until streaming completes (akin to thread.join())
    }
}
