package com.yc;

import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;

import java.util.concurrent.CompletableFuture;

import static java.time.Duration.ofSeconds;

/**
 * Demonstrates streaming chat completion with langchain4j's
 * {@code OpenAiStreamingChatModel} pointed at the DeepSeek
 * OpenAI-compatible endpoint.
 *
 * <p>Requires the {@code DEEPSEEK_API_KEY} environment variable to be set.
 */
public class _07_Streaming {
    public static void main(String[] args) {
        String apiKey = System.getenv("DEEPSEEK_API_KEY");
        // Fail fast with a clear message instead of an opaque builder/auth error.
        if (apiKey == null || apiKey.isBlank()) {
            System.err.println("Missing DEEPSEEK_API_KEY environment variable");
            return;
        }

        OpenAiStreamingChatModel model = OpenAiStreamingChatModel.builder()
                .apiKey(apiKey)
                .modelName("deepseek-chat")
                .baseUrl("https://api.deepseek.com")
                .logRequests(true)
                .logResponses(true)
                .timeout(ofSeconds(60))
                .build();

        // Completed (normally or exceptionally) by the handler callbacks below,
        // so main() can block until the asynchronous stream finishes.
        CompletableFuture<ChatResponse> future = new CompletableFuture<>();
        String prompt = "请告诉我什么是RAG";
        model.chat(prompt, new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String token) {
                // print, not println: each token is a fragment of one message,
                // so a newline after every token would mangle the output.
                System.out.print(token);
            }

            @Override
            public void onCompleteResponse(ChatResponse chatResponse) {
                System.out.println(); // terminate the streamed line
                System.out.println("Done Streaming" + chatResponse);
                future.complete(chatResponse);
            }

            @Override
            public void onError(Throwable throwable) {
                System.out.println("Error Streaming");
                throwable.printStackTrace();
                future.completeExceptionally(throwable);
            }
        });
        // Block until the stream completes; otherwise the JVM may exit
        // before the asynchronous response arrives.
        future.join();
    }
}
