package com.xp.ai.chat.openai;

import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;


/**
 * Demo: streaming chat completion against the OpenAI API via LangChain4j.
 *
 * <p>Prints each partial token as it arrives and blocks the main thread
 * until the stream completes or fails.
 */
public class OpenAiStreamChat {
    public static void main(String[] args) throws InterruptedException {
        OpenAiStreamingChatModel chatModel = OpenAiStreamingChatModel.builder()
                .apiKey("demo")
                .modelName("gpt-4o-mini")
                .build();

        // chat() is asynchronous: without waiting, main would return immediately
        // and the JVM could exit before the full response has streamed in.
        CountDownLatch done = new CountDownLatch(1);

        chatModel.chat("你好，你是谁？", new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String partialResponse) {
                System.out.println(partialResponse);
                try {
                    // Artificial pause so the token-by-token streaming is visible
                    // in the console; remove for real use.
                    TimeUnit.SECONDS.sleep(1L);
                } catch (InterruptedException e) {
                    // Restore the interrupt flag before rethrowing so callers
                    // can still observe the interruption.
                    Thread.currentThread().interrupt();
                    throw new RuntimeException(e);
                }
            }

            @Override
            public void onCompleteResponse(ChatResponse completeResponse) {
                // Stream finished normally — release the waiting main thread.
                done.countDown();
            }

            @Override
            public void onError(Throwable error) {
                System.out.println(error);
                // Release the latch on failure too, so main does not hang forever.
                done.countDown();
            }
        });

        done.await();
    }
}
