package org.example.offical.doc.ai.service;

import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import java.util.concurrent.CompletableFuture;
import org.example.offical.doc.ModelUtils;

/**
 * @author superMan
 * @since 1.0 (TODO: "fish_temp_since" was a template placeholder — set the real version)
 */
public class StreamingAiServiceDemo {

    /**
     * Demo entry point: builds a streaming AI service proxy around a
     * {@link StreamingChatLanguageModel} and prints the model's reply to stdout
     * as it streams in.
     *
     * @param args unused command-line arguments
     */
    public static void main(String[] args) {
        StreamingChatLanguageModel openAiStreamingDemoModel = ModelUtils.getOpenAiStreamingDemoModel();

        // AiServices.create returns a proxy implementing Assistant, backed by the streaming model.
        TokenStream tokenStream = AiServices.create(Assistant.class, openAiStreamingDemoModel)
                .chat("介绍一下你自己");

        // TokenStream.start() is asynchronous: without blocking, main could return
        // (and the JVM exit) before the response finishes streaming. Complete a
        // future from the terminal callbacks and join on it.
        CompletableFuture<Void> done = new CompletableFuture<>();

        tokenStream.onPartialResponse(System.out::println)
                .onCompleteResponse(chatResponse -> {
                    System.out.println("chatResponse = " + chatResponse);
                    done.complete(null); // normal termination of the stream
                })
                .onError(throwable -> {
                    System.out.println("throwable = " + throwable);
                    done.complete(null); // demo prints the error and exits cleanly
                })
                .onToolExecuted(toolExecution -> System.out.println("toolExecution = " + toolExecution))
                .onRetrieved(content -> System.out.println("content = " + content))
                .start();

        // Wait until the stream has either completed or errored before exiting.
        done.join();
    }


    /**
     * AI service contract: returning {@link TokenStream} makes the generated
     * proxy deliver the response incrementally via callbacks.
     */
    interface Assistant {
        TokenStream chat(String message);
    }
}
