package com.example.langchanin4jdemo1.controller;

import dev.langchain4j.community.model.dashscope.QwenStreamingChatModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;

import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;

/**
 * Demo: streams a chat completion from the Qwen model token by token,
 * printing each partial response as it arrives.
 */
public class StreamDemo {
    public static void main(String[] args) throws InterruptedException {
        // Read the key from the environment instead of committing a secret to source.
        // (The original hard-coded key should be revoked — it is now public.)
        StreamingChatLanguageModel model = QwenStreamingChatModel.builder()
                .apiKey(System.getenv("DASHSCOPE_API_KEY"))
                .modelName("qwen-plus")
                .build();

        // The handler callbacks run on a background thread. Without this latch,
        // main() would return immediately and the JVM could exit before the
        // stream finishes.
        CountDownLatch done = new CountDownLatch(1);

        model.chat("你好，你是谁？", new StreamingChatResponseHandler() {
            @Override
            public void onPartialResponse(String token) {
                System.out.println(token);
                try {
                    // Artificial delay so the token-by-token streaming is visible.
                    TimeUnit.SECONDS.sleep(1);
                } catch (InterruptedException e) {
                    // Restore the interrupt status instead of wrapping in an
                    // unchecked exception that the SDK's callback thread may swallow.
                    Thread.currentThread().interrupt();
                }
            }

            @Override
            public void onCompleteResponse(ChatResponse chatResponse) {
                // Full accumulated answer, printed once streaming is finished.
                System.out.println(chatResponse.aiMessage().text());
                done.countDown();
            }

            @Override
            public void onError(Throwable throwable) {
                System.out.println("出错了");
                // Don't swallow the cause — without it failures are undiagnosable.
                throwable.printStackTrace();
                done.countDown();
            }
        });

        // Block until the stream completes or fails.
        done.await();
    }
}
