package com.example.isoftlangchainai.streamchat;

import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.TokenStream;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

import java.util.concurrent.CompletableFuture;
import java.util.logging.Level;
import java.util.logging.Logger;

import static java.util.concurrent.TimeUnit.SECONDS;

/**
 * Demo controller showing two ways to consume a streaming chat model:
 * callback-based {@link TokenStream} and a Reactor {@link Flux}.
 *
 * @Date :2025/7/14 16:41
 * @Author :chenjun
 */
@RestController
@RequestMapping("/stream")
public class StreamChatController {

    // JUL logger (stdlib); Spring Boot bridges java.util.logging into its main log.
    private static final Logger log = Logger.getLogger(StreamChatController.class.getName());

    @Value("${langchain4j.open-ai.chat-model.api-key}")
    protected String apikey;
    @Value("${langchain4j.open-ai.chat-model.model-name}")
    protected String modelName;
    @Value("${langchain4j.open-ai.chat-model.base-url}")
    protected String baseUrl;

    // AiService proxy: tokens are delivered through TokenStream callbacks.
    interface Assistant {
        TokenStream chat(String message);
    }

    // AiService proxy: tokens are delivered as a Reactor Flux.
    interface FluxAssistant {
        Flux<String> chat(String message);
    }

    /**
     * Builds a streaming OpenAI chat model from the configured properties.
     * Full list of available builder parameters:
     * https://github.com/langchain4j/langchain4j-spring/blob/main/langchain4j-open-ai-spring-boot-starter/src/main/java/dev/langchain4j/openai/spring/AutoConfig.java
     */
    private StreamingChatModel buildModel() {
        return OpenAiStreamingChatModel.builder()
                .baseUrl(baseUrl)
                .apiKey(apikey)
                .modelName(modelName)
                .build();
    }

    /**
     * [描述]: 聊天，流式响应 — chat with a streaming response via TokenStream callbacks.
     * Partial tokens are printed as they arrive; the request blocks until the
     * complete response is available or 60 seconds elapse.
     *
     * @return "success" when the chat completed; an error description otherwise
     */
    @GetMapping("/chat")
    public String getChat() {
        try {
            Assistant assistant = AiServices.create(Assistant.class, buildModel());
            TokenStream tokenStream = assistant.chat("Tell me a joke");

            // Bridge the callback-style TokenStream to a blocking result.
            CompletableFuture<ChatResponse> futureResponse = new CompletableFuture<>();
            tokenStream.onPartialResponse(System.out::print)
                    .onCompleteResponse(futureResponse::complete)
                    .onError(futureResponse::completeExceptionally)
                    .start();

            // Bounded wait so a stalled stream cannot hang the request forever.
            ChatResponse chatResponse = futureResponse.get(60, SECONDS);
            System.out.println("\n" + chatResponse.aiMessage().text());
            return "success";
        } catch (InterruptedException e) {
            Thread.currentThread().interrupt(); // restore the interrupt flag
            log.log(Level.WARNING, "聊天-流式响应被中断", e);
            return "error: interrupted";
        } catch (Exception e) {
            // Log the full stack trace; e.getMessage() alone loses the root cause.
            log.log(Level.WARNING, "聊天-流式响应异常", e);
            return "error: " + e.getMessage();
        }
    }

    /**
     * [描述]: 聊天，流式响应 — chat with a streaming response via a Reactor Flux.
     * Each emitted token is printed on its own line; toStream() blocks the
     * servlet thread until the Flux completes.
     *
     * @return "success" when the chat completed; an error description otherwise
     */
    @GetMapping("/fluxchat")
    public String fluxchat() {
        try {
            FluxAssistant fluxAssistant = AiServices.create(FluxAssistant.class, buildModel());
            Flux<String> chatStream = fluxAssistant.chat("Tell me a joke");
            chatStream.toStream().forEach(System.out::println);
            return "success";
        } catch (Exception e) {
            log.log(Level.WARNING, "聊天-流式响应异常", e);
            return "error: " + e.getMessage();
        }
    }
}
