package cn.edu.lingnan.utils;

import cn.edu.lingnan.controllerV1.ChatDto;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Component;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;

import java.io.IOException;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.CountDownLatch;
import java.util.concurrent.TimeUnit;
import java.util.function.Consumer;

@Slf4j
@Component
public class AIChatUtils {

    @Autowired
    private OpenAiChatModel chatModel;

    /**
     * AI生成试卷专用接口，不用复用，流失输出使用下面的方法
     * @param promptText
     * @return
     */
    public String processChat(String promptText) {
        // 创建响应内容容器
        final StringBuilder fullResponse = new StringBuilder();

        // 创建同步锁（确保异步流处理完成）
        final CountDownLatch latch = new CountDownLatch(1);

        Prompt prompt = new Prompt(new UserMessage(promptText));

        // 订阅流式响应
        chatModel.stream(prompt).subscribe(
                response -> {
                    String content = response.getResult().getOutput().getContent();
                    fullResponse.append(content);
                    //System.out.print(content); // 实时打印片段
                },
                error -> {
                    System.err.println("\n处理异常，已接收内容：\n" + fullResponse);
                    error.printStackTrace();
                    latch.countDown();
                },
                () -> {
//                    System.out.println("\n\n完整响应内容：\n" + fullResponse);
                    latch.countDown();
                }
        );

        try {
            // 等待流处理完成（最大等待5分钟）
            latch.await(5, TimeUnit.MINUTES);
        } catch (InterruptedException e) {
            System.err.println("处理被中断，当前内容：\n" + fullResponse);
            Thread.currentThread().interrupt();
        }

        return fullResponse.toString();
    }

    /**
     * 流式输出，需要客户端支持
     * @param
     * @return
     */
    // 修改后的AIChatUtils.java部分
    public SseEmitter streamChat(String prompt, Consumer<String> completionCallback) {
        SseEmitter emitter = new SseEmitter(60_000L);
        StringBuilder fullContent = new StringBuilder(); // 新增内容收集器



        // 使用CompletableFuture异步处理
        CompletableFuture.runAsync(() -> {
            Prompt promptObj = new Prompt(new UserMessage(prompt));
            chatModel.stream(promptObj).subscribe(
                    response -> {
                        String chunk = response.getResult().getOutput().getContent();
                        fullContent.append(chunk);
                        try {
                            emitter.send(SseEmitter.event()
                                    .data(chunk)
                                    .id(String.valueOf(System.currentTimeMillis())));
                        } catch (IOException e) {
                            emitter.completeWithError(e);
                        }
                    },
                    emitter::completeWithError,
                    () -> {
                        completionCallback.accept(fullContent.toString());
                        emitter.complete();
                    }
            );
        });

        return emitter;
    }
}