package com.langchain4jspringboot.controller;

import com.langchain4jspringboot.config.AiConfig;
import dev.langchain4j.community.model.dashscope.QwenChatModel;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.response.ChatResponse;
import dev.langchain4j.model.chat.response.StreamingChatResponseHandler;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.service.TokenStream;
import java.util.function.Supplier;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

/**
 * REST endpoints demonstrating LangChain4j chat integrations: blocking chat
 * (Qwen/DashScope and OpenAI-compatible models), token-streaming chat, and
 * memory-backed (stateful) conversations with optional per-id isolation.
 *
 * @author Lsc
 * @date 2025/3/27 10:44
 */
@RestController
@RequestMapping(value = "/langChain4j", produces = "application/json;charset=UTF-8")
public class LangChain4jController {

    private static final Logger log = LoggerFactory.getLogger(LangChain4jController.class);

    /** Fallback prompt used whenever the caller sends a blank {@code message}. */
    private static final String DEFAULT_MESSAGE = "你好";

    private final QwenChatModel qwenChatModel;
    private final OpenAiChatModel openAiChatModel;
    private final OpenAiStreamingChatModel aiStreamingChatModel;
    /** Conversation assistant backed by a shared chat memory (记忆对话). */
    private final AiConfig.Assistant assistant;
    /** Conversation assistant with a separate chat memory per memoryId (隔离记忆). */
    private final AiConfig.AssistantUnique assistantUnique;

    /**
     * Constructor injection (preferred over field injection): dependencies are
     * explicit, mandatory, and the fields can be {@code final}.
     */
    @Autowired
    public LangChain4jController(QwenChatModel qwenChatModel,
                                 OpenAiChatModel openAiChatModel,
                                 OpenAiStreamingChatModel aiStreamingChatModel,
                                 AiConfig.Assistant assistant,
                                 AiConfig.AssistantUnique assistantUnique) {
        this.qwenChatModel = qwenChatModel;
        this.openAiChatModel = openAiChatModel;
        this.aiStreamingChatModel = aiStreamingChatModel;
        this.assistant = assistant;
        this.assistantUnique = assistantUnique;
    }

    /**
     * Blocking chat against the Qwen (DashScope) model.
     *
     * @param message user prompt; blank input falls back to {@link #DEFAULT_MESSAGE}
     * @return the model's complete reply
     */
    @RequestMapping("/test")
    public String test(@RequestParam String message) {
        long startTime = System.currentTimeMillis();
        log.info("调用Qwen模型接口，请求参数: {}", message);

        String prompt = orDefault(message);
        try {
            String reply = qwenChatModel.chat(prompt);
            log.info("Qwen模型响应成功，耗时: {}ms", System.currentTimeMillis() - startTime);
            return reply;
        } catch (Exception e) {
            log.error("Qwen模型调用异常: {}", e.getMessage(), e);
            throw e;
        }
    }

    /**
     * Blocking chat against an OpenAI-compatible model (e.g. SiliconFlow, DeepSeek).
     *
     * @param message user prompt; blank input falls back to {@link #DEFAULT_MESSAGE}
     * @return the model's complete reply
     */
    @RequestMapping("/openai")
    public String openai(@RequestParam String message) {
        long startTime = System.currentTimeMillis();
        log.info("调用OpenAI模型接口，请求参数: {}", message);

        String prompt = orDefault(message);
        try {
            String reply = openAiChatModel.chat(prompt);
            log.info("OpenAI模型响应成功，耗时: {}ms", System.currentTimeMillis() - startTime);
            return reply;
        } catch (Exception e) {
            log.error("OpenAI模型调用异常: {}", e.getMessage(), e);
            throw e;
        }
    }

    /**
     * Streaming chat: emits each partial token as a {@link Flux} element.
     *
     * <p>NOTE(review): the class-level {@code produces} is {@code application/json};
     * for true incremental delivery to browsers {@code text/event-stream} is the
     * usual choice — confirm what existing clients expect before changing it.
     *
     * @param message user prompt; blank input falls back to {@link #DEFAULT_MESSAGE}
     * @return flux of partial response tokens; errors are propagated to the sink
     */
    @RequestMapping("/stream")
    public Flux<String> stream(@RequestParam String message) {
        long startTime = System.currentTimeMillis();
        log.info("调用OpenAI流式输出接口，请求参数: {}", message);

        final String prompt = orDefault(message);

        // Everything happens inside create(...) so each subscriber gets its own
        // streaming call rather than sharing one in-flight request.
        return Flux.create(sink -> {
            try {
                aiStreamingChatModel.chat(prompt, new StreamingChatResponseHandler() {
                    @Override
                    public void onPartialResponse(String token) {
                        sink.next(token);
                        log.debug("流式响应部分内容: {}", token);
                    }

                    @Override
                    public void onCompleteResponse(ChatResponse chatResponse) {
                        sink.complete();
                        log.info("流式响应完成，耗时: {}ms", System.currentTimeMillis() - startTime);
                    }

                    @Override
                    public void onError(Throwable throwable) {
                        log.error("流式响应异常: {}", throwable.getMessage(), throwable);
                        sink.error(throwable);
                    }
                });
            } catch (Exception e) {
                log.error("流式响应初始化异常: {}", e.getMessage(), e);
                sink.error(e);
            }
        });
    }

    /**
     * Blocking chat with conversation memory (previous turns are remembered).
     *
     * @param message user prompt; blank input falls back to {@link #DEFAULT_MESSAGE}
     * @return the model's complete reply
     */
    @RequestMapping("/memoryChat")
    public String memory(@RequestParam String message) {
        long startTime = System.currentTimeMillis();
        log.info("调用记忆对话接口，请求参数: {}", message);

        String prompt = orDefault(message);
        try {
            String reply = assistant.chat(prompt);
            log.info("记忆对话响应成功，耗时: {}ms", System.currentTimeMillis() - startTime);
            return reply;
        } catch (Exception e) {
            log.error("记忆对话调用异常: {}", e.getMessage(), e);
            throw e;
        }
    }

    /**
     * Streaming chat with shared conversation memory.
     *
     * @param message user prompt; blank input falls back to {@link #DEFAULT_MESSAGE}
     * @return flux of partial response tokens
     */
    @RequestMapping("/memoryStream")
    public Flux<String> memoryStream(@RequestParam String message) {
        log.info("调用记忆对话流式输出接口，请求参数: {}", message);

        final String prompt = orDefault(message);
        // The TokenStream is created lazily inside the helper: creating it eagerly
        // here (as before) meant a second subscriber would re-start an already
        // started/consumed TokenStream.
        return tokenStreamFlux("记忆对话", () -> assistant.stream(prompt));
    }

    /**
     * Streaming chat with per-{@code memoryId} isolated conversation memory.
     *
     * @param memoryId identifier selecting an isolated chat memory
     * @param message  user prompt; blank input falls back to {@link #DEFAULT_MESSAGE}
     * @return flux of partial response tokens
     */
    @RequestMapping("/memoryUniqueStream")
    public Flux<String> memoryUniqueStream(@RequestParam int memoryId, @RequestParam String message) {
        log.info("调用隔离记忆对话流式输出接口，memoryId: {}, 请求参数: {}", memoryId, message);

        final String prompt = orDefault(message);
        return tokenStreamFlux("隔离记忆对话[memoryId=" + memoryId + "]",
                () -> assistantUnique.stream(memoryId, prompt));
    }

    /** Returns {@code message} unchanged, or {@link #DEFAULT_MESSAGE} when it is blank. */
    private static String orDefault(String message) {
        if (message.isBlank()) {
            log.info("请求参数为空，使用默认值: {}", DEFAULT_MESSAGE);
            return DEFAULT_MESSAGE;
        }
        return message;
    }

    /**
     * Bridges a lazily-created {@link TokenStream} into a {@link Flux} of partial
     * tokens. The supplier is invoked inside the create-lambda so every
     * subscription gets a fresh TokenStream.
     *
     * @param label          human-readable tag used in log messages
     * @param streamSupplier factory producing a new TokenStream per subscription
     * @return flux emitting partial tokens, completing/erroring with the stream
     */
    private Flux<String> tokenStreamFlux(String label, Supplier<TokenStream> streamSupplier) {
        long startTime = System.currentTimeMillis();
        return Flux.create(sink -> {
            try {
                streamSupplier.get()
                        .onPartialResponse(token -> {
                            sink.next(token);
                            log.debug("{}流式响应部分内容: {}", label, token);
                        })
                        .onCompleteResponse(chatResponse -> {
                            sink.complete();
                            log.info("{}流式响应完成，耗时: {}ms", label, System.currentTimeMillis() - startTime);
                        })
                        .onError(error -> {
                            log.error("{}流式响应异常: {}", label, error.getMessage(), error);
                            sink.error(error);
                        })
                        .start();
            } catch (Exception e) {
                log.error("{}流式输出初始化异常: {}", label, e.getMessage(), e);
                sink.error(e);
            }
        });
    }
}
