package com.ruoyi.ai.service.impl;

import com.ruoyi.ai.domain.ChatModelList;
import com.ruoyi.ai.service.ILangchainService;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.service.SystemMessage;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;
import reactor.core.publisher.Flux;

import java.time.Duration;

@Service
public class LangchainServiceImpl implements ILangchainService {

    /** Request timeout shared by the blocking and streaming Ollama clients. */
    private static final Duration REQUEST_TIMEOUT = Duration.ofSeconds(30);

    /** Retry count for the blocking chat call. */
    private static final int MAX_RETRIES = 3;

    /** Sliding-window size for the streaming assistant's chat memory. */
    private static final int MEMORY_WINDOW_MESSAGES = 10;

    // NOTE(review): injects the concrete impl rather than a service interface —
    // confirm whether an IChatModelListService interface exists and prefer it.
    @Autowired
    private ChatModelListServiceImpl chatModelListService;

    /** Blocking assistant contract; the system prompt asks the model to answer in Chinese. */
    private interface Assistant {
        @SystemMessage("请用中文回答")
        String chat(String msg);
    }

    /** Streaming assistant contract; emits the answer token-by-token as a Flux. */
    private interface StreamingAssistant {
        @SystemMessage("请用中文回答")
        Flux<String> chat(String message);
    }

    /**
     * Sends one blocking chat request to the currently enabled Ollama model.
     *
     * @param msg the user's message
     * @return the model's complete answer
     * @throws IllegalArgumentException if no valid enabled model configuration exists
     */
    @Override
    public String ollamaChatModel(String msg) {
        // Validate config up front (baseUrl/modelVersion/temperature), matching
        // the streaming path — previously this method could NPE on a null
        // temperature or null config.
        ChatModelList chooseModel = requireValidModelConfig();

        OllamaChatModel chatModel = OllamaChatModel.builder()
                .baseUrl(chooseModel.getBaseUrl())
                .modelName(chooseModel.getModelVersion())
                .temperature(chooseModel.getTemperature().doubleValue())
                .timeout(REQUEST_TIMEOUT)
                .maxRetries(MAX_RETRIES)
                .logRequests(true)
                .logResponses(true)
                .build();

        Assistant assistant = AiServices.create(Assistant.class, chatModel);
        return assistant.chat(msg);
    }

    /**
     * Streams a chat response token-by-token from the currently enabled Ollama model.
     *
     * <p>NOTE(review): a fresh assistant — and therefore a fresh chat memory — is
     * built on every call, so the {@value #MEMORY_WINDOW_MESSAGES}-message window
     * never spans requests; confirm whether cross-request memory is intended.
     *
     * @param msg the user's message
     * @return a {@link Flux} emitting response tokens as they arrive
     * @throws IllegalArgumentException if no valid enabled model configuration exists
     */
    @Override
    public Flux<String> ollamaChatStreaming(String msg) {
        ChatModelList chooseModel = requireValidModelConfig();

        StreamingChatLanguageModel streamingModel = OllamaStreamingChatModel.builder()
                .baseUrl(chooseModel.getBaseUrl())
                .modelName(chooseModel.getModelVersion())
                .temperature(chooseModel.getTemperature().doubleValue())
                .timeout(REQUEST_TIMEOUT)
                .logRequests(true)
                .logResponses(true)
                .build();

        // Builders either return a non-null instance or throw, so the former
        // post-construction null checks were dead code and have been removed.
        StreamingAssistant assistant = AiServices.builder(StreamingAssistant.class)
                .streamingChatLanguageModel(streamingModel)
                .chatMemory(MessageWindowChatMemory.withMaxMessages(MEMORY_WINDOW_MESSAGES))
                .build();

        return assistant.chat(msg);
    }

    /**
     * Fetches the enabled chat-model configuration and validates every field the
     * model builders dereference (baseUrl, modelVersion, temperature).
     *
     * @return a configuration guaranteed to have non-null required fields
     * @throws IllegalArgumentException if the configuration is missing or incomplete
     */
    private ChatModelList requireValidModelConfig() {
        ChatModelList model = chatModelListService.selectEnableChatModel();
        if (model == null
                || model.getBaseUrl() == null
                || model.getModelVersion() == null
                || model.getTemperature() == null) {
            throw new IllegalArgumentException("Model configuration is invalid");
        }
        return model;
    }
}
