package org.oa.ai.config;

//import dev.langchain4j.model.openai.OpenAiChatModel;
//import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import org.oa.ai.service.GwToolService;
import org.oa.ai.service.ToolService;
//import dev.langchain4j.community.model.dashscope.QwenEmbeddingModel;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.service.AiServices;
import org.oa.ai.service.CommonToolsService;
import org.oa.ai.service.YyToolService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Spring configuration that wires the langchain4j AI assistant services.
 *
 * <p>Each assistant bean combines a blocking chat model, a streaming chat model
 * (where enabled), a per-conversation message-window memory, and a tool
 * (function-call) service. The chat/streaming model beans consumed here are
 * currently provided elsewhere — the local definitions at the bottom of this
 * class are kept commented out so they can be re-enabled without rework.
 */
@Configuration
public class AiConfig {

    private static final Logger logger = LoggerFactory.getLogger(AiConfig.class);

    /** Maximum number of messages retained in each conversation's memory window. */
    private static final int MAX_MEMORY_MESSAGES = 20;

    // NOTE(review): the four fields below are referenced only by the
    // commented-out model beans further down; they are retained so those
    // beans can be re-enabled without code changes.
    @Value("${ai.modelName}")
    private String modelName;

    @Value("${ai.baseUrl}")
    private String baseUrl;

    @Value("${ai.apiKey}")
    private String apiKey;

    @Value("${ai.maxToken}")
    private Integer maxToken;

    /**
     * Builds a message-window chat memory keyed by {@code memoryId}, isolating
     * conversation history per user/session. Shared by all assistant beans so
     * the window size stays consistent in one place.
     *
     * @param memoryId the conversation/user identifier used to partition memory
     * @return a bounded chat memory holding at most {@link #MAX_MEMORY_MESSAGES} messages
     */
    private MessageWindowChatMemory chatMemory(Object memoryId) {
        return MessageWindowChatMemory.builder()
                .maxMessages(MAX_MEMORY_MESSAGES)
                .id(memoryId)
                .build();
    }

    /**
     * General-purpose assistant. Messages are currently kept in in-memory
     * windows; they could alternatively be persisted in Redis, MySQL, etc.
     */
    @Bean
    public AssistantUnique assistantUniqueStore(
                ChatLanguageModel chatLanguageModel,
                StreamingChatLanguageModel chatLanguageModelStream,
                ToolService toolService
    ) {
        // Content retriever wiring, kept for when RAG is re-enabled:
//        EmbeddingStoreContentRetriever embeddingStoreContentRetriever = EmbeddingStoreContentRetriever.builder()
//                .embeddingStore(embeddingStore)  // store vectors in memory
//                .embeddingModel(qwenEmbeddingModel) // bind the Qwen embedding model
//                .maxResults(1)  // return only the single most similar entry
//                .minScore(0.6) // require similarity of at least 0.6
//                .build();

        return AiServices.builder(AssistantUnique.class)
                .chatLanguageModel(chatLanguageModel)            // blocking chat model
                .streamingChatLanguageModel(chatLanguageModelStream) // streaming chat model
                .chatMemoryProvider(this::chatMemory)            // per-id conversation isolation
//                .contentRetriever(embeddingStoreContentRetriever)  // bind the content retriever
                .tools(toolService)                              // register function-call tools with the model
                .build();
    }


    // In-memory vector store, kept for when RAG is re-enabled:
//    @Bean
//    public EmbeddingStore embeddingStore() {
//        return new InMemoryEmbeddingStore();
//    }

    // Company DeepSeek model wiring, kept for reference / re-enabling:
//    @Bean
//    public ChatLanguageModel chatLanguageModel() {
//        // Use the local DeepSeek instance when the base URL points at localhost.
//        if (baseUrl.contains("localhost") || baseUrl.contains("127.0.0.1")) {
//            return OllamaChatModel.builder()
//                    .baseUrl(baseUrl)
//                    .modelName(modelName)
//                    .temperature(0.0)
//                    .build();
//        }else {
//            return OpenAiChatModel.builder()
//                    .apiKey(apiKey)
//                    .baseUrl(baseUrl) // DeepSeek API endpoint
//                    .modelName(modelName) // optional: specific model name
//                    .temperature(0.000001)
//                    .maxTokens(maxToken)
//                    .build();
//        }
//
//    }

//    @Bean
//    public StreamingChatLanguageModel chatLanguageModelStream() {
//        logger.info("----starting with model----");
//        logger.info("model URL:{}", baseUrl);
//        logger.info("model name:{}", modelName);
//        // SECURITY(review): do not log the raw API key if this bean is re-enabled.
//        if (baseUrl.contains("localhost") || baseUrl.contains("127.0.0.1")) {
//            return OllamaStreamingChatModel.builder()
//                    .baseUrl(baseUrl)
//                    .modelName(modelName)
//                    .temperature(0.8)
//                    .build();
//        }else {
//            return OpenAiStreamingChatModel.builder()
//                    .apiKey(apiKey)
//                    .baseUrl(baseUrl) // DeepSeek API endpoint
//                    .modelName(modelName) // optional: specific model name
//                    .temperature(0.000001)
//                    .maxTokens(maxToken)
//                    .build();
//        }
//    }

    /** YY-domain assistant with its own tool service. */
    @Bean
    public YyAssistant yyAssistant(
            ChatLanguageModel chatLanguageModel,
            StreamingChatLanguageModel chatLanguageModelStream,
            YyToolService toolService
    ) {
        return AiServices.builder(YyAssistant.class)
                .chatLanguageModel(chatLanguageModel)            // blocking chat model
                .streamingChatLanguageModel(chatLanguageModelStream) // streaming chat model
                .chatMemoryProvider(this::chatMemory)            // per-id conversation isolation
                .tools(toolService)                              // register function-call tools with the model
                .build();
    }

    /** YCSQ assistant backed by the common tool service. */
    @Bean
    public YcsqAssistant ycsqAssistant(
            ChatLanguageModel chatLanguageModel,
            StreamingChatLanguageModel chatLanguageModelStream,
            CommonToolsService toolService
    ) {
        return AiServices.builder(YcsqAssistant.class)
                .chatLanguageModel(chatLanguageModel)            // blocking chat model
                .streamingChatLanguageModel(chatLanguageModelStream) // streaming chat model
                .chatMemoryProvider(this::chatMemory)            // per-id conversation isolation
                .tools(toolService)                              // register function-call tools with the model
                .build();
    }

    /**
     * GW-domain assistant. The streaming model is deliberately not wired here
     * (it is still injected to keep the bean signature stable); re-enable the
     * commented line to turn streaming back on.
     */
    @Bean
    public GwAssistant gwAssistant(
            ChatLanguageModel chatLanguageModel,
            StreamingChatLanguageModel chatLanguageModelStream,
            GwToolService toolService
    ) {
        return AiServices.builder(GwAssistant.class)
                .chatLanguageModel(chatLanguageModel)            // blocking chat model
//                .streamingChatLanguageModel(chatLanguageModelStream)  // streaming chat model (disabled)
                .chatMemoryProvider(this::chatMemory)            // per-id conversation isolation
                .tools(toolService)                              // register function-call tools with the model
                .build();
    }


}
