package com.steve.ai.config;

import com.steve.ai.service.IChatAssistant;
import com.steve.ai.service.IChatStreamAssistant;
import com.steve.ai.service.impl.PersistentChatMemoryStore;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import dev.langchain4j.model.openai.OpenAiTokenizer;
import dev.langchain4j.service.AiServices;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * LLM configuration: wires the blocking and streaming chat models, the
 * AI-assistant service proxies, and the chat-memory beans.
 *
 * <p>Connection settings are externalized to application properties so that
 * no credentials live in source control.
 *
 * @author STEVE
 * @since 2025-07-14
 */
@Configuration
public class LLMConfig {

    /**
     * API key for the OpenAI-compatible endpoint. Must be supplied via the
     * {@code llm.api-key} property (e.g. from an environment variable) —
     * never hard-code secrets in source; the previously committed key must
     * be considered leaked and rotated.
     */
    @Value("${llm.api-key}")
    private String apiKey;

    /** Model name; defaults preserve the original behavior. */
    @Value("${llm.model-name:qwen-max}")
    private String modelName;

    /** OpenAI-compatible base URL (DashScope compatible mode by default). */
    @Value("${llm.base-url:https://dashscope.aliyuncs.com/compatible-mode/v1}")
    private String baseUrl;

    /**
     * Builds the blocking (request/response) chat model.
     *
     * @return the configured {@link ChatLanguageModel}
     */
    @Bean
    public ChatLanguageModel chatLanguageModel() {
        return OpenAiChatModel.builder()
                .apiKey(apiKey)
                .modelName(modelName)
                .baseUrl(baseUrl)
                // NOTE(review): request/response logging may expose prompt
                // content and keys in logs — disable in production.
                .logRequests(true)
                .logResponses(true)
                .build();
    }

    /**
     * Builds the streaming chat model (token-by-token responses).
     *
     * @return the configured {@link StreamingChatLanguageModel}
     */
    @Bean
    public StreamingChatLanguageModel streamingChatLanguageModel() {
        return OpenAiStreamingChatModel.builder()
                .apiKey(apiKey)
                .modelName(modelName)
                .baseUrl(baseUrl)
                // NOTE(review): see chatLanguageModel() — logging may leak data.
                .logRequests(true)
                .logResponses(true)
                .build();
    }

    /**
     * AI-assistant service proxy backed by the blocking chat model.
     * No chat memory is attached: each call is stateless.
     *
     * @return the {@link IChatAssistant} service proxy
     */
    @Bean
    public IChatAssistant chatAssistant() {
        return AiServices.builder(IChatAssistant.class)
                .chatLanguageModel(chatLanguageModel())
                .build();
    }

    /**
     * Streaming AI-assistant service proxy with token-window chat memory.
     *
     * @return the {@link IChatStreamAssistant} service proxy
     */
    @Bean
    public IChatStreamAssistant chatStreamAssistant() {
        return AiServices.builder(IChatStreamAssistant.class)
                .streamingChatLanguageModel(streamingChatLanguageModel())
                .chatMemory(tokenWindowChatMemory())   // conversation memory
                .build();
    }

    /**
     * Message-count-based chat memory: keeps at most the last 3 messages.
     *
     * @return the configured {@link MessageWindowChatMemory}
     */
    @Bean
    public MessageWindowChatMemory messageWindowChatMemory() {
        return MessageWindowChatMemory.builder()
                // NOTE(review): a single fixed id means every caller shares
                // one conversation history — confirm this is intended, or
                // use a per-user @MemoryId instead.
                .id("1")
                // Maximum number of messages retained in memory.
                .maxMessages(3)
                .build();
    }

    /**
     * Token-count-based chat memory: evicts oldest messages once the
     * window exceeds 1000 tokens, as counted by the OpenAI tokenizer.
     *
     * @return the configured {@link TokenWindowChatMemory}
     */
    @Bean
    public TokenWindowChatMemory tokenWindowChatMemory() {
        return TokenWindowChatMemory.builder()
                // Token budget plus the tokenizer used to count tokens.
                .maxTokens(1000, new OpenAiTokenizer())
                .chatMemoryStore(persistentChatMemoryStore())
                .build();
    }

    /**
     * Persistent backing store for chat memory.
     *
     * @return the {@link ChatMemoryStore} implementation
     */
    @Bean
    public ChatMemoryStore persistentChatMemoryStore() {
        return new PersistentChatMemoryStore();
    }

}
