package cn.fz.demo06.config;


import dev.langchain4j.community.model.dashscope.QwenChatModel;
import dev.langchain4j.community.model.dashscope.QwenTokenizer;
import dev.langchain4j.memory.ChatMemory;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.memory.chat.TokenWindowChatMemory;
import dev.langchain4j.store.memory.chat.ChatMemoryStore;
import dev.langchain4j.store.memory.chat.InMemoryChatMemoryStore;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Configuration class that defines how conversation (chat) history is stored
 * and which Qwen chat model is used.
 * Requires the langchain4j (community DashScope) dependency in Maven.
 */
@Configuration
public class AssistantConfig {

    /** Maximum number of messages kept in each per-conversation message window. */
    private static final int MAX_MESSAGES = 10;

    /** Maximum number of tokens kept in the shared token-window chat memory. */
    private static final int MAX_TOKENS = 300;

    // API key for the DashScope (Qwen) service. NOTE(review): the property is
    // named "openai-api-key" for historical reasons but actually holds the
    // Qwen key — the property key is kept unchanged for compatibility.
    @Value("${app.openai-api-key}")
    private String qwenApiKey;

    // Model name used for the tool-enabled chat model, e.g. "qwen-plus".
    @Value("${app.tools-model-name}")
    private String toolsModelName;

    /**
     * Per-conversation chat memory: each memory id gets its own sliding window
     * of at most {@value #MAX_MESSAGES} messages, persisted in the supplied store.
     *
     * @param chatMemoryStore backing store for the message history
     * @return a provider that creates a message-window memory per conversation id
     */
    @Bean
    public ChatMemoryProvider chatMemoryProvider(ChatMemoryStore chatMemoryStore) {
        // Use the documented static factory instead of instantiating the Builder directly.
        return id -> MessageWindowChatMemory.builder()
                .id(id)
                .chatMemoryStore(chatMemoryStore)
                .maxMessages(MAX_MESSAGES)
                .build();
    }

    /**
     * In-process store for chat histories.
     * NOTE(review): contents are lost on restart — replace with a persistent
     * {@link ChatMemoryStore} implementation for production use.
     *
     * @return a volatile in-memory chat memory store
     */
    @Bean
    public ChatMemoryStore chatMemoryStore() {
        return new InMemoryChatMemoryStore();
    }

    /**
     * Qwen (DashScope) chat model configured from application properties.
     *
     * @return the chat model used for tool-enabled conversations
     */
    @Bean
    public QwenChatModel customQwenChatModel() {
        return QwenChatModel.builder()
                .apiKey(qwenApiKey)
                .modelName(toolsModelName)
                .build();
    }

    /**
     * Token-bounded chat memory capped at {@value #MAX_TOKENS} tokens, counted
     * with the Qwen tokenizer for the configured model.
     *
     * @return a token-window chat memory shared by beans that inject {@link ChatMemory}
     */
    @Bean
    public ChatMemory chatMemory() {
        return TokenWindowChatMemory.withMaxTokens(MAX_TOKENS, new QwenTokenizer(qwenApiKey, toolsModelName));
    }
}
