package com.test.langchain4j.config;

import com.test.langchain4j.service.ChatAssistant;
import com.test.langchain4j.service.RedisChatMemoryStore;
import dev.langchain4j.memory.chat.ChatMemoryProvider;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import dev.langchain4j.service.AiServices;
import jakarta.annotation.Resource;
import java.time.Duration;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Spring configuration for LangChain4j beans: a blocking Ollama chat model, its streaming
 * counterpart, and a {@code ChatAssistant} AI service backed by a Redis-based
 * message-window chat memory.
 *
 * @author liuziyang
 * @version 1.0
 * @since 8/15/25 12:58 PM
 */
@Configuration
public class LLMConfig {

  // Single source of truth for the Ollama connection; previously these literals were
  // duplicated in both model beans and could silently drift apart.
  private static final String OLLAMA_MODEL_NAME = "qwen3:32b";

  // NOTE(review): hard-coded host — consider externalizing to application properties
  // (e.g. @Value / @ConfigurationProperties) so environments can differ without a rebuild.
  private static final String OLLAMA_BASE_URL = "http://10.2.54.170:11434";

  /** Request timeout applied to both the blocking and the streaming model. */
  private static final Duration REQUEST_TIMEOUT = Duration.ofSeconds(60);

  /** Maximum number of messages retained per conversation in the chat memory window. */
  private static final int MAX_MEMORY_MESSAGES = 20;

  /** Persistent store so conversation memory survives application restarts. */
  @Resource private RedisChatMemoryStore redisChatMemoryStore;

  /**
   * Blocking (request/response) Ollama chat model.
   *
   * @return a {@link ChatModel} targeting {@value #OLLAMA_BASE_URL}
   */
  @Bean(name = "chatModelOllama")
  public ChatModel chatModelOllama() {
    return OllamaChatModel.builder()
        .modelName(OLLAMA_MODEL_NAME)
        .baseUrl(OLLAMA_BASE_URL)
        .maxRetries(3)
        .timeout(REQUEST_TIMEOUT)
        .build();
  }

  /**
   * Streaming (token-by-token) Ollama chat model.
   *
   * <p>Method name kept as {@code streamingChatModel} (rather than matching the bean name
   * {@code streamingChatModelOllama}) for backward compatibility with existing references.
   *
   * @return a {@link StreamingChatModel} targeting {@value #OLLAMA_BASE_URL}
   */
  @Bean(name = "streamingChatModelOllama")
  public StreamingChatModel streamingChatModel() {
    return OllamaStreamingChatModel.builder()
        .modelName(OLLAMA_MODEL_NAME)
        .baseUrl(OLLAMA_BASE_URL)
        .timeout(REQUEST_TIMEOUT)
        .build();
  }

  /**
   * AI service proxy for {@link ChatAssistant}, wired with per-conversation memory.
   *
   * <p>Each {@code memoryId} gets its own {@link MessageWindowChatMemory} capped at
   * {@value #MAX_MEMORY_MESSAGES} messages and persisted in Redis.
   *
   * @param chatModel the streaming model to answer with (resolves to the single
   *     {@link StreamingChatModel} bean defined above)
   * @return the assembled {@link ChatAssistant} proxy
   */
  @Bean
  public ChatAssistant chatAssistant(StreamingChatModel chatModel) {
    ChatMemoryProvider chatMemoryProvider =
        memoryId ->
            MessageWindowChatMemory.builder()
                .id(memoryId)
                .maxMessages(MAX_MEMORY_MESSAGES)
                .chatMemoryStore(redisChatMemoryStore)
                .build();

    return AiServices.builder(ChatAssistant.class)
        .chatMemoryProvider(chatMemoryProvider)
        .streamingChatModel(chatModel)
        .build();
  }
}
