package com.hxx.config;

import com.hxx.ai.AiAssiatant;
import dev.langchain4j.memory.chat.MessageWindowChatMemory;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaLanguageModel;
import dev.langchain4j.service.AiServices;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

/**
 * Spring configuration that wires up the LangChain4j beans used by the application:
 * a {@link ChatLanguageModel} backed by a local Ollama server, and the
 * {@link AiAssiatant} AI-service proxy built on top of it.
 *
 * <p>NOTE(review): "AiAssiatant" looks like a typo for "AiAssistant", but the name
 * is declared elsewhere in the project (see the import), so it is kept as-is here;
 * renaming would have to happen at the interface first.
 */
@Configuration
public class MyConfig {

    /**
     * Chat model talking to a locally running Ollama instance.
     *
     * <p>Model and endpoint are currently hard-coded: model {@code qwen2.5:3b} on
     * {@code http://localhost:11434} (the default Ollama port). Consider moving
     * these to application properties if other environments are needed.
     *
     * @return the configured {@link ChatLanguageModel} bean
     */
    @Bean
    public ChatLanguageModel chatLanguageModel() {
        OllamaChatModel model = OllamaChatModel.builder()
                .baseUrl("http://localhost:11434")
                .modelName("qwen2.5:3b")
                .build();
        return model;
    }

    /**
     * AI-service proxy implementing {@link AiAssiatant}.
     *
     * <p>Each memory id gets its own sliding-window chat memory capped at the
     * last 10 messages, so separate conversations do not share history.
     *
     * @param chatLanguageModel the chat model bean defined above, injected by Spring
     * @return the {@link AiAssiatant} bean
     */
    @Bean
    public AiAssiatant aiAssiatant(ChatLanguageModel chatLanguageModel) {
        AiServices<AiAssiatant> services = AiServices.builder(AiAssiatant.class)
                .chatLanguageModel(chatLanguageModel)
                .chatMemoryProvider(memoryId -> MessageWindowChatMemory.withMaxMessages(10));
        return services.build();
    }
}
