package com.hxx.config;

import com.hxx.ai.ChatAisistant;
import com.hxx.ai.StreamingChatAisistant;
import dev.langchain4j.model.chat.ChatLanguageModel;
import dev.langchain4j.model.chat.StreamingChatLanguageModel;
import dev.langchain4j.model.ollama.OllamaChatModel;
import dev.langchain4j.model.ollama.OllamaStreamingChatModel;
import dev.langchain4j.service.AiServices;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.time.Duration;
import java.util.List;

@Configuration
public class MyConfig {

    // Shared Ollama connection settings, hoisted so the blocking and streaming
    // model beans cannot silently drift apart (previously duplicated inline).
    private static final String OLLAMA_BASE_URL = "http://localhost:11434";
    // NOTE(review): ~17 minutes is unusually long for a request timeout —
    // confirm ofSeconds(1000) was not meant to be ofMillis(1000).
    private static final Duration REQUEST_TIMEOUT = Duration.ofSeconds(1000);

    // NOTE(review): "Aisistant" looks like a typo for "Assistant"; renaming
    // would touch the interface files in com.hxx.ai, so it is only flagged here.
    // The Autowired import at the top of this file is unused and can be removed.

    /**
     * Blocking chat model backed by the local Ollama server.
     * Request/response logging is enabled and retries are capped at 1
     * for easier debugging; a TestChatModelListener (declared elsewhere
     * in this package — presumably for inspection/testing) observes calls.
     */
    @Bean
    public ChatLanguageModel chatLanguageModel(){
        return OllamaChatModel.builder()
                .modelName("deepseek-r1:8b")
                .logRequests(true)
                .logResponses(true)
                .timeout(REQUEST_TIMEOUT)
                .maxRetries(1)
                .listeners(List.of(new TestChatModelListener()))
                .baseUrl(OLLAMA_BASE_URL)
                .build();
    }

    /**
     * Streaming chat model backed by the same local Ollama server.
     * Note it intentionally uses a different model ("qwen2.5:3b") than the
     * blocking bean and does not enable request/response logging.
     */
    @Bean
    public StreamingChatLanguageModel streamingChatLanguageModel(){
        return OllamaStreamingChatModel.builder()
                .modelName("qwen2.5:3b")
                .timeout(REQUEST_TIMEOUT)
                .baseUrl(OLLAMA_BASE_URL)
                .build();
    }

    /**
     * High-level AI service proxy for the blocking assistant interface,
     * generated by LangChain4j from {@link ChatAisistant}.
     *
     * @param chatLanguageModel the blocking model bean defined above
     */
    @Bean
    public ChatAisistant chatAisistant(ChatLanguageModel chatLanguageModel){
        return AiServices.create(ChatAisistant.class, chatLanguageModel);
    }

    /**
     * High-level AI service proxy for the streaming assistant interface,
     * generated by LangChain4j from {@link StreamingChatAisistant}.
     *
     * @param streamingChatLanguageModel the streaming model bean defined above
     */
    @Bean
    public StreamingChatAisistant streamingChatAisistant(StreamingChatLanguageModel streamingChatLanguageModel){
        return AiServices.create(StreamingChatAisistant.class, streamingChatLanguageModel);
    }
}























