package dev.alm.aiserviceintegration.config;

import dev.langchain4j.model.chat.ChatModel;
import dev.langchain4j.model.chat.StreamingChatModel;
import dev.langchain4j.model.chat.listener.ChatModelListener;
import dev.langchain4j.model.openai.OpenAiChatModel;
import dev.langchain4j.model.openai.OpenAiStreamingChatModel;
import jakarta.annotation.Resource;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

import java.time.Duration;
import java.util.List;

@Configuration
public class LLMConfig {

    @Value("${llm.api.key}")
    private String deepSeekApiKey;

    @Value("${llm.base.url}")
    private String deepSeekApiUrl;

    @Value("${llm.model.name}")
    private String deepSeekModelName;

    // Per-request timeout in seconds. Defaults to 60 if llm.timeout.seconds
    // is not set, which is a realistic ceiling for chat-completion latency.
    @Value("${llm.timeout.seconds:60}")
    private long timeoutSeconds;

    @Resource
    private ChatModelListener chatModelListener;

    /**
     * Builds the DeepSeek chat model using the OpenAI-compatible LangChain4j
     * client, configured entirely from {@code llm.*} application properties.
     *
     * @return a blocking {@link ChatModel} exposed under the bean name
     *         {@code deepseekChatModel}
     */
    @Bean(name = "deepseekChatModel")
    public ChatModel deepseekChatModel() {
        return OpenAiChatModel.builder()
                .apiKey(deepSeekApiKey)
                .baseUrl(deepSeekApiUrl)
                .modelName(deepSeekModelName)
                .listeners(List.of(chatModelListener)) // request/response observer hook
                .maxRetries(3)                         // retry transient failures
                // BUG FIX: previously Duration.ofSeconds(1) — far too short for
                // LLM completions, which routinely take tens of seconds; every
                // call would time out and then be retried 3 more times.
                .timeout(Duration.ofSeconds(timeoutSeconds))
                .build();
    }
}
