package com.ics.atable.chat.config;

import com.ics.atable.chat.log.WebClientLoggingFilter;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.ai.openai.api.OpenAiApi;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.context.annotation.Scope;
import org.springframework.http.client.JettyClientHttpRequestFactory;
import org.springframework.http.client.reactive.JettyClientHttpConnector;
import org.springframework.web.client.RestClient;
import org.springframework.web.reactive.function.client.WebClient;

import java.util.concurrent.ConcurrentHashMap;

/**
 * LLM configuration: builds and caches OpenAI-compatible {@link ChatModel}
 * instances, keyed by (model name, base URL, temperature).
 *
 * @author haohongbin
 */
@Configuration
public class LlmConfiguration {

    /** API key for the OpenAI-compatible endpoint. */
    @Value("${spring.ai.openai.api-key}")
    private String apiKey;

    /** Default base URL of the OpenAI-compatible endpoint. */
    @Value("${spring.ai.openai.base-url}")
    private String baseUrl;

    /** Default model name used by the {@link #chatModel()} bean. */
    @Value("${spring.ai.openai.model}")
    private String model;

    // Cache of ChatModel instances, keyed by "model|baseUrl|temperature".
    private final ConcurrentHashMap<String, ChatModel> chatModelCache = new ConcurrentHashMap<>();

    /**
     * Returns the cached {@link ChatModel} for the given parameters, creating it
     * atomically if absent.
     *
     * <p>Note: the previous {@code @Scope("prototype")} annotation was removed —
     * this is a plain method (not a {@code @Bean} method), so Spring never read
     * the annotation, and it contradicted the caching behavior.
     *
     * @param modelName   model name
     * @param baseUrl     API base URL
     * @param temperature temperature parameter; may be {@code null} to use the provider default
     * @return a cached or newly created ChatModel instance
     * @throws RuntimeException if the underlying client cannot be constructed
     */
    public ChatModel getOrCreateChatModel(String modelName, String baseUrl, Double temperature) {
        // computeIfAbsent guarantees at most one model is built per key, even
        // under concurrent calls; the original get-then-put allowed duplicates.
        return chatModelCache.computeIfAbsent(
                generateKey(modelName, baseUrl, temperature),
                key -> createChatModel(modelName, baseUrl, temperature));
    }

    /**
     * Builds a new {@link ChatModel} wired with request/response logging filters
     * and Jetty-based HTTP clients for both blocking and reactive calls.
     */
    private ChatModel createChatModel(String modelName, String baseUrl, Double temperature) {
        try {
            WebClient.Builder webClientBuilder = WebClient.builder()
                    .clientConnector(new JettyClientHttpConnector())
                    .filter(WebClientLoggingFilter.logRequest())
                    .filter(WebClientLoggingFilter.logResponse());

            OpenAiApi openAiApi = OpenAiApi.builder()
                    .apiKey(apiKey)
                    .baseUrl(baseUrl)
                    .restClientBuilder(RestClient.builder().requestFactory(new JettyClientHttpRequestFactory()))
                    .webClientBuilder(webClientBuilder)
                    .build();

            OpenAiChatOptions.Builder optionsBuilder = OpenAiChatOptions.builder()
                    .model(modelName);

            // Only override the temperature when one was explicitly requested.
            if (temperature != null) {
                optionsBuilder.temperature(temperature);
            }

            return OpenAiChatModel.builder()
                    .openAiApi(openAiApi)
                    .defaultOptions(optionsBuilder.build())
                    .build();
        } catch (Exception e) {
            // Preserve the cause; a failure here is an unrecoverable configuration error.
            throw new RuntimeException("Failed to create ChatModel", e);
        }
    }

    /**
     * Default ChatModel bean built from the configured model and base URL,
     * with a temperature of 0.7.
     */
    @Bean
    public ChatModel chatModel() {
        return getOrCreateChatModel(model, baseUrl, 0.7);
    }

    /**
     * Builds the cache key; a {@code null} temperature maps to the literal "null".
     */
    private String generateKey(String modelName, String baseUrl, Double temperature) {
        return modelName + "|" + baseUrl + "|" + (temperature != null ? temperature : "null");
    }
}
