package xjay.ai.emotionalsupport.model;

import io.micrometer.observation.ObservationRegistry;
import jakarta.annotation.Resource;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.model.tool.ToolCallingManager;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaApi;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.ai.ollama.management.ModelManagementOptions;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;

@Configuration
public class MultiModelConfig {

    /**
     * Chat model served by a local/remote Ollama instance, registered under the
     * bean name {@code emoChatModel}.
     *
     * @param url         Ollama server base URL, from {@code spring.ai.ollama.base-url}
     * @param modelName   Ollama model name, from {@code spring.ai.ollama.chat.model}
     * @param temperature sampling temperature, from
     *                    {@code spring.ai.ollama.chat.options.temperature} (defaults to 0.9
     *                    to preserve the previous hard-coded behavior)
     * @return a {@link ChatModel} backed by {@link OllamaChatModel}
     */
    @Bean("emoChatModel")
    public ChatModel emoChatModel(
            @Value("${spring.ai.ollama.base-url}") String url,
            @Value("${spring.ai.ollama.chat.model}") String modelName,
            @Value("${spring.ai.ollama.chat.options.temperature:0.9}") Double temperature
    ) {
        // NOTE: the OllamaApi client is built manually here — it is NOT auto-injected,
        // so this bean works even without Spring AI's Ollama auto-configuration.
        OllamaApi api = OllamaApi.builder().baseUrl(url).build();
        return OllamaChatModel.builder()
                .ollamaApi(api)
                .defaultOptions(
                        OllamaOptions.builder()
                                .model(modelName)
                                .temperature(temperature)
                                .build())
                .build();
    }

    // Auto-configured DashScope chat model, injected by bean name ("dashscopeChatModel").
    @Resource
    private ChatModel dashscopeChatModel;

    /**
     * Re-exposes the auto-configured DashScope chat model under the alias
     * {@code qwenChatModel}, so callers can qualify by a stable, purpose-named bean.
     *
     * @return the injected DashScope {@link ChatModel}
     */
    @Bean("qwenChatModel")
    public ChatModel qwenChatModel() {
        return this.dashscopeChatModel;
    }

}
