package org.ruoyi.interview.config;

import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaApi;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Bean;
import org.springframework.context.annotation.Configuration;
import org.springframework.web.client.RestClient;

/**
 * Spring configuration that wires up the Ollama chat client.
 *
 * <p>Connection and model settings are read from application properties,
 * with sensible fallbacks for a local development setup.
 */
@Configuration
public class OllamaConfig {

    /** Base URL of the Ollama server; defaults to a local instance. */
    @Value("${spring.ai.ollama.base-url:http://localhost:11434}")
    private String baseUrl;

    /** Name of the default chat model; defaults to qwen2:7b. */
    @Value("${spring.ai.ollama.chat.options.model:qwen2:7b}")
    private String model;

    /**
     * Builds the {@link OllamaChatClient} bean, pointing at the configured
     * Ollama server and pre-set with the configured default model.
     *
     * @return a chat client ready for injection elsewhere in the application
     */
    @Bean
    public OllamaChatClient ollamaChatClient() {
        OllamaApi api = new OllamaApi(baseUrl, RestClient.builder());
        OllamaOptions defaultOptions = OllamaOptions.create().withModel(model);
        return new OllamaChatClient(api).withDefaultOptions(defaultOptions);
    }
}