package cn.bugstack.xfg.dev.tech.test;

import lombok.extern.slf4j.Slf4j;
import org.junit.Test;
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaApi;
import org.springframework.ai.ollama.api.OllamaOptions;

@Slf4j
@Slf4j
public class OllamaConnectionTest {

    /** Base URL of the Ollama service under test (mirrors the project configuration). */
    private static final String OLLAMA_BASE_URL = "http://117.72.151.140:11434";

    /** Model name used for every request issued by this test class. */
    private static final String MODEL = "qwen3:0.6b";

    /**
     * Smoke-tests basic connectivity to the Ollama service by sending a trivial
     * prompt and logging the model's reply.
     *
     * <p>Fails the test (instead of merely logging) when the call throws, so a
     * down or misconfigured service is actually reported by JUnit.
     */
    @Test
    public void testOllamaConnection() {
        try {
            ChatResponse response = callModel("请回复'Hello, Ollama!'，不需要其他内容");
            log.info("Ollama服务连接测试成功!");
            log.info("模型响应: {}", response.getResult().getOutput().getContent());
        } catch (Exception e) {
            log.error("Ollama服务连接测试失败: {}", e.getMessage(), e);
            // Rethrow so the @Test fails — previously the exception was swallowed
            // and this test passed even when the service was unreachable.
            throw new AssertionError("Ollama connection test failed", e);
        }
    }

    /**
     * Exercises the configured model with a free-form prompt and logs the reply.
     *
     * <p>Fails the test when the model call throws, for the same reason as
     * {@link #testOllamaConnection()}.
     */
    @Test
    public void testOllamaWithModel() {
        try {
            ChatResponse response = callModel("请简单介绍一下你自己");
            log.info("Ollama模型测试成功!");
            log.info("模型响应: {}", response.getResult().getOutput().getContent());
        } catch (Exception e) {
            log.error("Ollama模型测试失败: {}", e.getMessage(), e);
            throw new AssertionError("Ollama model test failed", e);
        }
    }

    /**
     * Sends a single user message to the configured Ollama endpoint using the
     * configured model and returns the raw chat response.
     *
     * @param message the user prompt text to send
     * @return the model's chat response
     */
    private ChatResponse callModel(String message) {
        OllamaApi ollamaApi = new OllamaApi(OLLAMA_BASE_URL);
        OllamaChatClient ollamaClient = new OllamaChatClient(ollamaApi);
        Prompt prompt = new Prompt(message, OllamaOptions.create().withModel(MODEL));
        return ollamaClient.call(prompt);
    }
}