package org.example.config;

import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.boot.ApplicationArguments;
import org.springframework.boot.ApplicationRunner;
import org.springframework.http.ResponseEntity;
import org.springframework.http.client.SimpleClientHttpRequestFactory;
import org.springframework.stereotype.Component;
import org.springframework.web.client.RestTemplate;

/**
 * Startup probe for the local Ollama model server.
 *
 * <p>Runs once after the application context is ready ({@link ApplicationRunner}),
 * issues a single GET against Ollama's {@code /api/tags} endpoint, and logs whether
 * the service is reachable. Failures are logged and deliberately swallowed so that
 * an unavailable model server never prevents the application from starting.
 */
@Component
public class OllamaHealthCheck implements ApplicationRunner {

    private static final Logger log = LoggerFactory.getLogger(OllamaHealthCheck.class);

    /**
     * Connect/read timeout for the probe. Without an explicit timeout a
     * black-holed host would block startup for the OS-level TCP timeout.
     */
    private static final int TIMEOUT_MS = 3_000;

    /** Ollama base URL; defaults to the standard local install. */
    @Value("${langchain4j.ollama.chat-model.base-url:http://localhost:11434}")
    private String ollamaBaseUrl;

    @Override
    public void run(ApplicationArguments args) throws Exception {
        checkOllamaService();
    }

    /**
     * Performs the one-shot health check against {@code <baseUrl>/api/tags}.
     * Any exception (connection refused, timeout, DNS failure, …) is caught,
     * logged with its stack trace, and followed by actionable hints.
     */
    private void checkOllamaService() {
        try {
            RestTemplate restTemplate = new RestTemplate(requestFactoryWithTimeouts());
            ResponseEntity<String> response =
                    restTemplate.getForEntity(ollamaBaseUrl + "/api/tags", String.class);

            if (response.getStatusCode().is2xxSuccessful()) {
                log.info("✅ Ollama 服务连接正常 - ChatModel 和 StreamingChatModel 均可使用");
            } else {
                log.warn("⚠️ Ollama 服务响应异常: {}", response.getStatusCode());
            }
        } catch (Exception e) {
            // Pass the throwable as the final argument so SLF4J records the full
            // stack trace and cause chain, not just the top-level message.
            log.error("❌ 无法连接到 Ollama 服务: {}", e.getMessage(), e);
            log.info("请确保 Ollama 服务正在运行: ollama serve");
            log.info("请确保已拉取 qwen3:8b 模型: ollama pull qwen3:8b");
        }
    }

    /** Builds a request factory with bounded connect/read timeouts for the probe. */
    private static SimpleClientHttpRequestFactory requestFactoryWithTimeouts() {
        SimpleClientHttpRequestFactory factory = new SimpleClientHttpRequestFactory();
        factory.setConnectTimeout(TIMEOUT_MS);
        factory.setReadTimeout(TIMEOUT_MS);
        return factory;
    }
}