package com.hzy.service.ollama;

import io.vertx.core.AbstractVerticle;
import io.vertx.core.Future;
import io.vertx.core.Promise;
import io.vertx.core.json.JsonObject;
import io.vertx.ext.web.client.WebClient;
import io.vertx.ext.web.client.WebClientOptions;
import java.util.concurrent.TimeUnit;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Verticle that proxies event-bus requests to a local Ollama server's
 * {@code /api/generate} endpoint (non-streaming mode).
 *
 * <p>Listens on {@link #OLLAMA_REQUEST_ADDRESS} for {@link JsonObject} bodies
 * (model, prompt, …), forwards them over HTTP, and replies with the
 * {@code "response"} string from Ollama's JSON reply.
 */
public class OllamaVerticle extends AbstractVerticle {

    private static final Logger LOGGER = LoggerFactory.getLogger(OllamaVerticle.class);

    /** Event-bus address this verticle consumes Ollama generation requests on. */
    public static final String OLLAMA_REQUEST_ADDRESS = "ollama.request";

    private WebClient webClient;
    private String ollamaHost;
    private int ollamaPort;

    /**
     * Reads the {@code "ollama"} section of the shared app config, builds the
     * {@link WebClient}, and registers the event-bus consumer.
     *
     * @param startPromise completed on success; failed when the configuration
     *                     is missing
     */
    @Override
    public void start(Promise<Void> startPromise) {
        JsonObject config = (JsonObject) vertx.sharedData().getLocalMap("config").get("appConfig");
        // Guard against a missing shared "appConfig" entry so we fail the
        // promise cleanly instead of throwing an NPE.
        JsonObject ollamaConfig = config == null ? null : config.getJsonObject("ollama");

        if (ollamaConfig == null) {
            LOGGER.error("Ollama configuration not found in config.json. Please add it.");
            startPromise.fail("Ollama configuration missing.");
            return;
        }

        ollamaHost = ollamaConfig.getString("host", "localhost");
        ollamaPort = ollamaConfig.getInteger("port", 11434);

        WebClientOptions options = new WebClientOptions()
                .setDefaultHost(ollamaHost)
                .setDefaultPort(ollamaPort)
                .setConnectTimeout(5000) // milliseconds
                // The idle-timeout unit defaults to SECONDS in Vert.x; without
                // setting it explicitly, 10000 meant ~2.8 hours, not 10 s.
                .setIdleTimeout(10000)
                .setIdleTimeoutUnit(TimeUnit.MILLISECONDS);

        webClient = WebClient.create(vertx, options);

        // Typed consumer avoids the unchecked cast of message.body().
        vertx.eventBus().<JsonObject>consumer(OLLAMA_REQUEST_ADDRESS, message -> {
            JsonObject requestBody = message.body();
            LOGGER.info("Received Ollama request: {}", requestBody.encodePrettily());
            callOllama(requestBody)
                    .onSuccess(response -> {
                        LOGGER.info("callOllama response: {}", response);
                        message.reply(response);
                    })
                    .onFailure(throwable -> {
                        // Pass the throwable itself so SLF4J logs the stack
                        // trace instead of only the message string.
                        LOGGER.error("Failed to call Ollama API", throwable);
                        message.fail(500, new JsonObject()
                                .put("error", "Ollama API call failed")
                                .put("details", throwable.getMessage())
                                .encodePrettily());
                    });
        });

        LOGGER.info("OllamaVerticle deployed successfully, configured for {}:{}", ollamaHost, ollamaPort);
        startPromise.complete();
    }

    /**
     * Calls Ollama's {@code /api/generate} endpoint.
     *
     * @param requestBody JsonObject carrying {@code model}, {@code prompt}, etc.;
     *                    a {@code stream=false} entry is added before sending so
     *                    the reply arrives as a single JSON document
     * @return a Future completed with the {@code "response"} field of Ollama's
     *         JSON reply, or failed on a non-200 status, a malformed JSON body,
     *         or a transport error (original cause preserved)
     */
    private Future<String> callOllama(JsonObject requestBody) {
        requestBody.put("stream", false);
        return webClient.post("/api/generate")
                .sendJsonObject(requestBody)
                .compose(response -> {
                    if (response.statusCode() != 200) {
                        return Future.failedFuture("Ollama API returned non-200 status: "
                                + response.statusCode() + ", Body: " + response.bodyAsString());
                    }
                    LOGGER.debug("Ollama API response: {}", response.bodyAsString());
                    // A DecodeException thrown here fails the composed future
                    // instead of escaping as an uncaught handler exception.
                    return Future.succeededFuture(response.bodyAsJsonObject().getString("response"));
                });
    }

    /**
     * Closes the {@link WebClient} when the verticle is undeployed.
     *
     * @param stopPromise always completed
     */
    @Override
    public void stop(Promise<Void> stopPromise) {
        if (webClient != null) {
            webClient.close();
            LOGGER.info("Ollama WebClient closed.");
        }
        stopPromise.complete();
    }
}