package com.ai.aimodel.controller;
import io.github.ollama4j.OllamaAPI;
import io.github.ollama4j.exceptions.OllamaBaseException;
import io.github.ollama4j.models.chat.OllamaChatMessageRole;
import io.github.ollama4j.models.chat.OllamaChatRequestBuilder;
import io.github.ollama4j.models.chat.OllamaChatRequestModel;
import io.github.ollama4j.models.chat.OllamaChatResult;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

import java.io.IOException;
import java.net.http.HttpClient;
import java.time.Duration;

@RestController
@RequestMapping("aiModel/")
public class AiModelController {

    /** Closing marker of the reasoning block emitted by DeepSeek-R1 models. */
    private static final String THINK_END_TAG = "</think>";

    /** Base URL of the local Ollama server. */
    private static final String OLLAMA_HOST = "http://127.0.0.1:11434/";

    /** Fallback reply returned when the model produces no usable answer. */
    private static final String FALLBACK_RESPONSE = "抱歉，我还不理解你在说什么";

    /**
     * Sends the user's message to the local DeepSeek-R1 model through Ollama
     * and returns the answer with the model's chain-of-thought removed.
     *
     * @param request the user's chat message
     * @return the model's reply (reasoning stripped), or a fallback message
     *         when the model returns a null/empty response
     * @throws OllamaBaseException  if the Ollama API reports an error
     * @throws IOException          if the HTTP call to the Ollama server fails
     * @throws InterruptedException if the request is interrupted while waiting
     */
    @RequestMapping("test")
    public String test(String request) throws OllamaBaseException, IOException, InterruptedException {
        OllamaAPI ollamaAPI = new OllamaAPI(OLLAMA_HOST);
        ollamaAPI.setVerbose(true);
        ollamaAPI.setRequestTimeoutSeconds(60);
        boolean isOllamaServerReachable = ollamaAPI.ping();
        System.out.println("Is Ollama server running: " + isOllamaServerReachable);

        OllamaChatRequestBuilder builder = OllamaChatRequestBuilder.getInstance("deepseek-r1:1.5b");
        OllamaChatRequestModel req = builder.withMessage(OllamaChatMessageRole.USER, request).build();
        OllamaChatResult chat = ollamaAPI.chat(req);

        String response = chat.getResponse();
        // Guard against null/empty replies: the original code NPE'd on null
        // and only checked length before indexing into the split result.
        if (response == null || response.isEmpty()) {
            return FALLBACK_RESPONSE;
        }
        return stripReasoning(response);
    }

    /**
     * Removes the model's leading reasoning section, if present.
     * DeepSeek-R1 prefixes its answer with a chain of thought terminated by
     * {@code </think>}; only the text after that tag is the actual answer.
     * Fixes the original {@code split("</think>")[1]}, which threw
     * {@link ArrayIndexOutOfBoundsException} when the tag was absent.
     */
    private static String stripReasoning(String response) {
        int tagIndex = response.indexOf(THINK_END_TAG);
        if (tagIndex < 0) {
            // No reasoning block — the reply is already the answer.
            return response;
        }
        return response.substring(tagIndex + THINK_END_TAG.length()).strip();
    }
}
