package cjl.http;

import cjl.AIService;
import jakarta.annotation.Resource;
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;

/**
 * REST endpoints exposing Ollama chat models, in both synchronous and
 * streaming (Server-Sent-Events-style reactive) form.
 *
 * @author ChenJueLong
 * @since 2025-08-21
 */
@RestController
@CrossOrigin("*")
@RequestMapping("/api/v1/ollama")
public class ollamaController implements AIService {
    // NOTE(review): class name should be UpperCamelCase ("OllamaController").
    // Left unchanged here because renaming a public type may break external
    // references; rename in a dedicated refactoring commit.

    @Resource
    private OllamaChatClient chatClient;

    /**
     * Runs a blocking chat completion against the configured Ollama server.
     * <p>Example: {@code GET /api/v1/ollama/generate?model=deepseek-r1:1.5b&message=hi}
     *
     * @param model   Ollama model identifier, e.g. {@code deepseek-r1:1.5b}
     * @param message user prompt text
     * @return the complete chat response once generation has finished
     */
    @Override
    @GetMapping("generate")
    public ChatResponse generate(@RequestParam String model, @RequestParam String message) {
        // Per-request model selection: the model name is passed through
        // OllamaOptions rather than being fixed in configuration.
        return chatClient.call(new Prompt(message, OllamaOptions.create().withModel(model)));
    }

    /**
     * Runs a streaming chat completion, emitting partial responses as the
     * model generates them.
     * <p>Example: {@code GET /api/v1/ollama/generate_stream?model=deepseek-r1:1.5b&message=1%2B1}
     *
     * @param model   Ollama model identifier, e.g. {@code deepseek-r1:1.5b}
     * @param message user prompt text
     * @return a reactive stream of incremental chat responses
     */
    @Override
    @GetMapping("generate_stream")
    public Flux<ChatResponse> generateStream(@RequestParam String model, @RequestParam String message) {
        return chatClient.stream(new Prompt(message, OllamaOptions.create().withModel(model)));
    }
}
