/**
 * @description ollama服务接口
 * @author xu148 fengye.cn @秋枫
 * @create 2025/6/25
 */

package cn.fengye.Controller;

import cn.fengye.api.IAiService;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;

@Slf4j
@CrossOrigin(origins = "*")
@RestController
@RequestMapping("/api/v1/ollama/")
public class OllamaAiController implements IAiService {

    /** Spring AI client for the local Ollama runtime; injected by the container. */
    @Resource
    private OllamaChatClient ollamaChatClient;

    /**
     * Synchronous chat completion.
     * Example: http://localhost:8090/api/v1/ollama/generate?model=deepseek-r1:1.5b&message=1+1
     *
     * @param model   Ollama model name (e.g. {@code deepseek-r1:1.5b})
     * @param message user prompt text
     * @return the full chat response once the model has finished generating
     */
    // NOTE: method name "genarate" is a typo, but it is fixed by the IAiService
    // interface contract — rename there first before renaming here.
    @GetMapping("generate")
    @Override
    public ChatResponse genarate(@RequestParam String model, @RequestParam String message) {
        log.info("generate request: model={}, message={}", model, message);
        return ollamaChatClient.call(new Prompt(message, OllamaOptions.create().withModel(model)));
    }

    /**
     * Streaming chat completion: emits partial responses as the model generates them.
     *
     * @param model   Ollama model name
     * @param message user prompt text
     * @return a reactive stream of incremental chat responses
     */
    @GetMapping("generate_stream")
    @Override
    public Flux<ChatResponse> streamGenerate(@RequestParam String model, @RequestParam String message) {
        log.info("generate_stream request: model={}, message={}", model, message);
        return ollamaChatClient.stream(new Prompt(message, OllamaOptions.create().withModel(model)));
    }
}
