package com.zxt.spring_ai_demo.controller;

import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

/**
 * @Author Mr.zeng
 * @Date 2025/8/13 20:51
 */

@RestController
@RequestMapping("/ollama")
public class OllamaController {

    /**
     * Model used for {@code /chat}; set per-request via {@link OllamaOptions}
     * so it overrides whatever model is configured in application properties.
     */
    private static final String CHAT_MODEL = "deepseek-r1:1.5b";

    /**
     * Sampling temperature for {@code /chat}; higher values make replies
     * more creative, lower values more deterministic.
     */
    private static final double CHAT_TEMPERATURE = 0.5;

    private final OllamaChatModel ollamaChatModel;

    /**
     * Constructor injection keeps the dependency {@code final} and makes the
     * controller trivially instantiable in tests (preferred over field injection).
     *
     * @param ollamaChatModel Spring-AI client for the configured Ollama server
     */
    @Autowired
    public OllamaController(OllamaChatModel ollamaChatModel) {
        this.ollamaChatModel = ollamaChatModel;
    }

    /**
     * Blocking chat endpoint: sends the user's text to the model and returns the
     * full reply once generation finishes.
     *
     * @param userInput the user's message; required — a missing parameter now
     *                  yields a 400 instead of an NPE inside {@link Prompt}
     * @return the model's complete text response
     */
    @GetMapping("/chat")
    public String chat(@RequestParam String userInput) {
        // Per-request options override the model configured in the properties file.
        OllamaOptions options = OllamaOptions.builder()
                .model(CHAT_MODEL)
                .temperature(CHAT_TEMPERATURE)
                .build();
        Prompt prompt = new Prompt(userInput, options);
        ChatResponse chatResponse = ollamaChatModel.call(prompt);
        return chatResponse.getResult().getOutput().getText();
    }

    /**
     * Streaming chat endpoint: emits the model's reply incrementally as plain
     * text chunks (UTF-8), using the model configured in application properties.
     *
     * @param userInput the user's message; required
     * @return a {@link Flux} of partial response text
     */
    @GetMapping(value = "/streamChat", produces = "text/plain;charset=UTF-8")
    public Flux<String> streamChat(@RequestParam String userInput) {
        return ollamaChatModel.stream(userInput);
    }
}