package com.qianlou.springai01chat.controller;


import jakarta.annotation.Resource;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

/**
 * REST endpoints for chatting with a locally running Ollama model via Spring AI.
 *
 * @see <a href="https://docs.spring.io/spring-ai/reference/api/chat/ollama-chat.html">Spring AI Ollama chat reference</a>
 */
@RestController
public class OllamaController {

    @Resource
    private OllamaChatModel ollamaChatModel;

    /**
     * Simple chat call using the default model configured in application properties.
     *
     * @param msg the user message to send to the model
     * @return the model's text response
     */
    @RequestMapping("/ai/ollama")
    public String chat(@RequestParam("msg") String msg) {
        return ollamaChatModel.call(msg);
    }

    /**
     * Chat call with per-request model and temperature overrides.
     *
     * <p>The temperature is bound by Spring as a {@code Float}, so a malformed
     * value (e.g. {@code ?t=abc}) is rejected with HTTP 400 instead of the
     * HTTP 500 that an inline {@code Float.parseFloat} would produce.
     *
     * @param msg   the user message to send to the model
     * @param model the Ollama model name to use; defaults to {@code qwen:0.5b-chat}
     * @param t     sampling temperature; defaults to {@code 0.4}
     * @return the text content of the model's first generation
     */
    @RequestMapping("/ai/ollama2")
    public String chat2(@RequestParam("msg") String msg,
                        @RequestParam(value = "model", defaultValue = "qwen:0.5b-chat") String model,
                        @RequestParam(value = "t", defaultValue = "0.4") Float t) {
        // Per-request options override the defaults from application configuration.
        Prompt prompt = new Prompt(msg, OllamaOptions.create()
                .withModel(model)
                .withTemperature(t));
        return ollamaChatModel.call(prompt).getResult().getOutput().getContent();
    }

}
