package com.springboot_ollama.controller;

import java.util.regex.Pattern;

import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;

@RestController
@RequestMapping("/api/ai")
public class AIController {

    /**
     * Strips reasoning ("think") blocks emitted by reasoning models such as DeepSeek-R1.
     * Matches a complete {@code <think>...</think>} block (DOTALL, lazy — reasoning text
     * spans multiple lines) plus any trailing whitespace, and also any stray unmatched
     * {@code <think>}/{@code </think>} tag. Compiled once — do not recompile per request.
     */
    private static final Pattern THINK_BLOCK =
            Pattern.compile("(?s)<think>.*?</think>\\s*|</?think>");

    private final OllamaChatModel chatModel;

    // Constructor injection (preferred over field @Autowired): keeps the dependency
    // final and makes the controller testable without a Spring context.
    @Autowired
    public AIController(OllamaChatModel chatModel) {
        this.chatModel = chatModel;
    }

    /**
     * Basic blocking chat endpoint.
     *
     * @param message the user prompt
     * @return the model's reply with any reasoning block removed
     */
    @GetMapping("/chat")
    public String simpleChat(@RequestParam String message) {
        // Bug fix: the original regex "(<think>|)" only removed the opening tag
        // (the empty alternative was a no-op), leaving the closing tag and the
        // entire reasoning text in the response. Remove the whole block instead.
        return THINK_BLOCK.matcher(chatModel.call(message)).replaceAll("");
    }

    /**
     * Streaming chat endpoint (Server-Sent Events) with sampling control.
     *
     * @param message     the user prompt
     * @param temperature sampling temperature, default 0.7
     * @return a stream of response text fragments
     */
    @GetMapping(value = "/stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public Flux<String> streamChat(
            @RequestParam String message,
            @RequestParam(defaultValue = "0.7") float temperature) {

        // Bug fix: the temperature parameter was accepted but never passed to the
        // options builder, so clients could not actually control sampling.
        OllamaOptions options = OllamaOptions.builder()
                .temperature((double) temperature)
                .build();

        return chatModel.stream(new Prompt(message, options))
                .map(r -> r.getResult().getOutput().getText());
    }
}
