package com.jiazhong.spring.ai.ollama.controller;

import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;

@CrossOrigin
@RequestMapping("/ollama/ai")
@RestController
@Slf4j
public class OllamaController {

    /** Chat client bound to the Ollama model, injected by Spring. */
    @Resource
    private ChatClient ollamaChatClient;

    /**
     * Blocking chat endpoint: forwards the user's message to the model and
     * returns the complete answer once generation has finished.
     *
     * @param message the user's question for the model
     * @return the model's full reply as plain text
     */
    @GetMapping("/sync")
    public String sync(@RequestParam("message") String message) {
        var request = ollamaChatClient.prompt().user(message);
        // call() blocks until the model has produced the entire response
        return request.call().content();
    }

    /**
     * Streaming chat endpoint: emits response fragments to the client as the
     * model generates them, instead of waiting for the full answer.
     *
     * @param message the user's question for the model
     * @return a reactive stream of partial response text
     */
    @GetMapping(value = "/stream", produces = "text/html;charset=utf-8")
    public Flux<String> stream(@RequestParam("message") String message) {
        log.info("用户请求了:{}", message);
        var request = ollamaChatClient.prompt().user(message);
        // stream() yields a Flux that delivers content chunks incrementally
        return request.stream().content();
    }
}
