package com.demo.component.ollama.controller;

import com.demo.component.base.BaseController;
import com.demo.component.model.Question;
import com.demo.component.ollama.generate.inter.OllamaService;
import com.demo.component.utils.JsonUtil;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.context.ConfigurableApplicationContext;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;

import java.util.List;

@Slf4j
@RestController
@RequestMapping("/ollama")
public class OllamaController extends BaseController {

    /** Collaborator that performs the actual generation against the Ollama backend. */
    private final OllamaService ollamaService;

    /**
     * Creates the controller with its required service.
     *
     * @param ollamaService service used to generate answers from user prompts
     */
    @Autowired
    public OllamaController(OllamaService ollamaService) {
        this.ollamaService = ollamaService;
    }

    /**
     * Generates a complete answer for the given question in one blocking call.
     *
     * @param question request body carrying the user's prompt
     * @return the generated answer text
     */
    @PostMapping("/generate-sync")
    public String generateSync(@RequestBody Question question) {
        // Parameterized logging avoids eager string construction and adds context.
        log.info("generate-sync request: {}", JsonUtil.toJson(question));

        String answer = this.ollamaService.generateAnswer(question.getPrompt());

        log.info("generate-sync answer: {}", answer);
        return answer;
    }

    /**
     * Streams the model's response incrementally as server-sent events.
     *
     * @param question request body carrying the user's prompt
     * @return a reactive stream of chat-response chunks
     */
    @PostMapping(value = "/generate-stream", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public Flux<ChatResponse> generateStream(@RequestBody Question question) {
        return this.ollamaService.generateStream(question.getPrompt());
    }
}
