package com.bins.langchain.spring.ai.vector.store.controller;

import jakarta.annotation.Resource;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

/**
 * REST endpoints for chatting with a locally hosted Ollama model via Spring AI.
 *
 * <p>Exposes three GET endpoints under {@code /chat}: a blocking single-shot
 * completion, a streaming completion, and a variant that overrides the model
 * and temperature per request.
 */
@RestController
@RequestMapping("/chat")
public class ChatController {

    /** Chat model client; injected via constructor so the dependency is explicit and final. */
    private final OllamaChatModel ollamaChatModel;

    public ChatController(OllamaChatModel ollamaChatModel) {
        this.ollamaChatModel = ollamaChatModel;
    }

    /**
     * Blocking one-shot completion using the default configured model.
     *
     * @param message the user prompt text (required)
     * @return the model's full response content
     */
    @GetMapping
    public String generate(@RequestParam(value = "message") String message) {
        ChatResponse response = ollamaChatModel.call(new Prompt(message));
        return response.getResult().getOutput().getContent();
    }

    /**
     * Streaming completion: emits the response incrementally as the model
     * generates tokens.
     *
     * @param message the user prompt text; defaults to "讲个笑话" ("tell a joke")
     * @return a {@link Flux} of partial response content chunks
     */
    @GetMapping("/stream")
    public Flux<String> generateStream(@RequestParam(value = "message", defaultValue = "讲个笑话") String message) {
        Prompt prompt = new Prompt(new UserMessage(message));
        Flux<ChatResponse> stream = this.ollamaChatModel.stream(prompt);
        return stream.map(response -> response.getResult().getOutput().getContent());
    }

    /**
     * Blocking completion that overrides the default model with "mistral" and
     * uses a lower temperature (0.4) for more deterministic output.
     *
     * @param message the user prompt text (required)
     * @return the model's full response content
     */
    @GetMapping(value = "/switchModel")
    public String switchModel(@RequestParam(value = "message") String message) {
        ChatResponse chatResponse = ollamaChatModel.call(new Prompt(message, OllamaOptions.create()
                .withModel("mistral")
                .withTemperature(0.4D)));
        // Extract once instead of traversing the response twice.
        String content = chatResponse.getResult().getOutput().getContent();
        System.out.println(content); // TODO(review): replace with SLF4J logging
        return content;
    }

}