package cn.ntopic.web;

import java.util.Map;

import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import reactor.core.publisher.Flux;

@RestController
public class OllamaClientController {

	/**
	 * Default Ollama model for the v3 endpoint when the caller does not
	 * supply a {@code model} query parameter. Kept as a compile-time
	 * constant so it can be used inside the {@code @RequestParam} annotation.
	 */
	private static final String DEFAULT_MODEL = "qwen2:0.5b";

	/** Chat model injected by Spring; qualified in case multiple ChatModel beans exist. */
	private final OllamaChatModel chatModel;

	public OllamaClientController(@Qualifier("ollamaChatModel") OllamaChatModel chatModel) {
		this.chatModel = chatModel;
	}

	/**
	 * Synchronous chat: blocks until the model has produced the full completion.
	 * <p>
	 * Example: http://localhost:8088/ollama/chat/v1?msg=天空为什么是蓝色的？
	 *
	 * @param msg the user message sent to the model
	 * @return a single-entry map holding the generated text under the key {@code "generation"}
	 */
	@GetMapping("/ollama/chat/v1")
	public Map<String, String> chatV1(@RequestParam(value = "msg", defaultValue = "天空为什么是蓝色的？") String msg) {
		return Map.of("generation", chatModel.call(msg));
	}

	/**
	 * Streaming chat: tokens are emitted to the client as the model generates them.
	 * <p>
	 * Example: http://localhost:8088/ollama/chat/v2?msg=人为什么要不断的追求卓越？
	 *
	 * @param msg the user message sent to the model
	 * @return a reactive stream of partial {@link ChatResponse} chunks
	 */
	@GetMapping("/ollama/chat/v2")
	public Flux<ChatResponse> chatV2(
			@RequestParam(value = "msg", defaultValue = "人为什么要不断的追求卓越？") String msg) {
		Prompt prompt = new Prompt(new UserMessage(msg));
		return chatModel.stream(prompt);
	}

	/**
	 * Streaming chat with per-request options (model and temperature), overriding
	 * the application defaults for this call only.
	 * <p>
	 * Example: http://localhost:8088/ollama/chat/v3?msg=你认为老牛同学的文章如何？
	 * <p>
	 * NOTE(review): the {@code defaultValue} of {@code msg} duplicates the v2
	 * endpoint's default and does not match the example message above — looks
	 * like a copy-paste leftover. Left unchanged to preserve behavior; confirm
	 * the intended default with the original author.
	 *
	 * @param msg   the user message sent to the model
	 * @param model the Ollama model to use; defaults to {@value #DEFAULT_MODEL}
	 * @return a reactive stream of partial {@link ChatResponse} chunks
	 */
	@GetMapping("/ollama/chat/v3")
	public Flux<ChatResponse> chatV3(
			@RequestParam(value = "msg", defaultValue = "人为什么要不断的追求卓越？") String msg,
			@RequestParam(value = "model", defaultValue = DEFAULT_MODEL) String model) {
		Prompt prompt = new Prompt(
				msg,
				OllamaOptions.create()
						.withModel(model)
						// Low temperature biases the model toward more deterministic output.
						.withTemperature(0.4F));
		return chatModel.stream(prompt);
	}
}
