package com.heakey.controller;

import org.springframework.ai.chat.messages.SystemMessage;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.RequestParam;
import reactor.core.publisher.Flux;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;

/**
 * @author yuluo
 * @author <a href="mailto:yuluo08290126@gmail.com">yuluo</a>
 */

@RestController
@RequestMapping("/ollama/chat-model")
public class OllamaChatModelController {

	private static final String DEFAULT_PROMPT = "你好，介绍下你自己吧。请用中文回答。";

	private final ChatModel ollamaChatModel;

	public OllamaChatModelController(ChatModel chatModel) {
		this.ollamaChatModel = chatModel;
	}

	/**
	 * Demonstrates tool calling: registers the "getCurrentDateTime" and "setAlarm"
	 * tools via {@link OllamaOptions} and streams the model's answer as
	 * server-sent events.
	 *
	 * @return Flux<String> stream of response text chunks, terminated by "[complete]".
	 */
	@GetMapping(value = "/chat/tools", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
	public Flux<String> chatTools() {
		OllamaOptions ollamaOptions = OllamaOptions.builder()
				.toolNames("getCurrentDateTime", "setAlarm")
				.build();
		Prompt prompt = new Prompt("What day is tomorrow?", ollamaOptions);
		return ollamaChatModel.stream(prompt)
				.map(resp -> resp.getResult().getOutput().getText())
				.concatWith(Flux.just("[complete]"));
	}

	/**
	 * Streaming chat call; gives the model output a typewriter effect on the client.
	 *
	 * Fix: the original implementation ignored the "message" request parameter and
	 * always sent DEFAULT_PROMPT. The user-supplied message is now forwarded to the
	 * model, falling back to DEFAULT_PROMPT only when the message is null or blank.
	 *
	 * @param message the user's chat message.
	 * @return Flux<String> stream of response text chunks, terminated by "[complete]".
	 */
	@GetMapping(value = "/stream/chat", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
	public Flux<String> streamChat(@RequestParam("message") String message) {
		String userMessage = (message == null || message.isBlank()) ? DEFAULT_PROMPT : message;
		return ollamaChatModel.stream(new Prompt(userMessage))
				.map(resp -> resp.getResult().getOutput().getText())
				.concatWith(Flux.just("[complete]"));
	}

	/**
	 * Customizes LLM ChatOptions programmatically via {@link OllamaOptions}.
	 * Options set this way take precedence over the LLM options configured in
	 * application.yml.
	 *
	 * @return the model's complete (non-streaming) answer text.
	 */
	@GetMapping("/custom/chat")
	public String customChat() {

		OllamaOptions customOptions = OllamaOptions.builder()
				.topP(0.7)
				.model("llama3")
				.temperature(0.8)
				.build();

		return ollamaChatModel.call(new Prompt(DEFAULT_PROMPT, customOptions))
				.getResult()
				.getOutput()
				.getText();
	}

}