package com.tiandao.ai.test;

import java.util.List;
import java.util.Map;

import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.PromptTemplate;
import org.springframework.ai.openai.OpenAiChatModel;
import org.springframework.ai.openai.OpenAiChatOptions;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.core.io.Resource;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import com.tiandao.core.message.JsonResult;

import reactor.core.publisher.Flux;

@RestController
public class DeepSeekController {

	/** DeepSeek chat model (OpenAI-compatible endpoint), injected via the constructor below. */
	// NOTE: the field must NOT carry @Autowired — Spring cannot field-inject a final
	// field, and constructor injection already populates it.
	private final OpenAiChatModel chatModel;

	/** StringTemplate prompt resource; rendered with a "type" placeholder in promptTemplate(). */
	@Value("classpath:/prompts/user-evaluator-message.st")
	private Resource userEvaluatorMessage;

	@Autowired
	public DeepSeekController(OpenAiChatModel chatModel) {
		this.chatModel = chatModel;
	}

	/**
	 * Shared request options for the streaming endpoints: DeepSeek chat model with a
	 * moderate temperature. Centralized so all endpoints stay consistent.
	 */
	private OpenAiChatOptions deepSeekOptions() {
		return OpenAiChatOptions.builder().model("deepseek-chat").temperature(0.4).build();
	}

	/**
	 * Blocking one-shot generation.
	 *
	 * @param message user prompt; defaults to "Tell me a joke"
	 * @return the model's full text answer wrapped in a JsonResult
	 */
	@GetMapping("/ai/generate")
	public JsonResult<String> generate(
			@RequestParam(value = "message", defaultValue = "Tell me a joke") String message) {
		ChatResponse response = chatModel.call(new Prompt(message));
		return JsonResult.ok(response.getResult().getOutput().getText());
	}

	/**
	 * Streams raw ChatResponse chunks to the client while echoing each chunk's text
	 * to stdout (echo is a debugging side effect; consider a logger for production).
	 *
	 * @param message user prompt; defaults to "Tell me a joke"
	 * @return a Flux emitting one ChatResponse per streamed chunk
	 */
	@GetMapping("/ai/generateStream")
	public Flux<ChatResponse> generateStream(
			@RequestParam(value = "message", defaultValue = "Tell me a joke") String message) {
		Flux<ChatResponse> flux = chatModel.stream(new Prompt(message, deepSeekOptions()));

		return flux.doOnNext(c -> System.out.print(c.getResult().getOutput().getText()))
				.doOnComplete(() -> System.out.println("\r\n"));
	}

	/**
	 * Same as {@link #generateStream} but maps each chunk to its plain text, so the
	 * client receives a stream of String fragments instead of full ChatResponse objects.
	 *
	 * @param message user prompt; defaults to "Tell me a joke"
	 * @return a Flux of text fragments, one per streamed chunk
	 */
	@GetMapping("/ai/generateStreamStr")
	public Flux<String> generateStreamStr(
			@RequestParam(value = "message", defaultValue = "Tell me a joke") String message) {
		Flux<ChatResponse> flux = chatModel.stream(new Prompt(message, deepSeekOptions()));

		return flux.doOnNext(c -> System.out.print(c.getResult().getOutput().getText()))
				.doOnComplete(() -> System.out.println("\r\n")).map(c -> c.getResult().getOutput().getText());
	}

	/**
	 * Streams a response built from the classpath prompt template, rendering the
	 * "type" placeholder with a fixed value ("爱情" / "love").
	 *
	 * @return a Flux emitting one ChatResponse per streamed chunk
	 */
	@GetMapping("/ai/promptTemplate")
	public Flux<ChatResponse> promptTemplate() {
		PromptTemplate userPromptTemplate = new PromptTemplate(this.userEvaluatorMessage, Map.of("type", "爱情"));
		Flux<ChatResponse> flux = chatModel
				.stream(new Prompt(List.of(userPromptTemplate.createMessage()), deepSeekOptions()));

		return flux.doOnNext(c -> System.out.print(c.getResult().getOutput().getText()))
				.doOnComplete(() -> System.out.println("\r\n"));
	}
}
