package com.example.erp;

import java.io.IOException;
import java.net.URI;
import java.net.http.HttpClient;
import java.net.http.HttpRequest;
import java.net.http.HttpRequest.BodyPublishers;
import java.net.http.HttpResponse;
import java.net.http.HttpResponse.BodyHandlers;
import java.time.Duration;
import java.util.logging.Level;
import java.util.logging.Logger;

import jakarta.servlet.http.HttpServletResponse;

import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.model.ChatResponse;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.http.ResponseEntity;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;

import reactor.core.publisher.Flux;

/**
 * Ollama integration. Talks to a local Ollama instance or the ERP Ollama
 * server, either through Spring AI ({@link ChatClient} / {@link OllamaChatModel},
 * configured in application.properties) or through Ollama's raw REST API.
 *
 * @author jiang 20250322
 */
@RestController
@RequestMapping("/ollama")
public class ChatOllamaController {

	private static final Logger LOG = Logger.getLogger(ChatOllamaController.class.getName());

	// Base URL for direct calls to the Ollama REST API.
	// ERP server: http://10.80.16.101:11434 — local default: http://127.0.0.1:11434
	private static final String OLLAMA_API_URL = "http://10.80.16.101:11434";

	// HttpClient is immutable and thread-safe; share one instance instead of
	// building a new client on every request.
	private static final HttpClient HTTP_CLIENT = HttpClient.newBuilder()
			.connectTimeout(Duration.ofSeconds(10)).build();

	// OllamaChatModel is the Ollama-specific ChatModel implementation; it exposes
	// Ollama-only options (model name, temperature, top-k, ...). The model used
	// here is the one configured in application.properties.
	@Autowired
	private OllamaChatModel ollamaChatModel;

	// ChatClient is Spring AI's provider-neutral chat API: the same code works
	// against OpenAI, Ollama, Azure, etc. Rule of thumb: need portability ->
	// ChatClient; need Ollama-specific features -> OllamaChatModel.
	private final ChatClient chatClient;

	// ChatClient instances are created through the auto-configured builder.
	public ChatOllamaController(ChatClient.Builder builder) {
		this.chatClient = builder.build();
	}

	/**
	 * Blocking chat call through the portable ChatClient,
	 * e.g. GET /ollama/chat?msg=... — Ollama connection details come from
	 * application.properties.
	 */
	@GetMapping("/chat")
	public String chat(@RequestParam(defaultValue = "你好，介绍下你自己吧。") String msg) {
		return this.chatClient.prompt().user(msg).call().content();
	}

	/**
	 * Streaming chat (GET /ollama/chatStream). Forcing UTF-8 on the servlet
	 * response fixes garbled Chinese characters in the streamed output.
	 */
	@GetMapping("/chatStream")
	public ResponseEntity<Flux<String>> chatStream(@RequestParam(defaultValue = "你好，介绍下你自己吧。") String msg, HttpServletResponse response) {
		try {
			response.setCharacterEncoding("UTF-8"); // fixes garbled Chinese output
			response.setContentType("text/event-stream;charset=UTF-8");

			Flux<String> streamResponse = chatClient.prompt(msg).stream().content();
			return ResponseEntity.ok(streamResponse);
		} catch (Exception e) {
			// NOTE(review): this only guards the (lazy) Flux construction; errors
			// raised while actually streaming surface inside the Flux, not here.
			LOG.log(Level.WARNING, "chatStream failed to start", e);
			return ResponseEntity.badRequest().build();
		}
	}

	/**
	 * Streaming chat (GET /ollama/chatStream2); output keeps the model's
	 * {@code <think>...</think>} sections. Declaring text/plain with an explicit
	 * UTF-8 charset is what keeps Chinese output readable — a bare mapping and
	 * TEXT_EVENT_STREAM both produced mojibake in testing.
	 */
	@GetMapping(value = "/chatStream2", produces = MediaType.TEXT_PLAIN_VALUE + ";charset=UTF-8")
	public ResponseEntity<Flux<String>> chatStream2(@RequestParam(defaultValue = "你好，介绍下你自己吧。") String msg) {
		try {
			Flux<String> streamResponse = chatClient.prompt(msg).stream().content();
			return ResponseEntity.ok(streamResponse);
		} catch (Exception e) {
			// NOTE(review): as in chatStream, streaming errors surface inside the Flux.
			LOG.log(Level.WARNING, "chatStream2 failed to start", e);
			return ResponseEntity.badRequest().build();
		}
	}

	/**
	 * Blocking call through the injected OllamaChatModel with a fixed
	 * translation prompt (GET /ollama/chatModel).
	 */
	@GetMapping("/chatModel")
	public String chat4() {
		String prompt = """
				你是一个精通中文和英文的翻译大师。如果我给你英文就翻译成中文，给你中文就翻译成英文。
				""";
		String message = """
				Ollama now supports tool calling with popular models such as Llama 3.1.
				This enables a model to answer a given prompt using tool(s) it knows about,
				making it possible for models to perform more complex tasks or interact with the outside world.
				""";
		return "原文：\n" + message + "译文：\n" + ollamaChatModel.call(prompt + ":" + message);
	}

	/**
	 * Streaming variant of {@link #chat4()} (GET /ollama/chatModelStream).
	 */
	@GetMapping("/chatModelStream")
	public Flux<String> chat5(HttpServletResponse response) {
		String prompt = """
				你是一个精通中文和英文的翻译大师。如果我给你英文就翻译成中文，给你中文就翻译成英文。
				""";
		String message = """
				Ollama now supports tool calling with popular models such as Llama 3.1.
				This enables a model to answer a given prompt using tool(s) it knows about,
				making it possible for models to perform more complex tasks or interact with the outside world.
				""";
		response.setCharacterEncoding("UTF-8"); // fixes garbled Chinese output
		Flux<ChatResponse> streamResponse = ollamaChatModel.stream(new Prompt(prompt + ":" + message));
		return streamResponse.map(resp -> resp.getResult().getOutput().getText());
	}

	/**
	 * Lists the locally installed models by calling the Ollama REST API directly
	 * (GET /ollama/api/chat). Ollama exposes the model list at GET /api/tags;
	 * the previously used /api/models endpoint does not exist and returned 404.
	 */
	@GetMapping("/api/chat")
	public String models() {
		URI uri = URI.create(OLLAMA_API_URL + "/api/tags");
		HttpRequest request = HttpRequest.newBuilder().uri(uri).GET().build();
		try {
			HttpResponse<String> response = HTTP_CLIENT.send(request, BodyHandlers.ofString());
			if (response.statusCode() == 200) {
				LOG.info(() -> "Model Info:" + response.body());
			} else {
				LOG.warning("Failed to get model info, Status Code: " + response.statusCode());
			}
			return response.body();
		} catch (IOException e) {
			LOG.log(Level.WARNING, "Ollama model list request failed", e);
		} catch (InterruptedException e) {
			Thread.currentThread().interrupt(); // restore the interrupt status
			LOG.log(Level.WARNING, "Ollama model list request interrupted", e);
		}
		return "no return";
	}

	/**
	 * Synchronous text generation via the raw Ollama REST API
	 * (GET /ollama/api/chat2 -> POST {OLLAMA_API_URL}/api/generate).
	 * {@code "stream": false} requests a single JSON response instead of the
	 * default NDJSON chunk stream, which matches the blocking send below.
	 *
	 * @throws IOException          if the request fails
	 * @throws InterruptedException if the calling thread is interrupted
	 */
	@GetMapping("/api/chat2")
	public String generate() throws IOException, InterruptedException {
		Duration responseTimeout = Duration.ofSeconds(30);

		String prompt = "红烧肉教程"; // hard-coded demo prompt
		String model = "deepseek-r1:32b"; // change to any locally available model
		// NOTE(review): String.format does no JSON escaping — fine for these
		// constants, but switch to a JSON library if the prompt ever becomes
		// user-supplied.
		String messageBody = String.format("{\"prompt\": \"%s\", \"model\": \"%s\", \"stream\": false}", prompt, model);

		URI uri = URI.create(OLLAMA_API_URL + "/api/generate");
		HttpRequest postRequest = HttpRequest.newBuilder().timeout(responseTimeout).uri(uri)
				.header("Content-Type", "application/json;charset=utf-8").POST(BodyPublishers.ofString(messageBody))
				.build();
		HttpResponse<String> postResponse = HTTP_CLIENT.send(postRequest, BodyHandlers.ofString());
		LOG.info(() -> "POST 同步响应状态码：" + postResponse.statusCode());
		LOG.info(() -> "POST 同步响应体：\n" + postResponse.body());

		return postResponse.body();
	}
}
