package com.example.myspringai.controller;

import com.example.myspringai.HistoryMessageAOP;
import com.example.myspringai.model.LlmChatParam;
import com.example.myspringai.model.LlmChatWithRoleParam;
import com.example.myspringai.service.ChatService;
import jakarta.annotation.Resource;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.SystemPromptTemplate;
import org.springframework.ai.ollama.OllamaChatModel;
import org.springframework.http.MediaType;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

@RestController
@RequestMapping(value = "ai")
public class AIChatController {
    /**
     * System-prompt template for the chat-room persona. Placeholders: {name},
     * {role}, {question}, {voice}. (Runtime string — kept byte-identical.)
     */
    public static final String PROMPT_TEMPLATE = "你的名字叫{name}，你是一个{role}，可以帮助人们解决{question}\n" + "                  同时你需要表现得天生就知道这些内容\n" + "                  你应该用你的名字和{voice}的风格回复用户的请求。";
    /** Minimal per-agent template used by {@code chat-with-role}: {role},{content}. */
    public static final String PROMPT_TEMPLATE_2 = "{role},{content}";
    @Resource
    ChatService chatService;
    @Resource(name = "ollamaChatModelLlama")
    private OllamaChatModel ollamaChatModelLlama;
    @Resource(name = "ollamaChatModelGemma")
    private OllamaChatModel ollamaChatModelGemma;
    @Resource(name = "ollamaChatModelMistral")
    private OllamaChatModel ollamaChatModelMistral;
    @Resource(name = "ollamaChatModelQwen")
    private OllamaChatModel ollamaChatModelQwen;
    // NOTE(review): injected but not referenced by any endpoint in this controller —
    // confirm whether it is still needed before removing the bean wiring.
    @Resource(name = "ollamaChatModelLlamaUnsecure")
    private OllamaChatModel ollamaChatModelLlamaUnsecure;

    /**
     * Synchronous chat completion.
     *
     * @param llmChatParam request payload forwarded to {@link ChatService#call}
     * @return the model's full response as a single string
     */
    @PostMapping("chat")
    public String embed(@RequestBody LlmChatParam llmChatParam) {
        return chatService.call(llmChatParam);
    }

    /**
     * NOTE(review): despite the "chat-stream" path, this endpoint performs the same
     * blocking {@code chatService.call} as {@code "chat"} and returns a plain String —
     * it does not stream. A true streaming variant would return {@code Flux<String>}
     * with {@code produces = MediaType.TEXT_EVENT_STREAM_VALUE}; changing the return
     * type here would break existing callers, so behavior is preserved and flagged.
     *
     * @param llmChatParam request payload forwarded to {@link ChatService#call}
     * @return the model's full response as a single string
     */
    @PostMapping("chat-stream")
    public String chatWithRole(@RequestBody LlmChatParam llmChatParam) {
        return chatService.call(llmChatParam);
    }

    /**
     * Streams a completion seeded with one system message per configured agent.
     *
     * @param llmChatWithRoleParam carries the target model name and the agent
     *                             variable maps applied to {@link #PROMPT_TEMPLATE_2}
     * @return a reactive stream of response chunks from the selected model
     */
    @PostMapping("chat-with-role")
    public Flux<String> chatWithRole(@RequestBody LlmChatWithRoleParam llmChatWithRoleParam) {
        List<Message> historyMessages = new ArrayList<>();
        // Render one system message per agent definition.
        llmChatWithRoleParam.getAgents().forEach(agent ->
                historyMessages.add(new SystemPromptTemplate(PROMPT_TEMPLATE_2).createMessage(agent)));
        // Stream the AI response for the assembled conversation.
        return streamWith(llmChatWithRoleParam.getModel(), historyMessages);
    }

    /**
     * Server-sent-events chat room. Keeps a per-room conversation history and
     * seeds new rooms with a fixed "network security expert" persona.
     *
     * @param message user input appended to the room's history
     * @param roomId  key into the (simulated) history store
     * @param model   model name; unknown values fall back to llama
     * @return a reactive stream of response chunks from the selected model
     */
    @GetMapping(value = "chat-room", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public Flux<String> chatRoom(@RequestParam(value = "message") String message, @RequestParam("roomId") String roomId, @RequestParam(value = "model", defaultValue = "llama") String model) {
        // Simulated lookup of prior conversation for this room.
        // NOTE(review): HistoryMessageAOP.historyMessages is a shared static map read
        // and mutated here without synchronization — assumes no concurrent requests
        // for the same roomId; confirm or switch to a ConcurrentHashMap with
        // computeIfAbsent.
        Map<String, List<Message>> historyMessagesDao = HistoryMessageAOP.historyMessages;
        List<Message> historyMessages = historyMessagesDao.get(roomId);
        if (historyMessages == null || historyMessages.isEmpty()) {
            historyMessages = new ArrayList<>();
            // First message in the room: install the persona system prompt.
            SystemPromptTemplate systemPromptTemplate = new SystemPromptTemplate(PROMPT_TEMPLATE);
            HashMap<String, Object> agent = new HashMap<>();
            agent.put("role", "网络安全专家");
            agent.put("question", "网络安全问题");
            agent.put("name", "小安");
            agent.put("voice", "网络安全专家");
            historyMessages.add(systemPromptTemplate.createMessage(agent));
        }
        // Append the user's input to the conversation history.
        historyMessages.add(new UserMessage(message));
        // Simulated persistence of the room's conversation.
        historyMessagesDao.put(roomId, historyMessages);
        // Stream the AI response for the full history.
        return streamWith(model, historyMessages);
    }

    /**
     * Streams a completion from the model selected by name. Extracted to remove the
     * if/else chain previously duplicated in chatWithRole and chatRoom.
     */
    private Flux<String> streamWith(String model, List<Message> historyMessages) {
        return selectModel(model).stream(historyMessages.toArray(new Message[0]));
    }

    /**
     * Resolves a model name to its injected bean. Now also routes "qwen", which was
     * injected but unreachable before; null or unknown names fall back to llama,
     * matching the original default branch.
     */
    private OllamaChatModel selectModel(String model) {
        if (model == null) {
            return ollamaChatModelLlama;
        }
        switch (model) {
            case "mistral":
                return ollamaChatModelMistral;
            case "gemma":
                return ollamaChatModelGemma;
            case "qwen":
                return ollamaChatModelQwen;
            case "llama":
            default:
                return ollamaChatModelLlama;
        }
    }


}
