package cn.liu.knowledge.controller;

import cn.liu.knowledge.advisor.RagAdvisor;
import cn.liu.knowledge.domain.ReqChatMessageVo;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.client.advisor.QuestionAnswerAdvisor;
import org.springframework.ai.chat.messages.SystemMessage;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.http.MediaType;
import org.springframework.http.codec.ServerSentEvent;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestBody;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import reactor.core.publisher.Flux;

import java.util.Map;

/**
 * 大模型对话接口
 *
 * @author LIU
 */
@Slf4j
@RestController
@RequestMapping(value = "chat")
@RequiredArgsConstructor(onConstructor = @__(@Autowired))
public class ChatController {

    /** Bean name of the fallback model used when the requested one is not registered. */
    private static final String DEFAULT_CHAT_MODEL = "ollamaChatModel";

    /** All registered {@link ChatModel} beans, keyed by bean name (e.g. "ollamaChatModel"). */
    private final Map<String, ChatModel> chatModelMap;

    /** Vector store backing the RAG question-answer advisor. */
    private final VectorStore vectorStore;

    /**
     * Synchronous chat: one request, one complete response.
     *
     * @param chatMessage request carrying the model bean name and the user message
     * @return the model's full reply as plain text
     */
    @PostMapping(value = "msg")
    public String chat(@RequestBody ReqChatMessageVo chatMessage) {
        String message = chatMessage.getMessage();
        // Single map lookup (the original called get() twice); fall back to the default model.
        ChatModel chatModel = resolveModel(chatMessage.getModel());
        return chatModel.call(message);
    }

    /**
     * Streaming chat over Server-Sent Events, with optional RAG context injection.
     *
     * @param chatMessage request carrying the Ollama model name ({@code model}), the
     *                    {@link ChatModel} bean name ({@code impl}) and the user message
     * @return a stream of SSE frames, each containing a chunk of the reply
     */
    @PostMapping(value = "message", produces = MediaType.TEXT_EVENT_STREAM_VALUE)
    public Flux<ServerSentEvent<String>> chatMessage(@RequestBody ReqChatMessageVo chatMessage) {
        String model = chatMessage.getModel();
        String impl = chatMessage.getImpl();
        String message = chatMessage.getMessage();
        ChatModel chatModel = resolveModel(impl);

        return ChatClient.create(chatModel)
                .prompt()
                .user(message)
                .advisors(advisorSpec -> useVectorStore(advisorSpec, true))
                // Bug fix: the original passed the literal string "model" here, ignoring the
                // model name supplied in the request.
                .options(OllamaOptions.builder().model(model).build())
                .stream()
                .content()
                .map(chunk -> ServerSentEvent.<String>builder().data(chunk).build());
    }

    /**
     * Registers RAG advisors on the given spec when vector-store augmentation is enabled.
     * <p>
     * NOTE(review): both a {@link QuestionAnswerAdvisor} and a {@link RagAdvisor} are
     * registered with the same prompt template, as in the original code — confirm both are
     * intended, as each may inject context independently.
     *
     * @param advisorSpec       the advisor spec of the in-flight ChatClient request
     * @param enableVectorStore whether to attach the RAG advisors; {@code null} is treated
     *                          as {@code false} (the original would have thrown an NPE)
     */
    public void useVectorStore(ChatClient.AdvisorSpec advisorSpec, Boolean enableVectorStore) {
        // Null-safe unboxing: skip unless explicitly enabled.
        if (!Boolean.TRUE.equals(enableVectorStore)) return;
        // {question_answer_context} is a placeholder that QuestionAnswerAdvisor replaces
        // with the documents retrieved from the vector store.
        String promptWithContext = """
                下面是上下文信息
                ---------------------
                {question_answer_context}
                ---------------------
                给定的上下文和提供的历史信息，而不是事先的知识，回复用户的意见。如果答案不在上下文中，告诉用户你不能回答这个问题。
                """;
        advisorSpec.advisors(new QuestionAnswerAdvisor(vectorStore, SearchRequest.builder().topK(1).build(), promptWithContext));

        advisorSpec.advisors(new RagAdvisor(promptWithContext));
    }

    /**
     * Looks up a {@link ChatModel} bean by name, falling back to {@value #DEFAULT_CHAT_MODEL}
     * when the key is absent or unmapped.
     */
    private ChatModel resolveModel(String beanName) {
        ChatModel chatModel = chatModelMap.get(beanName);
        return chatModel != null ? chatModel : chatModelMap.get(DEFAULT_CHAT_MODEL);
    }

}
