package com.gzist.springailab05.controller;

import com.gzist.springailab05.rag.MyTokenTextSplitter;
import com.gzist.springailab05.service.RAGService;
import jakarta.annotation.Resource;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.client.ChatClient;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.document.Document;
import org.springframework.ai.document.DocumentReader;
import org.springframework.ai.rag.Query;
import org.springframework.ai.rag.preretrieval.query.transformation.QueryTransformer;
import org.springframework.ai.rag.preretrieval.query.transformation.RewriteQueryTransformer;
import org.springframework.ai.reader.TextReader;
import org.springframework.ai.tool.ToolCallback;
import org.springframework.ai.transformer.splitter.TokenTextSplitter;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.http.MediaType;
import org.springframework.http.codec.ServerSentEvent;
import org.springframework.web.bind.annotation.GetMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.servlet.mvc.method.annotation.SseEmitter;
import reactor.core.Disposable;
import reactor.core.publisher.Flux;

import java.io.IOException;
import java.util.List;

/**
 * REST endpoints for the RAG (Retrieval-Augmented Generation) demo.
 *
 * <p>Exposes four operations under {@code /ai}: loading the local demo
 * document into the vector store, a blocking chat call, a reactive SSE
 * stream, and a servlet {@link SseEmitter} bridge over the RAG stream.
 */
@Slf4j
@RestController
@RequiredArgsConstructor
@RequestMapping("/ai")
public class RagAdvanceController {

    /** Chat/RAG orchestration service (sync and streaming variants). */
    private final RAGService ragService;
    /** Vector database the split document chunks are written into. */
    private final VectorStore vectorStore;
    /** Custom token-based splitter applied before indexing. */
    private final MyTokenTextSplitter myTokenTextSplitter;

    /**
     * Loads the local file {@code static/demoHospital.txt} into the vector
     * database: read, split into token-sized chunks, then index.
     *
     * @return the literal string {@code "success"} once indexing finishes
     * @throws IOException declared for callers; reading is delegated to
     *                     Spring AI's {@link TextReader}
     */
    @GetMapping("/data/load")
    public String loadData() throws IOException {
        // 1. Read the file. No "classpath:" prefix — presumably Spring's
        //    DefaultResourceLoader resolves the plain path as a classpath
        //    resource; TODO confirm this resolves in the packaged jar too.
        DocumentReader reader = new TextReader("static/demoHospital.txt");
        List<Document> docs = reader.get();
        // 2. Split the documents into chunks suitable for embedding.
        List<Document> splitDocuments = myTokenTextSplitter.splitCustomized(docs);
        log.info("文件切分为 [{}]", splitDocuments.size());
        // 3. Persist the chunks in the vector store.
        vectorStore.add(splitDocuments);
        return "success";
    }

    /**
     * Synchronous (blocking) chat call.
     *
     * @param message user message
     * @param chatId  conversation id used for chat memory
     * @return the complete model answer
     */
    @GetMapping("/chat/sync")
    public String doChatWithragServiceSync(String message, String chatId) {
        return ragService.doChat(message, chatId);
    }


    /**
     * Streaming chat over Server-Sent Events using the reactive stack.
     *
     * @param message user message
     * @param chatId  conversation id used for chat memory
     * @return a {@link Flux} of SSE events, one per streamed chunk
     */
    @GetMapping("/chat/sse")
    public Flux<ServerSentEvent<String>> doChatWithragServiceServerSentEvent(String message, String chatId) {
        return ragService.doChatByStream(message, chatId)
                .map(chunk -> ServerSentEvent.<String>builder()
                        .data(chunk)
                        .build());
    }

    /**
     * Streaming RAG knowledge-base chat bridged onto a servlet
     * {@link SseEmitter}.
     *
     * <p>The reactive subscription is disposed when the emitter completes,
     * times out, or errors; previously the {@code Disposable} returned by
     * {@code subscribe(...)} was discarded, so the upstream {@link Flux}
     * kept producing after the client disconnected (subscription leak).
     *
     * @param message user message
     * @param chatId  conversation id used for chat memory
     * @return an emitter that pushes each streamed chunk to the client
     */
    @GetMapping("/rag/sse_emitter")
    public SseEmitter doChatWithragServiceServerSseEmitter(String message, String chatId) {
        // Emitter with a generous timeout: 3 minutes.
        SseEmitter sseEmitter = new SseEmitter(180000L);
        // Subscribe to the reactive stream and forward each chunk directly
        // to the emitter, keeping the Disposable for lifecycle cleanup.
        Disposable subscription = ragService.doChatWithRag(message, chatId)
                .subscribe(chunk -> {
                    try {
                        sseEmitter.send(chunk);
                    } catch (IOException e) {
                        // Client gone or pipe broken: fail the emitter; the
                        // onError callback below then disposes the stream.
                        sseEmitter.completeWithError(e);
                    }
                }, sseEmitter::completeWithError, sseEmitter::complete);
        // Stop consuming the upstream Flux once the emitter is finished in
        // any way; otherwise the subscription outlives the HTTP exchange.
        sseEmitter.onCompletion(subscription::dispose);
        sseEmitter.onTimeout(subscription::dispose);
        sseEmitter.onError(e -> subscription.dispose());
        // Return the emitter to the servlet container.
        return sseEmitter;
    }
}
