package cn.xxliang.aiproject.trigger.http;

import cn.xxliang.aiproject.api.IAiService;
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.SystemPromptTemplate;
import org.springframework.ai.document.Document;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.ai.openai.OpenAiChatClient;
import org.springframework.ai.vectorstore.PgVectorStore;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * @author xxliang
 * @date 2025/7/4  19:56
 * @description 控制层;
 */
@RestController
@CrossOrigin("*")
@RequestMapping("/api/v1/openai")
public class OpenAiController implements IAiService {

    /**
     * System prompt for retrieval-augmented answers: the model must ground its
     * reply in the retrieved DOCUMENTS, admit when it does not know, and
     * answer in Chinese. Promoted from a method-local variable to a constant.
     */
    private static final String SYSTEM_PROMPT = """
            Use the information from the DOCUMENTS section to provide accurate answers but act as if you knew this information innately.
            If unsure, simply state that you don't know.
            Another thing you need to note is that your reply must be in Chinese!
            DOCUMENTS:
                {documents}
            """;

    // Single Ollama client; the original class autowired the same bean twice
    // (as `chatClient` and `ollamaChatClient`).
    @Autowired
    private OllamaChatClient ollamaChatClient;

    @Autowired
    private OpenAiChatClient openAiChatClient;

    // Vector store backing the RAG retrieval step.
    @Autowired
    private PgVectorStore pgVectorStore;

    /**
     * Simple liveness probe.
     *
     * @return the literal string {@code "success!"}
     */
    @GetMapping("/test")
    public String test() {
        return "success!";
    }

    /**
     * Synchronous chat completion against the given Ollama model.
     *
     * @param model   Ollama model name to run
     * @param message user message to send
     * @return the complete chat response
     */
    @GetMapping("/generate")
    @Override
    public ChatResponse generate(@RequestParam("model") String model, @RequestParam("message") String message) {
        return ollamaChatClient.call(new Prompt(message, OllamaOptions.create().withModel(model)));
    }

    /**
     * Streaming chat completion against the given Ollama model.
     *
     * @param model   Ollama model name to run
     * @param message user message to send
     * @return a stream of partial chat responses
     */
    @GetMapping("/generate_stream")
    @Override
    public Flux<ChatResponse> generateStream(@RequestParam String model, @RequestParam String message) {
        return ollamaChatClient.stream(new Prompt(message, OllamaOptions.create().withModel(model)));
    }

    /**
     * Streaming RAG completion: retrieves the top-5 document chunks matching
     * the given knowledge-base tag from pgvector and injects them into the
     * system prompt alongside the user question.
     *
     * @param model   Ollama model name to run
     * @param ragTag  knowledge-base tag used to filter the vector search
     * @param message user question
     * @return a stream of partial chat responses
     */
    // The original method had @RequestParam annotations but no mapping, so the
    // endpoint was unreachable over HTTP.
    @GetMapping("/generate_stream_rag")
    @Override
    public Flux<ChatResponse> generateStream(@RequestParam String model, @RequestParam String ragTag, @RequestParam String message) {
        // Strip single quotes so user input cannot break out of the quoted
        // filter expression (filter-expression injection).
        String safeRagTag = ragTag.replace("'", "");

        // Retrieve the most relevant chunks for this tag from pgvector.
        SearchRequest searchRequest = SearchRequest.query(message)
                .withTopK(5)
                .withFilterExpression("ragTag = '" + safeRagTag + "'");
        List<Document> documents = pgVectorStore.similaritySearch(searchRequest);
        String documentText = documents.stream()
                .map(Document::getContent)
                .collect(Collectors.joining());

        Message systemMessage = new SystemPromptTemplate(SYSTEM_PROMPT)
                .createMessage(Map.of("documents", documentText));

        // System message first, then the user question — the conventional
        // ordering (the original appended the system message last).
        List<Message> messages = new ArrayList<>();
        messages.add(systemMessage);
        messages.add(new UserMessage(message));

        // Use the Ollama client: the prompt carries OllamaOptions and an
        // Ollama model name; the OpenAI client used previously would not have
        // honored those options. NOTE(review): confirm Ollama (not OpenAI)
        // was the intended backend for this RAG endpoint.
        return ollamaChatClient.stream(new Prompt(messages, OllamaOptions.create().withModel(model)));
    }
}
