package cn.bugstack.trigger.http;

import cn.bugstack.api.IAiService;
import com.alibaba.fastjson.JSON;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.ChatOptions;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.SystemPromptTemplate;
import org.springframework.ai.document.Document;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.ai.vectorstore.PgVectorStore;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.web.bind.annotation.*;
import reactor.core.publisher.Flux;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * REST controller exposing Ollama-backed chat endpoints: plain completion,
 * streaming completion, and RAG-augmented streaming (context retrieved from
 * a PgVector store).
 *
 * @author zhaiyongxin
 * @since 2025/7/7
 */
@RestController
@RequestMapping("api/v1/ollama")
@CrossOrigin("*")
@Slf4j
public class OllamaController implements IAiService {

    /**
     * System prompt for RAG answers: answer from the retrieved DOCUMENTS
     * section as if known innately, admit uncertainty, and reply in Chinese.
     * The {documents} placeholder is filled with the joined document contents.
     */
    private static final String SYSTEM_PROMPT = """
            Use the information from the DOCUMENTS section to provide accurate answers but act as if you knew this information innately.
            If unsure, simply state that you don't know.
            Another thing you need to note is that your reply must be in Chinese!
            DOCUMENTS:
                {documents}
            """;

    @Resource
    private OllamaChatClient ollamaChatClient;
    @Resource
    private PgVectorStore pgVectorStore;

    /**
     * Synchronous chat completion against the given Ollama model.
     *
     * @param model   Ollama model name (e.g. {@code deepseek-r1:1.5b})
     * @param message user message to send
     * @return the complete chat response
     * http://localhost:8090/api/v1/ollama/generate?model=deepseek-r1:1.5b&message=hi
     */
    @RequestMapping(value = "generate", method = RequestMethod.GET)
    @Override
    public ChatResponse generate(@RequestParam("model") String model, @RequestParam("message") String message) {
        return ollamaChatClient.call(new Prompt(message, OllamaOptions.create().withModel(model)));
    }

    /**
     * Streaming chat completion against the given Ollama model.
     *
     * @param model   Ollama model name
     * @param message user message to send
     * @return a reactive stream of partial chat responses
     * http://localhost:8090/api/v1/ollama/generate_stream?model=deepseek-r1:1.5b&message=hi
     */
    @RequestMapping(value = "generate_stream", method = RequestMethod.GET)
    @Override
    public Flux<ChatResponse> generateStream(@RequestParam("model") String model, @RequestParam("message") String message) {
        return ollamaChatClient.stream(new Prompt(message, OllamaOptions.create().withModel(model)));
    }

    /**
     * Streaming chat completion augmented with retrieval (RAG): the top-5
     * documents matching {@code message} under the given knowledge tag are
     * injected into a system prompt alongside the user's question.
     *
     * @param model   Ollama model name
     * @param message user message, also used as the similarity-search query
     * @param ragTag  knowledge-base tag restricting which documents are searched
     * @return a reactive stream of partial chat responses
     * http://localhost:8090/api/v1/ollama/generate_stream_rag?model=deepseek-r1:1.5b&message=hi&ragTag=ragTag
     */
    @RequestMapping(value = "generate_stream_rag", method = RequestMethod.GET)
    @Override
    public Flux<ChatResponse> generateStreamByRag(@RequestParam("model") String model, @RequestParam("message") String message, @RequestParam("ragTag") String ragTag) {
        // Retrieve the top-5 most similar documents tagged with ragTag.
        // NOTE(review): the filter expression is built by string concatenation;
        // a ragTag containing a quote could break or subvert the filter —
        // consider Spring AI's FilterExpressionBuilder and/or input validation.
        SearchRequest searchRequest = SearchRequest.query(message)
                .withTopK(5)
                .withFilterExpression("knowledge == '" + ragTag + "'");
        List<Document> documents = pgVectorStore.similaritySearch(searchRequest);

        // Join the documents' text content. Previously the List<Document> itself
        // was passed to the template, which rendered List.toString() (object
        // noise and metadata) into the prompt instead of the document text.
        String documentContext = documents.stream()
                .map(Document::getContent)
                .collect(Collectors.joining());
        Message sysMessage = new SystemPromptTemplate(SYSTEM_PROMPT).createMessage(Map.of("documents", documentContext));

        List<Message> messages = new ArrayList<>();
        messages.add(new UserMessage(message));
        messages.add(sysMessage);
        return ollamaChatClient.stream(
                new Prompt(
                        messages,
                        OllamaOptions.create()
                                .withModel(model)
                ));
    }
}
