package com.zone.http;

import com.zone.IAiService;
import com.zone.RedisKeys;
import com.zone.template.PromptTemplate;
import jakarta.annotation.Resource;
import lombok.extern.slf4j.Slf4j;
import org.redisson.api.RList;
import org.redisson.api.RedissonClient;
import org.springframework.ai.chat.ChatResponse;
import org.springframework.ai.chat.messages.Message;
import org.springframework.ai.chat.messages.UserMessage;
import org.springframework.ai.chat.prompt.Prompt;
import org.springframework.ai.chat.prompt.SystemPromptTemplate;
import org.springframework.ai.document.Document;
import org.springframework.ai.ollama.OllamaChatClient;
import org.springframework.ai.ollama.api.OllamaOptions;
import org.springframework.ai.reader.tika.TikaDocumentReader;
import org.springframework.ai.transformer.splitter.TokenTextSplitter;
import org.springframework.ai.vectorstore.PgVectorStore;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.ai.vectorstore.filter.FilterExpressionBuilder;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.web.bind.annotation.*;
import org.springframework.web.multipart.MultipartFile;
import reactor.core.publisher.Flux;

import java.util.ArrayList;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

/**
 * REST endpoints backed by an Ollama chat model: plain generation (blocking and
 * streaming), knowledge-base document upload into PgVector, and RAG-augmented chat
 * restricted to a tagged knowledge base.
 */
@Slf4j
@RestController
@CrossOrigin("${app.config.cross-origin}")
@RequestMapping("/api/${app.config.api-version}/ollama/")
public class OllamaController implements IAiService {

    /** Metadata key used to tag documents with the knowledge base they belong to. */
    private static final String KNOWLEDGE_METADATA_KEY = "knowledge";

    /** Number of nearest-neighbour chunks retrieved from the vector store for RAG. */
    private static final int RAG_TOP_K = 5;

    @Resource
    private OllamaChatClient chatClient;
    @Autowired
    private TokenTextSplitter tokenTextSplitter;
    @Autowired
    private PgVectorStore pgVectorStore;
    @Autowired
    private RedissonClient redissonClient;

    /**
     * Single-shot chat completion.
     *
     * @param model   Ollama model name to route the request to
     * @param message user prompt
     * @return the full model response
     */
    @Override
    @GetMapping("/generate")
    public ChatResponse generate(@RequestParam String model, @RequestParam String message) {
        return chatClient.call(new Prompt(message, OllamaOptions.create().withModel(model)));
    }

    /**
     * Streaming chat completion; emits partial responses as the model produces tokens.
     *
     * @param model   Ollama model name to route the request to
     * @param message user prompt
     * @return a reactive stream of incremental responses
     */
    @Override
    @GetMapping("/generateStream")
    public Flux<ChatResponse> generateStream(@RequestParam String model, @RequestParam String message) {
        return chatClient.stream(new Prompt(message, OllamaOptions.create().withModel(model)));
    }

    /**
     * Parses an uploaded document, splits it into token-bounded chunks, tags every
     * chunk with {@code tag}, and stores the chunks in the vector store. The tag is
     * registered in the shared Redis tag list if not already present.
     *
     * <p>Binding fix: multipart files are resolved by the multipart resolver, not by
     * an {@code HttpMessageConverter}, so the parameter must be bound with
     * {@code @RequestParam}/{@code @RequestPart} — the previous {@code @RequestBody}
     * binding does not work for {@code MultipartFile}.
     *
     * @param file uploaded document (any format Tika can parse: pdf, doc, txt, ...)
     * @param tag  knowledge-base tag to attach to every stored chunk
     * @return a summary of how many documents were parsed and how many chunks stored
     */
    @Override
    @PostMapping("/upload")
    public String upload(@RequestParam("file") MultipartFile file, @RequestParam String tag) {
        // Parse the upload into one or more source documents.
        TikaDocumentReader tikaDocumentReader = new TikaDocumentReader(file.getResource());
        List<Document> sourceDocumentList = tikaDocumentReader.get();

        // Split into token-bounded chunks suitable for embedding.
        List<Document> splitDocumentList = tokenTextSplitter.apply(sourceDocumentList);

        // Tag source and split documents so searches can filter by knowledge base.
        sourceDocumentList.forEach(srcDocument -> srcDocument.getMetadata().put(KNOWLEDGE_METADATA_KEY, tag));
        splitDocumentList.forEach(splitDocument -> splitDocument.getMetadata().put(KNOWLEDGE_METADATA_KEY, tag));

        // Embed and persist the tagged chunks in PgVector.
        pgVectorStore.accept(splitDocumentList);

        // Register the tag once in the shared tag list.
        // NOTE(review): contains-then-add is not atomic across concurrent uploads;
        // RList offers no add-if-absent, so a duplicate tag is possible under a race.
        RList<String> ragTagList = redissonClient.getList(RedisKeys.RAG_TAG_LIST);
        if (!ragTagList.contains(tag)) {
            ragTagList.add(tag);
        }

        return String.format("文档处理完成，共处理文档%s个，切分后文档%s个", sourceDocumentList.size(), splitDocumentList.size());
    }

    /**
     * RAG chat: retrieves the most relevant chunks for {@code message} from the
     * knowledge base named {@code tag}, injects them into the system prompt, and
     * asks the model.
     *
     * @param model   Ollama model name to route the request to
     * @param message user question, also used as the similarity-search query
     * @param tag     knowledge-base tag restricting which documents are searched
     * @return the model's answer grounded in the retrieved documents
     */
    @Override
    @GetMapping("/chat")
    public ChatResponse chat(@RequestParam String model, @RequestParam String message, @RequestParam String tag) {
        // Build the metadata filter with FilterExpressionBuilder instead of string
        // concatenation: the previous "knowledge == '" + tag + "'" form was open to
        // filter-expression injection (and broke on tags containing a quote).
        SearchRequest searchRequest = SearchRequest.query(message)
                .withTopK(RAG_TOP_K)
                .withFilterExpression(new FilterExpressionBuilder().eq(KNOWLEDGE_METADATA_KEY, tag).build());
        List<Document> documents = pgVectorStore.similaritySearch(searchRequest);

        // Concatenate retrieved chunk contents into the context for the system prompt.
        String documentContentJoin = documents.stream().map(Document::getContent).collect(Collectors.joining());

        // Render the system/RAG message from the shared prompt template.
        Message ragMessage = new SystemPromptTemplate(PromptTemplate.SYSTEM_PROMPT_TEMPLATE)
                .createMessage(Map.of("documents", documentContentJoin));

        // System context first, then the user turn — the conventional message order.
        List<Message> messages = new ArrayList<>();
        messages.add(ragMessage);
        messages.add(new UserMessage(message));

        return chatClient.call(new Prompt(messages, OllamaOptions.create().withModel(model)));
    }
}
