package com.zz.ai.controller;


//import com.zz.ai.component.MyKeywordEnricher;
import com.zz.ai.service.ChatService;
import com.zz.ai.util.CustomTokenTextSplitter;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.document.Document;
import org.springframework.ai.reader.tika.TikaDocumentReader;
import org.springframework.ai.transformer.splitter.TokenTextSplitter;
import org.springframework.ai.vectorstore.SearchRequest;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.core.io.InputStreamResource;
import org.springframework.web.bind.annotation.PostMapping;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RequestParam;
import org.springframework.web.bind.annotation.RestController;
import org.springframework.web.multipart.MultipartFile;

import java.io.IOException;
import java.util.Collection;
import java.util.List;
import java.util.Map;
import java.util.stream.Collectors;

@RestController
@RequestMapping("/vector")
public class VectorController {

    /** Vector store; {@code add} embeds documents via the configured embedding model before persisting. */
    @Autowired
    private VectorStore vectorStore;

    /** Chat service used for plain LLM generation. */
    @Autowired
    private ChatService chatService;

    /**
     * Ingests an uploaded file into the vector store.
     * <p>The file is parsed with Tika, split into smaller chunks, and stored.
     * Storing triggers the embedding model automatically, turning each chunk
     * into a vector before it is persisted.
     *
     * @param file the uploaded document (any format Tika can parse)
     * @return {@code true} if chunks were stored, {@code false} if nothing could be extracted
     * @throws IOException if the upload stream cannot be read
     */
    @PostMapping("embedding")
    public Boolean embedding(@RequestParam MultipartFile file) throws IOException {
        // Parse the raw upload stream into Spring AI Documents.
        TikaDocumentReader tikaDocumentReader = new TikaDocumentReader(new InputStreamResource(file.getInputStream()));
        List<Document> originDocuments = tikaDocumentReader.read();
        if (originDocuments.isEmpty()) {
            // Tika extracted nothing usable from the upload; nothing to store.
            return false;
        }

        // Split each parsed document into smaller token-bounded chunks.
        CustomTokenTextSplitter customTokenTextSplitter = new CustomTokenTextSplitter();
        List<Document> splitDocuments = originDocuments.stream()
                .map(customTokenTextSplitter::split)
                .flatMap(Collection::stream)
                .collect(Collectors.toList());

        // Persist the chunks; the vector store embeds them via the embedding model on add.
        vectorStore.add(splitDocuments);
        return true;
    }

    /**
     * Seeds the vector store with a small set of hard-coded demo documents.
     *
     * @return always {@code true} once the documents have been stored
     */
    @PostMapping("doDocument")
    public Boolean doDocument() {
        List<Document> documents = List.of(
                new Document("Spring AI rocks!! Spring AI rocks!! Spring AI rocks!! Spring AI rocks!! Spring AI rocks!!", Map.of("meta1", "meta1")),
                new Document("The World is Big and Salvation Lurks Around the Corner"),
                new Document("You walk forward facing the past and you turn back toward the future.", Map.of("meta2", "meta2")));

        // Adding embeds each document and writes it to the backing store.
        vectorStore.add(documents);
        return true;
    }

    /**
     * Deletes documents similar to the given query from the vector store.
     * <p>Finds up to 5 documents above the 0.7 similarity threshold and
     * removes them by id.
     *
     * @param query text used to locate the documents to delete
     * @return always {@code true} once matching documents (if any) were deleted
     */
    @PostMapping("deleteDocument")
    public Boolean deleteDocument(String query) {
        List<Document> results = this.vectorStore.similaritySearch(
                SearchRequest.builder().query(query).topK(5).similarityThreshold(0.7d).build());
        if (!results.isEmpty()) {
            vectorStore.delete(results.stream().map(Document::getId).collect(Collectors.toList()));
        }
        return true;
    }

    /**
     * Builds a RAG-style prompt from documents similar to the query.
     * <p>Retrieves up to 5 documents above the 0.7 similarity threshold and
     * concatenates their text after a fixed Chinese instruction prefix
     * ("answer based on the following context").
     *
     * @param query the user query to search the vector store with
     * @return the assembled prompt string (prefix only if nothing matched)
     */
    @PostMapping("getDocument")
    public String getDocument(String query) {
        List<Document> context = vectorStore.similaritySearch(SearchRequest.builder().query(query).topK(5)
                .similarityThreshold(0.7d).build());
        return "基于以下上下文回答：" +
                context.stream().map(Document::getText).collect(Collectors.joining(" "));
    }

    /**
     * Generates a chat response for the query via the chat service.
     *
     * @param query the user query
     * @return the model's generated response
     */
    @PostMapping("generateResponse")
    public String generateResponse(String query) {
        return chatService.generateResponse(query);
    }
}
