package com.hzsparrow.ai.util;

import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Objects;

import org.springframework.ai.transformer.splitter.TokenTextSplitter;
import org.springframework.ai.vectorstore.VectorStore;

import com.hzsparrow.ai.entity.Document;

/**
 * Splits a document into token-sized chunks and persists them to a
 * {@link VectorStore} in batches. Batches that fail to save are recursively
 * re-split at half the token size until a minimum token size is reached.
 */
public class DocumentSplitSaveUtil {

    /** Maximum number of document chunks written to the vector store per batch. */
    private static final int BATCH_SIZE = 32;

    /** Smallest token size the retry logic will attempt; below this we give up. */
    private static final int MIN_TOKEN_SIZE = 100;

    /** Upper bound on the number of chunks produced by a single split. */
    private static final int MAX_NUM_CHUNKS = 1000;

    /** Target token size for each document chunk. */
    private final int maxToken;

    /** Destination store for the split document chunks. */
    private final VectorStore vectorStore;

    /**
     * Creates a utility that splits documents and persists the chunks.
     *
     * @param maxToken    target token size per chunk; must be positive
     * @param vectorStore destination vector store; must not be {@code null}
     * @throws IllegalArgumentException if {@code maxToken} is not positive
     * @throws NullPointerException     if {@code vectorStore} is {@code null}
     */
    public DocumentSplitSaveUtil(int maxToken, VectorStore vectorStore) {
        if (maxToken <= 0) {
            throw new IllegalArgumentException("maxToken must be positive: " + maxToken);
        }
        this.maxToken = maxToken;
        this.vectorStore = Objects.requireNonNull(vectorStore, "vectorStore");
    }

    /**
     * Splits a document and saves the resulting chunks to the vector store.
     *
     * @param document the source document whose content is split
     * @param metadata metadata attached to every chunk
     * @return {@code true} if every chunk (including any retried ones) was stored
     */
    public boolean splitAndSave(Document document, Map<String, Object> metadata) {
        org.springframework.ai.document.Document aiDocument = new org.springframework.ai.document.Document(
                document.getContent(), metadata);
        return splitAndSaveWithTokenSize(aiDocument, maxToken);
    }

    /**
     * Splits the document with the given token size and saves the chunks in
     * batches. Chunks belonging to a failed batch are collected and retried via
     * {@link #retryFailedChunks(List, int)} at half the token size.
     *
     * @param aiDocument document to split
     * @param tokenSize  chunk size in tokens for this attempt
     * @return {@code true} if all chunks were eventually stored
     */
    private boolean splitAndSaveWithTokenSize(org.springframework.ai.document.Document aiDocument, int tokenSize) {
        System.out.println("使用 " + tokenSize + " token大小进行文档切分");

        TokenTextSplitter splitter = TokenTextSplitter.builder()
                .withChunkSize(tokenSize)
                .withMinChunkLengthToEmbed(0)
                .withMaxNumChunks(MAX_NUM_CHUNKS)
                .build();
        List<org.springframework.ai.document.Document> list = splitter.split(aiDocument);

        int totalChunks = list.size();
        System.out.println("文档总块数: " + totalChunks + "，开始分批处理...");

        List<org.springframework.ai.document.Document> failedChunks = new ArrayList<>();

        for (int i = 0; i < totalChunks; i += BATCH_SIZE) {
            int endIndex = Math.min(i + BATCH_SIZE, totalChunks);
            List<org.springframework.ai.document.Document> batch = list.subList(i, endIndex);
            int batchNumber = i / BATCH_SIZE + 1;
            try {
                List<org.springframework.ai.document.Document> batchWithTokenSize = tagWithTokenSize(batch, tokenSize);
                vectorStore.add(batchWithTokenSize);
                System.out.println("成功添加第 " + batchNumber + " 批，包含 " + batchWithTokenSize.size() + " 个文档块");
            } catch (Exception e) {
                // Record the failed chunks; they are retried below with a smaller token size.
                System.err.println("添加第 " + batchNumber + " 批文档块时出错: " + e.getMessage());
                failedChunks.addAll(batch);
            }
        }

        if (failedChunks.isEmpty()) {
            return true;
        }
        return retryFailedChunks(failedChunks, tokenSize);
    }

    /**
     * Copies each chunk, recording the token size used for splitting in its
     * metadata under the {@code "tokenSize"} key. New Document instances are
     * created because the metadata of an existing one cannot be modified
     * directly.
     *
     * @param batch     chunks to tag
     * @param tokenSize token size to record
     * @return new documents carrying the extended metadata
     */
    private List<org.springframework.ai.document.Document> tagWithTokenSize(
            List<org.springframework.ai.document.Document> batch, int tokenSize) {
        List<org.springframework.ai.document.Document> tagged = new ArrayList<>(batch.size());
        for (org.springframework.ai.document.Document doc : batch) {
            Map<String, Object> docMetadata = new HashMap<>(doc.getMetadata());
            docMetadata.put("tokenSize", tokenSize);
            tagged.add(new org.springframework.ai.document.Document(
                    doc.getId(),
                    doc.getText(),
                    docMetadata));
        }
        return tagged;
    }

    /**
     * Re-splits and re-saves chunks whose batch insert failed, using half the
     * previous token size. Gives up once the size drops below
     * {@link #MIN_TOKEN_SIZE}.
     *
     * @param failedChunks chunks that failed to save
     * @param tokenSize    token size of the failed attempt
     * @return {@code true} if every failed chunk was eventually stored
     */
    private boolean retryFailedChunks(
            List<org.springframework.ai.document.Document> failedChunks, int tokenSize) {
        System.out.println("有 " + failedChunks.size() + " 个文档块保存失败，尝试减小token大小重新切分");

        int newTokenSize = tokenSize / 2;
        if (newTokenSize < MIN_TOKEN_SIZE) {
            System.err.println("token大小已经很小 (" + newTokenSize + ")，无法继续切分，放弃处理");
            return false;
        }

        boolean allSuccess = true;
        for (org.springframework.ai.document.Document failedDoc : failedChunks) {
            if (!splitAndSaveWithTokenSize(failedDoc, newTokenSize)) {
                allSuccess = false;
            }
        }
        return allSuccess;
    }
}

