package com.example.futureai.service.impl;

import cn.hutool.core.util.StrUtil;
import com.example.futureai.constants.CommonConst;
import com.example.futureai.constants.SpringAIConst;
import com.example.futureai.service.AiDocumentService;
import com.example.futureai.service.RedisService;
import com.example.futureai.service.TextSplitterIntf;
import com.example.futureai.service.ZskService;
import org.json.JSONObject;
import org.springframework.ai.document.Document;
import org.springframework.ai.reader.TextReader;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.core.io.InputStreamResource;
import org.springframework.stereotype.Service;

import java.io.ByteArrayInputStream;
import java.io.ByteArrayOutputStream;
import java.io.InputStream;
import java.security.MessageDigest;
import java.util.List;
import java.util.Map;

/**
 * @ClassName: AIDocumentImpl
 * @Description:
 * @Author: YourName
 * @Date: 2025/2/24
 */

@Service
public class AIDocumentImpl implements AiDocumentService {

    @Autowired
    private RedisService redisService;

    @Autowired
    private ZskService zskService;

    /**
     * Ingests a document: reads the full stream, dedupes by content hash,
     * splits the text into chunks, persists the chunk records, and caches
     * document metadata in Redis.
     *
     * @param cachedInputStream stream of the document's raw bytes; fully read
     *                          here but not closed (caller owns the stream)
     * @param docName           display name of the document
     * @throws Exception if reading, hashing, splitting, or persistence fails
     */
    public void addDocument(InputStream cachedInputStream, String docName) throws Exception {
        // Buffer the entire stream so it can be both hashed and re-read.
        // transferTo replaces the manual 1 KB copy loop.
        ByteArrayOutputStream byteArrayOutputStream = new ByteArrayOutputStream();
        cachedInputStream.transferTo(byteArrayOutputStream);
        byte[] data = byteArrayOutputStream.toByteArray();
        // Content hash identifies the document independent of its name.
        String docHash = calculateFileHash(data, CommonConst.SHA_256);
        // Skip ingestion if this exact content was already stored.
        if (redisService.hasKey(CommonConst.RAG_KEY_PREFIX + docHash)) {
            return;
        }
        // data.length is the guaranteed total size; the original used
        // InputStream.available(), which is only an estimate by contract.
        int docSize = data.length;
        // Wrap the buffered bytes and convert them into Spring AI Documents.
        TextReader textReader = new TextReader(new InputStreamResource(new ByteArrayInputStream(data)));
        List<Document> documents = textReader.get();
        // Split into chunks (size 250, overlap 30) and attach doc id/name metadata.
        TextSplitterIntf textSplitter = new RecursiveCharacterTextSplitter(250, 30);
        List<Document> newDocuments = textSplitter.split(documents, List.of(Map.of(
                SpringAIConst.VectorStore.METADATA_DOC_ID, docHash,
                SpringAIConst.VectorStore.METADATA_DOC_NAME, docName)));
        // TODO(review): vector-store insertion is currently disabled —
        // re-enable once the VectorStore bean is wired in:
        // vectorStore.add(newDocuments);
        // Persist the chunk records to the database.
        zskService.addZskJl(newDocuments, docName, docHash);
        // Cache summary metadata in Redis, keyed by content hash.
        JSONObject docJson = new JSONObject();
        docJson.put(SpringAIConst.VectorStore.METADATA_DOC_NAME, docName);
        docJson.put("docSize", docSize);
        docJson.put("docChunks", newDocuments.size());
        redisService.setObjectRdb(CommonConst.RAG_KEY_PREFIX + docHash, docJson);
    }

    /**
     * Computes the hex-encoded digest of the given bytes.
     *
     * @param datas     raw bytes to hash
     * @param algorithm digest algorithm name (e.g. MD5, SHA-1, SHA-256);
     *                  null/blank falls back to SHA-256
     * @return lowercase hexadecimal digest string
     * @throws Exception if the algorithm is not supported by the JVM
     */
    public static String calculateFileHash(byte[] datas, String algorithm) throws Exception {
        algorithm = StrUtil.isBlank(algorithm) ? CommonConst.SHA_256 : algorithm;
        MessageDigest digest = MessageDigest.getInstance(algorithm);
        // digest(byte[]) performs update + finalize in one call.
        byte[] hashBytes = digest.digest(datas);
        // Two hex characters per byte; presize to avoid reallocation.
        StringBuilder sb = new StringBuilder(hashBytes.length * 2);
        for (byte b : hashBytes) {
            sb.append(String.format("%02x", b));
        }
        return sb.toString();
    }

    /**
     * Placeholder for persisting a document record — not yet implemented.
     *
     * @throws Exception declared for interface compatibility
     */
    public void saveDocumentRecord() throws Exception {

    }

}
