package com.luo.ai.assistant.etl.impl;

import com.luo.ai.assistant.etl.MyDocumentReader;
import com.luo.ai.assistant.etl.MyDocumentTransformer;
import com.luo.ai.assistant.etl.MyDocumentWriter;
import com.luo.ai.assistant.etl.MyEtlService;
import com.luo.ai.assistant.file.FileConvertor;
import com.luo.ai.assistant.file.FileDetailService;
import com.luo.ai.assistant.file.MyFileStorageService;
import com.luo.ai.assistant.file.model.FileDetail;
import com.luo.ai.assistant.file.model.TrainStatusEnum;
import com.luo.ai.assistant.util.JsonUtils;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.ai.chat.model.ChatModel;
import org.springframework.ai.document.Document;
import org.springframework.ai.vectorstore.VectorStore;
import org.springframework.core.io.DefaultResourceLoader;
import org.springframework.core.io.FileSystemResource;
import org.springframework.core.io.Resource;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;

import java.util.*;
import java.util.function.BiConsumer;
import java.util.function.BiFunction;
import java.util.function.Function;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * ETL服务实现
 *
 * @author luohq
 * @date 2025-01-02 10:01
 */
@Service
public class MyEtlServiceImpl implements MyEtlService {

    private static final Logger log = LoggerFactory.getLogger(MyEtlServiceImpl.class);

    /**
     * Separator for the "type" metadata value: ASCII comma or full-width
     * (Chinese) comma. Compiled once instead of re-parsing the regex per file.
     */
    private static final Pattern TYPE_SEPARATOR = Pattern.compile("[,，]");

    /**
     * Chat model handed to the document transformer.
     */
    private final ChatModel chatModel;
    /**
     * Vector store that transformed documents are written into.
     */
    private final VectorStore vectorStore;

    /**
     * File storage service (downloads the raw file to local disk).
     */
    private final MyFileStorageService myFileStorageService;
    /**
     * File database record service (train status and linked document ids).
     */
    private final FileDetailService fileDetailService;

    public MyEtlServiceImpl(ChatModel chatModel,
                            VectorStore vectorStore,
                            MyFileStorageService myFileStorageService,
                            FileDetailService fileDetailService) {
        this.chatModel = chatModel;
        this.vectorStore = vectorStore;
        this.myFileStorageService = myFileStorageService;
        this.fileDetailService = fileDetailService;
    }

    /**
     * Trains (ETLs) a batch of files identified by their ids.
     * <p>
     * Files already marked {@link TrainStatusEnum#TRAINED} are skipped. For each
     * remaining file: download it, normalize its metadata, run the ETL pipeline,
     * persist the new train status and the generated vector-document ids, and
     * finally delete the local copy of the downloaded file.
     *
     * @param fileIds ids of the files to train
     * @return map of file id to the vector documents produced for that file;
     *         empty map when none of the ids resolve to a file record
     */
    @Override
    public Map<String, List<Document>> etlFileBatch(Collection<String> fileIds) {
        // Look up the file records for the requested ids
        List<FileDetail> fileDetails = this.fileDetailService.findByIds(fileIds);
        if (CollectionUtils.isEmpty(fileDetails)) {
            return Collections.emptyMap();
        }
        // File id -> vector documents produced for that file
        Map<String, List<Document>> fileIdToDocumentsMap = new HashMap<>(fileDetails.size());

        // Train each file in turn
        fileDetails.stream()
                // Skip files that are already trained
                .filter(fileDetail -> !TrainStatusEnum.TRAINED.getStatus().equals(fileDetail.getTrainStatus()))
                // Convert to the file-info view used by storage/training
                .map(FileConvertor::toFileInfo)
                .forEach(fileInfo -> {
                    log.info("Train File: {}", JsonUtils.toJson(fileInfo));
                    // Download the file so the reader can open it from local disk
                    FileSystemResource fileSystemResource = this.myFileStorageService.downloadFileAsResource(fileInfo);
                    try {
                        // Copy the metadata so the original map is left untouched
                        Map<String, Object> convertedMetadata = new HashMap<>(fileInfo.getMetadata());
                        // Convert "type" to a Set — otherwise Redis Search cannot do
                        // set-membership (Tag) filtering on it
                        convertedMetadata.computeIfPresent("type", (type, typeValues) -> {
                            String[] typeValueArray = TYPE_SEPARATOR.split(String.valueOf(typeValues).trim());
                            return Stream.of(typeValueArray).collect(Collectors.toSet());
                        });

                        // Run the ETL pipeline with the converted metadata attached
                        List<Document> documents = this.etl(fileSystemResource, convertedMetadata);
                        // Mark the file as trained
                        this.fileDetailService.updateTrainStatus(fileInfo.getId(), TrainStatusEnum.TRAINED);
                        // Persist the linked vector-document ids (used later to cascade-delete
                        // the vector documents when the file itself is deleted)
                        List<String> documentIds = documents.stream().map(Document::getId).collect(Collectors.toList());
                        this.fileDetailService.updateDocumentIds(fileInfo.getId(), documentIds);

                        // Record the documents produced for this file
                        fileIdToDocumentsMap.put(fileInfo.getId(), documents);
                    } finally {
                        // Always remove the downloaded copy — previously a failed ETL leaked
                        // the temp file, and a failed delete was silently ignored
                        if (!fileSystemResource.getFile().delete()) {
                            log.warn("Failed to delete downloaded file: {}", fileSystemResource.getPath());
                        }
                    }
                });
        return fileIdToDocumentsMap;
    }

    /**
     * Runs the ETL pipeline on a resource with no additional metadata.
     *
     * @param resource the resource to extract documents from
     * @return the documents written to the vector store
     */
    @Override
    public List<Document> etl(Resource resource) {
        return this.etl(resource, Collections.emptyMap());
    }

    /**
     * Runs the full ETL pipeline: read documents from the resource, attach the
     * given metadata, transform them, and write them to the vector store.
     *
     * @param resource                   the resource to extract documents from
     * @param additionalDocumentMetadata metadata merged into every extracted document
     * @return the transformed documents that were written to the vector store
     */
    @Override
    public List<Document> etl(Resource resource, Map<String, Object> additionalDocumentMetadata) {
        //1. Extract documents from the resource
        MyDocumentReaderImpl myDocumentReader = new MyDocumentReaderImpl(resource);
        List<Document> documents = myDocumentReader.read();
        // Add additional metadata to the documents
        documents.forEach(document -> document.getMetadata().putAll(additionalDocumentMetadata));

        //2. Transform the documents
        MyDocumentTransformerImpl myDocumentTransformer = new MyDocumentTransformerImpl(chatModel);
        documents = myDocumentTransformer.transform(documents);

        //3. Load documents into the vector store
        MyDocumentWriterImpl myDocumentWriter = new MyDocumentWriterImpl(vectorStore);
        myDocumentWriter.write(documents);

        return documents;
    }

    /**
     * Runs the ETL pipeline on the resource located at the given URL.
     *
     * @param resourceUrl a Spring resource URL (e.g. {@code file:}, {@code classpath:}, {@code https:})
     * @return the documents written to the vector store
     */
    @Override
    public List<Document> etl(String resourceUrl) {
        return this.etl(new DefaultResourceLoader().getResource(resourceUrl));
    }

    /**
     * Runs the ETL pipeline with caller-supplied read/transform/write operations,
     * allowing each stage to be customized while reusing the default components.
     *
     * @param resource           the resource to extract documents from
     * @param readOperation      how to read documents from the reader
     * @param transformOperation how to transform the extracted documents
     * @param writeOperation     how to write the transformed documents
     * @return the transformed documents handed to the write operation
     */
    @Override
    public List<Document> etlCustom(Resource resource,
                                    Function<MyDocumentReader, List<Document>> readOperation,
                                    BiFunction<MyDocumentTransformer, List<Document>, List<Document>> transformOperation,
                                    BiConsumer<MyDocumentWriter, List<Document>> writeOperation) {
        //1. Extract documents from the resource
        MyDocumentReaderImpl myDocumentReader = new MyDocumentReaderImpl(resource);
        List<Document> documents = readOperation.apply(myDocumentReader);

        //2. Transform the documents
        MyDocumentTransformerImpl myDocumentTransformer = new MyDocumentTransformerImpl(chatModel);
        documents = transformOperation.apply(myDocumentTransformer, documents);

        //3. Load documents into the vector store
        MyDocumentWriterImpl myDocumentWriter = new MyDocumentWriterImpl(vectorStore);
        writeOperation.accept(myDocumentWriter, documents);
        return documents;
    }

    /**
     * Runs the customizable ETL pipeline on the resource located at the given URL.
     *
     * @param resourceUrl        a Spring resource URL (e.g. {@code file:}, {@code classpath:}, {@code https:})
     * @param readOperation      how to read documents from the reader
     * @param transformOperation how to transform the extracted documents
     * @param writeOperation     how to write the transformed documents
     * @return the transformed documents handed to the write operation
     */
    @Override
    public List<Document> etlCustom(String resourceUrl,
                                    Function<MyDocumentReader, List<Document>> readOperation,
                                    BiFunction<MyDocumentTransformer, List<Document>, List<Document>> transformOperation,
                                    BiConsumer<MyDocumentWriter, List<Document>> writeOperation) {
        return this.etlCustom(new DefaultResourceLoader().getResource(resourceUrl),
                readOperation, transformOperation, writeOperation);
    }
}
