/*
 * Copyright (c) 2025 Industrial Software Feature Database
 */
package com.comac.ins.isfd.service.impl;

import cn.hutool.json.JSONObject;
import cn.hutool.json.JSONUtil;
import com.anwen.mongo.toolkit.Filters;
import com.comac.ins.common.core.exception.base.BaseException;
import com.comac.ins.common.core.utils.StringUtils;
import com.comac.ins.isfd.constant.IsfdMeshModelConstants;
import com.comac.ins.isfd.constant.MongoDataConstant;
import com.comac.ins.isfd.constant.enums.IsfdMeshOp2BoStatusEnum;
import com.comac.ins.isfd.domain.*;
import com.comac.ins.isfd.domain.bo.IsfdMeshOp2InfoBo;
import com.comac.ins.isfd.domain.bo.IsfdMeshOp2QueryBo;
import com.comac.ins.isfd.domain.bo.IsfdMeshOp2SubcaseBo;
import com.comac.ins.isfd.rpc.simright.SimrightRpcUtils;
import com.comac.ins.isfd.service.IIsfdDatabaseMongoService;
import com.comac.ins.isfd.service.IIsfdMeshOP2ParseService;
import com.comac.ins.isfd.util.nastranHdf5Parser.NastranHdf5Reader;
import com.comac.ins.isfd.util.op2parser.parsers.pyNastran.PyNastranOP2Reader;
import com.comac.ins.system.service.ISysDictDataService;
import com.mongodb.BasicDBObject;
import com.mongodb.bulk.BulkWriteResult;
import com.mongodb.client.MongoClient;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoDatabase;
import com.mongodb.client.model.BulkWriteOptions;
import com.mongodb.client.model.InsertOneModel;
import com.mongodb.client.model.WriteModel;
import com.mongodb.client.result.DeleteResult;
import jakarta.validation.Valid;
import lombok.RequiredArgsConstructor;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.exception.ExceptionUtils;
import org.apache.commons.lang3.time.StopWatch;
import org.bson.Document;
import org.bson.conversions.Bson;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Qualifier;
import org.springframework.data.mongodb.core.MongoTemplate;
import org.springframework.stereotype.Service;
import org.springframework.util.CollectionUtils;
import org.springframework.validation.annotation.Validated;

import java.nio.file.Paths;
import java.util.*;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ExecutorService;
import java.util.concurrent.Semaphore;
import java.util.stream.Collectors;

/**
 * Service implementation that parses natural-mesh result files (OP2 / H5),
 * persists the structured records into MongoDB, and keeps the per-upload
 * parse status in sync.
 *
 * @author lj
 * @date 2025-01-15
 */
@RequiredArgsConstructor
@Service
@Slf4j
public class IsfdMeshOP2ParseServiceImpl implements IIsfdMeshOP2ParseService {

    // Spring Data template; used here mainly for its converter (POJO <-> Document).
    @Autowired
    private MongoTemplate mongoTemplate;

    // Raw driver client for direct database/collection access.
    @Autowired
    private MongoClient mongoClient;

    // Supplies the mesh-model collection name used by status updates.
    @Autowired
    private IsfdMeshOp2ServiceImpl isfdMeshOp2Service;

    // Generic query/update facade over the mesh-model collection.
    @Autowired
    private IIsfdDatabaseMongoService isfdDatabaseMongoService;

    // Dictionary service: resolves which OP2 parser is currently enabled.
    @Autowired
    private ISysDictDataService iSysDictDataService;

    // Dedicated executor for concurrent bulk inserts (see saveBatch).
    @Autowired
    @Qualifier("mongoBatchExecutor")
    private ExecutorService mongoBatchExecutor;

    // RPC client for the SimRight remote parser.
    @Autowired
    private SimrightRpcUtils simrightRpcUtils;


    /**
     * Parses the result file referenced by infoBo, persists the structured
     * records plus a parse summary, and updates the upload's status.
     * Failures are caught and recorded as STRUCTURE_FAILURE rather than thrown.
     *
     * @param infoBo upload metadata (file path, file hash, uploadId)
     */
    @Override
    public void parseAndSaveResultByType(IsfdMeshOp2InfoBo infoBo){
        try {
            // Time the whole parse + persist pipeline.
            long start = System.currentTimeMillis();

            String filePath = infoBo.getFilePath();
            String fileHash = infoBo.getFileHash();
            String uploadId = infoBo.getUploadId();

            // Parse the file (op2 or h5) into the in-memory object model.
            IsfdMeshOp2Object op2Object = parseResultByType(filePath);

            // Build the summary (result -> subcases + headers) that is stored
            // with the parse-info document.
            List<IsfdMeshOp2Result> resultList = op2Object.getResultList();
            List<IsfdMeshOp2Info.Op2Result> op2ResultList = new ArrayList<>();
            for (IsfdMeshOp2Result result : resultList) {

                List<IsfdMeshOp2Info.Op2Subcase> op2Subcases = new ArrayList<>();
                List<String> headers = new ArrayList<>();
                for (IsfdMeshOp2SubcaseBo subcaseBo : result.getSubcaseList()) {
                    IsfdMeshOp2Info.Op2Subcase op2Subcase = new IsfdMeshOp2Info.Op2Subcase();
                    op2Subcase.setId(subcaseBo.getSubcaseId());
                    op2Subcase.setNumColumn(subcaseBo.getColumnQuantity());
                    op2Subcase.setNumElement(subcaseBo.getElementQuantity());
                    op2Subcases.add(op2Subcase);

                    // NOTE(review): only the LAST subcase's headers survive —
                    // presumably all subcases of a result share one header
                    // list; confirm against the parser output.
                    headers = subcaseBo.getHeaders();
                }

                IsfdMeshOp2Info.Op2Result op2Result = new IsfdMeshOp2Info.Op2Result();
                op2Result.setName(result.getResultName());
                op2Result.setOp2Subcases(op2Subcases);
                op2Result.setHeaders(headers);
                op2ResultList.add(op2Result);
            }

            // Persist the structured per-subcase records.
            convertAndSaveOP2BySubcase(op2Object, uploadId);

            // Record the parse summary; the record collection is named after
            // the uploadId.
            IsfdMeshOp2Info saveInfo = new IsfdMeshOp2Info();
            saveInfo.setFileHash(fileHash);
            saveInfo.setUploadId(uploadId);
            saveInfo.setFileName(Paths.get(filePath).getFileName().toString());
            saveInfo.setCollectionName(uploadId);
            saveInfo.setOp2Results(op2ResultList);
            saveOP2ParseInfo(saveInfo);

            long end = System.currentTimeMillis();
            long time_ms = end - start;
            String message = "结构化完成，uploadId = " + infoBo.getUploadId() + "，耗时：" + time_ms + " ms";
            log.info(message);

            updateStatusByUploadId(uploadId, IsfdMeshOp2BoStatusEnum.STRUCTURE_SUCCESS, null, null, message);

        } catch (Exception e) {
            // BUGFIX: was log.error("结果文件解析失败/n" + e) — "/n" was a typo for
            // "\n" and the throwable was string-concatenated, which loses the
            // stack trace. Pass the exception as the logger's throwable arg.
            log.error("结果文件解析失败，uploadId={}", infoBo.getUploadId(), e);
            // Store only the first few stack lines as the status message.
            String[] stackLines = ExceptionUtils.getStackTrace(e).split("\n");
            String shortMessage = Arrays.stream(stackLines).limit(3).collect(Collectors.joining("\n"));
            updateStatusByUploadId(infoBo.getUploadId(), IsfdMeshOp2BoStatusEnum.STRUCTURE_FAILURE, null, null, shortMessage);
        }
    }


    /**
     * Updates status and message (and optionally parser / parseTaskId) on every
     * mesh-model record matching the given uploadId.
     *
     * @param uploadId    upload whose records are updated
     * @param status      new status to write
     * @param parser      parser identifier; skipped when blank
     * @param parseTaskId remote task id; skipped when blank
     * @param message     status message; always written (may be null)
     */
    public void updateStatusByUploadId(String uploadId, IsfdMeshOp2BoStatusEnum status, String parser, String parseTaskId, String message) {
        String collectionName = isfdMeshOp2Service.getMeshModelTableName();

        // Match all records carrying this uploadId.
        IsfdDatabaseMongoQuery.IsfdDatabaseMongoQueryCondition condition =
            new IsfdDatabaseMongoQuery.IsfdDatabaseMongoQueryCondition();
        condition.setFieldName("uploadId");
        condition.setOperator("eq");
        condition.setValue(uploadId);

        IsfdDatabaseMongoQuery mongoQuery = new IsfdDatabaseMongoQuery();
        mongoQuery.setConditions(List.of(condition));

        // Status and message are always written; the optional fields only when present.
        Document updateDoc = new Document()
            .append("status", status.getValue())
            .append("message", message);
        if (StringUtils.isNotBlank(parser)) {
            updateDoc.append("parser", parser);
        }
        if (StringUtils.isNotBlank(parseTaskId)) {
            updateDoc.append("parseTaskId", parseTaskId);
        }

        isfdDatabaseMongoService.update(collectionName, mongoQuery, updateDoc);
    }

    /**
     * Checks whether parsing for this upload can be skipped, syncing the
     * mesh-model status from existing records as a side effect.
     *
     * @return TRUE when a parse record / terminal status already exists,
     *         FALSE when the upload still needs parsing
     */
    public Boolean OP2ParseInfoExists(IsfdMeshOp2InfoBo infoBo) {
        String uploadId = infoBo.getUploadId();

        // A parse-info document implies a previously successful parse.
        MongoDatabase op2Db = mongoClient.getDatabase(MongoDataConstant.OP2_DATABASE);
        List<Document> parseInfos = op2Db.getCollection(MongoDataConstant.OP2_PARSE_INFO_COLLECTION)
            .find(Filters.eq("uploadId", uploadId))
            .into(new ArrayList<>());
        if (!CollectionUtils.isEmpty(parseInfos)) {
            updateStatusByUploadId(uploadId, IsfdMeshOp2BoStatusEnum.STRUCTURE_SUCCESS, null, null, null);
            return Boolean.TRUE;
        }

        // Otherwise fall back to the status recorded on the mesh-model records.
        MongoDatabase comacDb = mongoClient.getDatabase(MongoDataConstant.DB_NAME);
        String op2TableName = isfdMeshOp2Service.getMeshModelTableName();
        List<Document> allOp2 = comacDb.getCollection(op2TableName)
            .find(Filters.eq("uploadId", uploadId))
            .into(new ArrayList<>());
        if (CollectionUtils.isEmpty(allOp2)) {
            return Boolean.FALSE;
        }

        List<Integer> statusList = allOp2.stream().map(d -> d.getInteger("status")).toList();
        if (statusList.contains(IsfdMeshOp2BoStatusEnum.STRUCTURE_SUCCESS.getValue())) {
            updateStatusByUploadId(uploadId, IsfdMeshOp2BoStatusEnum.STRUCTURE_SUCCESS, null, null, null);
            return Boolean.TRUE;
        }
        if (statusList.contains(IsfdMeshOp2BoStatusEnum.STRUCTURE_FAILURE.getValue())) {
            // Propagate the failure message from the first failed record, if any.
            String message = allOp2.stream()
                .filter(d -> IsfdMeshOp2BoStatusEnum.STRUCTURE_FAILURE.getValue().equals(d.getInteger("status")))
                .findFirst()
                .map(d -> d.getString("message"))
                .orElse(null);
            updateStatusByUploadId(uploadId, IsfdMeshOp2BoStatusEnum.STRUCTURE_FAILURE, null, null, message);
            return Boolean.TRUE;
        }
        if (statusList.contains(IsfdMeshOp2BoStatusEnum.STRUCTURING.getValue())) {
            updateStatusByUploadId(uploadId, IsfdMeshOp2BoStatusEnum.STRUCTURING, null, null, null);
            return Boolean.TRUE;
        }
        return Boolean.FALSE;
    }


    /**
     * Persists the parse summary for one upload into the parse-info collection.
     *
     * @param info parse summary; must not be null
     * @throws BaseException when info is null
     */
    private void saveOP2ParseInfo(IsfdMeshOp2Info info) {
        if (info == null) {
            // BUGFIX: the old message ("数据批量插入失败，documents为空！") was copied
            // from the batch-insert path and misdescribed this failure.
            log.error("OP2解析信息保存失败，info为空！");
            throw new BaseException("OP2解析信息保存失败，info为空！");
        }
        MongoDatabase database = mongoClient.getDatabase(MongoDataConstant.OP2_DATABASE);

        // Convert via the Spring converter and drop its "_class" type hint.
        Document document = new Document();
        mongoTemplate.getConverter().write(info, document);
        document.remove("_class");

        database.getCollection(MongoDataConstant.OP2_PARSE_INFO_COLLECTION).insertOne(document);
    }


    /**
     * Dispatches parsing by file extension (text after the last '.').
     *
     * @throws BaseException for any extension other than op2 / h5
     */
    private IsfdMeshOp2Object parseResultByType(String filePath) {
        String suffix = filePath.substring(filePath.lastIndexOf(".") + 1);
        if ("op2".equalsIgnoreCase(suffix)) {
            return parseOP2(filePath);
        }
        if ("h5".equalsIgnoreCase(suffix)) {
            return parseH5(filePath);
        }
        log.error("不支持解析的文件格式：" + suffix);
        throw new BaseException("不支持解析的文件格式：" + suffix);
    }
    private IsfdMeshOp2Object parseOP2(String filePath) {
        // Parse the Nastran OP2 binary via the pyNastran-based reader.
        return PyNastranOP2Reader.convertToOP2Object(filePath);
    }

    private IsfdMeshOp2Object parseH5(String filePath) {
        // Parse a Nastran HDF5 (.h5) result file into the same OP2 object model.
        // TODO: H5 conversion path — verify parity with the OP2 reader output.
        return NastranHdf5Reader.parseAndconvertToOp2Object(filePath);
    }

    /**
     * Flattens every (result, subcase) pair into records and persists them into
     * a collection named after the uploadId.
     */
    private void convertAndSaveOP2BySubcase(IsfdMeshOp2Object op2Object, String uploadId) {
        StopWatch stopWatch = StopWatch.createStarted();
        log.info("开始转换并保存op2");

        // (Re)create the target collection; any previous data is dropped.
        createCollection(uploadId);

        for (IsfdMeshOp2Result result : op2Object.getResultList()) {
            String name = result.getResultName();
            for (IsfdMeshOp2SubcaseBo subcase : result.getSubcaseList()) {
                // Convert one subcase at a time and bulk-save its records.
                List<IsfdMeshOp2Record> records = PyNastranOP2Reader.convertSubcaseToRecords(subcase, name);
                saveBatch(uploadId, records);
            }
        }

        stopWatch.stop();
        log.info("OP2解析保存任务完成，id: {}, 耗时：{}",uploadId, stopWatch.getTime());
    }

    /**
     * Ensures a fresh, empty collection with the given name exists in the OP2
     * database, dropping any previous collection of the same name first.
     *
     * @param collectionName collection to (re)create
     * @throws BaseException when collection creation fails
     */
    private void createCollection(String collectionName) {
        MongoDatabase database = mongoClient.getDatabase(MongoDataConstant.OP2_DATABASE);
        // Drop existing data so stale records never mix with the new parse.
        if (collectionExists(database, collectionName)) {
            MongoCollection<Document> existingCollection = database.getCollection(collectionName);
            existingCollection.drop();
            log.info("已删除集合: " + collectionName);
        }
        try {
            database.createCollection(collectionName);
        } catch (Exception e) {
            // BUGFIX: pass the throwable to the logger (keeps the stack trace)
            // instead of string-concatenating the exception into the message.
            log.error("MongoDB中创建表失败，" + e.getMessage(), e);
            throw new BaseException("MongoDB中创建表失败，" + e.getMessage());
        }
    }

    /**
     * Returns true when the named collection already exists in the database.
     * listCollectionNames() is a lazy cursor, so iteration stops on first match.
     */
    private boolean collectionExists(MongoDatabase database, String collectionName) {
        Iterator<String> names = database.listCollectionNames().iterator();
        while (names.hasNext()) {
            if (names.next().equals(collectionName)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Splits dataList into fixed-size batches and inserts them concurrently on
     * the dedicated executor, with a semaphore capping in-flight batches.
     *
     * @param collectionName target collection (must already exist)
     * @param dataList       records to insert; must be non-empty
     * @throws BaseException when input is empty, the collection is missing,
     *                       or any batch fails to insert
     */
    private void saveBatch(String collectionName, List<IsfdMeshOp2Record> dataList) {
        if (CollectionUtils.isEmpty(dataList)) {
            log.error("数据批量插入失败，documents为空！");
            throw new BaseException("数据批量插入失败，documents为空！");
        }
        MongoDatabase database = mongoClient.getDatabase(MongoDataConstant.OP2_DATABASE);
        if (!collectionExists(database, collectionName)) {
            log.error("表未创建！: " + collectionName);
            throw new BaseException("表未创建！: " + collectionName);
        }

        final int batchSize = 100_000;           // stable batch size for bulk writes
        final Semaphore semaphore = new Semaphore(8); // cap concurrent in-flight batches
        List<CompletableFuture<Void>> futures = new ArrayList<>();
        // BUGFIX: batch failures were previously only logged inside the task,
        // letting the whole save "succeed" with silently missing data. Collect
        // them (synchronized — tasks may fail concurrently) and fail after join.
        List<Exception> failures = Collections.synchronizedList(new ArrayList<>());

        StopWatch stopWatch = StopWatch.createStarted();
        log.info("开始插入 OP2 数据，集合名：{}，总数：{}", collectionName, dataList.size());

        try {
            Iterator<IsfdMeshOp2Record> iterator = dataList.iterator();
            while (iterator.hasNext()) {
                List<IsfdMeshOp2Record> batch = new ArrayList<>(batchSize);
                for (int i = 0; i < batchSize && iterator.hasNext(); i++) {
                    batch.add(iterator.next());
                }
                semaphore.acquire(); // back-pressure: wait for a free slot
                futures.add(CompletableFuture.runAsync(() -> {
                    try {
                        insertBatch(database, collectionName, batch);
                    } catch (Exception e) {
                        log.error("数据插入失败: {}", e.getMessage(), e);
                        failures.add(e);
                    } finally {
                        semaphore.release();
                    }
                }, mongoBatchExecutor));
            }

            CompletableFuture.allOf(futures.toArray(new CompletableFuture[0])).join();
        } catch (InterruptedException e) {
            // BUGFIX: restore the interrupt flag before surfacing the failure.
            Thread.currentThread().interrupt();
            throw new BaseException("数据插入失败, message: " + e.getMessage());
        } catch (Exception e) {
            log.error("数据批量插入失败, message: " + e.getMessage());
            throw new BaseException("数据插入失败, message: " + e.getMessage());
        }

        if (!failures.isEmpty()) {
            throw new BaseException("数据插入失败, message: " + failures.get(0).getMessage());
        }

        stopWatch.stop();
        log.info("Task completed in {} milliseconds.", stopWatch.getTime());
    }

    /**
     * Writes one batch of records with a single unordered bulk write.
     * Unordered mode lets the server parallelize the inserts.
     */
    private BulkWriteResult insertBatch(MongoDatabase database, String collectionName, List<IsfdMeshOp2Record> batch) {
        // Convert each record to a BSON Document via the Spring converter,
        // stripping the "_class" type hint it adds.
        List<WriteModel<Document>> writes = new ArrayList<>(batch.size());
        for (IsfdMeshOp2Record record : batch) {
            Document doc = new Document();
            mongoTemplate.getConverter().write(record, doc);
            doc.remove("_class");
            writes.add(new InsertOneModel<>(doc));
        }

        return database.getCollection(collectionName)
            .bulkWrite(writes, new BulkWriteOptions().ordered(false));
    }


    /**
     * Queries structured OP2 records by collection name, result name and the
     * element ids carried in the query's elementPairs values.
     *
     * @param queryBo query parameters (collection name, result name, element pairs)
     * @return matching record rows
     * @throws BaseException when elementPairs is missing or the collection does not exist
     */
    @Override
    public List<IsfdMeshOp2RecordData> findOP2Records(@Valid @Validated IsfdMeshOp2QueryBo queryBo) {

        String collectionName = queryBo.getCollectionName();
        String resultName = queryBo.getResultName();
        Map<Integer, Integer> elementPairs = queryBo.getElementPairs();
        // BUGFIX: a null map previously caused an NPE below instead of a clear error.
        if (elementPairs == null || elementPairs.isEmpty()) {
            throw new BaseException("节点清单不能为空！");
        }

        MongoDatabase database = mongoClient.getDatabase(MongoDataConstant.OP2_DATABASE);

        if (!collectionExists(database, collectionName)) {
            log.error("表未创建！: " + collectionName);
            throw new BaseException("表未创建！: " + collectionName);
        }
        MongoCollection<Document> collection = database.getCollection(collectionName);

        // The map values are the element ids to look up.
        return findRecordByResultNameAndIds(collection, resultName, new ArrayList<>(elementPairs.values()));
    }

    /**
     * Returns one component ("sub-result") value per matching record, resolving
     * the record collection and header layout from the upload's parse info.
     *
     * @throws BaseException when no parse record exists for the upload or a
     *                       record carries a null value at the component index
     */
    @Override
    public List<IsfdMeshOp2RecordSubResult> findOP2RecordSubResults(IsfdMeshOp2QueryBo queryBo) {
        String op2UploadId = queryBo.getUploadId();
        String resultName = queryBo.getResultName();
        String subResultName = queryBo.getSubResultName();

        // Resolve the parse-info document for this upload.
        MongoDatabase database = mongoClient.getDatabase(MongoDataConstant.OP2_DATABASE);
        MongoCollection<Document> parseInfoCollection = database.getCollection(MongoDataConstant.OP2_PARSE_INFO_COLLECTION);
        Document infoDoc = parseInfoCollection.find(Filters.eq("uploadId", op2UploadId)).first();
        if (infoDoc == null) {
            throw new BaseException("未找到解析记录！id: " + op2UploadId);
        }
        IsfdMeshOp2Info op2ParseInfo = mongoTemplate.getConverter().read(IsfdMeshOp2Info.class, infoDoc);
        queryBo.setCollectionName(op2ParseInfo.getCollectionName());

        // Fetch all matching raw records.
        List<IsfdMeshOp2RecordData> dataList = findOP2Records(queryBo);

        // Locate the requested component column inside the result's header row.
        Map<String, List<String>> resultHeaders = getResultHeaders(op2ParseInfo);
        int subResultIndex = getSubResultIndex(resultHeaders.get(resultName), subResultName);

        List<IsfdMeshOp2RecordSubResult> subResults = new ArrayList<>();
        if (CollectionUtils.isEmpty(dataList)) {
            return subResults;
        }
        // Project each record down to the single requested component value.
        for (IsfdMeshOp2RecordData data : dataList) {
            Float value = data.getValues()[subResultIndex];
            if (value == null) {
                throw new BaseException("没有找到值！");
            }
            IsfdMeshOp2RecordSubResult item = new IsfdMeshOp2RecordSubResult();
            item.setValue(value);
            item.setSubResultName(subResultName);
            item.setResultName(resultName);
            item.setLoadCase(data.getLoadCase());
            item.setEntity(data.getEntity());
            subResults.add(item);
        }
        return subResults;
    }

    /**
     * Maps each result name to its header (component) list.
     * On duplicate result names the last entry wins, matching HashMap.put.
     *
     * @throws BaseException when the parse info carries no results
     */
    private Map<String, List<String>> getResultHeaders(IsfdMeshOp2Info op2ParseInfo) {
        if (op2ParseInfo == null || CollectionUtils.isEmpty(op2ParseInfo.getOp2Results())) {
            throw new BaseException("结果数据为空！");
        }

        Map<String, List<String>> resultHeaders = new HashMap<>();
        op2ParseInfo.getOp2Results()
            .forEach(r -> resultHeaders.put(r.getName(), r.getHeaders()));
        return resultHeaders;
    }

    /**
     * Returns the column index of subResultName inside headers.
     *
     * @throws BaseException when headers are empty or the name is absent
     */
    private int getSubResultIndex(List<String> headers, String subResultName) {
        if (CollectionUtils.isEmpty(headers)) {
            throw new BaseException("分力为空！");
        }
        int index = headers.indexOf(subResultName);
        if (index < 0) {
            throw new BaseException("分力名称 [" + subResultName + "] 不存在于结果头中！");
        }
        return index;
    }

    /**
     * Loads records matching the given result name and any of the element ids.
     *
     * @throws BaseException when either filter input is missing
     */
    private List<IsfdMeshOp2RecordData> findRecordByResultNameAndIds(MongoCollection<Document> collection, String resultName, List<Integer> elementIds) {
        // Both filters are mandatory — fail fast on missing input.
        if (CollectionUtils.isEmpty(elementIds)) {
            throw new BaseException("节点清单不能为空！");
        }
        if (StringUtils.isBlank(resultName)) {
            throw new BaseException("结果名称不能为空！");
        }

        // Filter by the record type marker, result name, and the element-id set.
        BasicDBObject query = new BasicDBObject()
            .append("@class", PyNastranOP2Reader.RESULT)
            .append("resultName", resultName)
            .append("entity", new BasicDBObject("$in", elementIds));

        // Decode each matching document back into the record POJO.
        List<IsfdMeshOp2RecordData> results = new ArrayList<>();
        for (Document document : collection.find(query)) {
            results.add(mongoTemplate.getConverter().read(IsfdMeshOp2RecordData.class, document));
        }
        return results;
    }


    /**
     * Aggregates, per element, the max and min component values across all
     * load cases, tagging each extreme with the load case that produced it.
     */
    @Override
    public IsfdMeshOp2CompareResultData getResultData(IsfdMeshOp2QueryBo queryBo) {

        // Invert xIndex -> elementId into elementId -> xIndex.
        // NOTE(review): Collectors.toMap throws on duplicate element ids —
        // presumably the pairs are unique; confirm with the caller.
        Map<Integer, Integer> elementPairs = queryBo.getElementPairs();
        Map<Integer, Integer> reversedMap = elementPairs.entrySet().stream()
            .collect(Collectors.toMap(
                Map.Entry::getValue, // 作为新的 key
                Map.Entry::getKey    // 作为新的 value
            ));

        // Group every component value by its element id.
        List<IsfdMeshOp2RecordSubResult> subResults = findOP2RecordSubResults(queryBo);
        Map<Integer, List<IsfdMeshOp2RecordSubResult>> byElement = subResults.stream()
            .collect(Collectors.groupingBy(IsfdMeshOp2RecordSubResult::getEntity));

        List<IsfdMeshOp2CompareResultDataDetail> dataDetails = new ArrayList<>();
        byElement.forEach((elementId, values) -> {
            // Keep only the extremes (and the load cases producing them).
            IsfdMeshOp2RecordSubResult max = values.stream()
                .max(Comparator.comparing(IsfdMeshOp2RecordSubResult::getValue))
                .orElseThrow(() -> new BaseException("未找到最大值"));
            IsfdMeshOp2RecordSubResult min = values.stream()
                .min(Comparator.comparing(IsfdMeshOp2RecordSubResult::getValue))
                .orElseThrow(() -> new BaseException("未找到最小值"));

            IsfdMeshOp2CompareResultDataDetail detail = new IsfdMeshOp2CompareResultDataDetail();
            detail.setElementId(elementId);
            detail.setXIndex(reversedMap.get(elementId));
            detail.setMaxDataLoadCase(max.getLoadCase());
            detail.setMaxData(max.getValue());
            detail.setMinDataLoadCase(min.getLoadCase());
            detail.setMinData(min.getValue());
            dataDetails.add(detail);
        });

        IsfdMeshOp2CompareResultData result = new IsfdMeshOp2CompareResultData();
        result.setDataDetailList(dataDetails);
        return result;
    }

    /**
     * Pages through records of a result/subcase pair in the given collection.
     *
     * @param collectionName record collection (named after the uploadId)
     * @param resultName     result to filter on
     * @param subCaseId      load case to filter on
     * @param page           1-based page number
     * @param pageSize       page size
     * @return the requested page of raw documents
     * @throws BaseException when the collection is missing or the query fails
     */
    @Override
    public List<Document> getAllRecords(String collectionName, String resultName, Integer subCaseId, int page, int pageSize) {
        MongoDatabase database = mongoClient.getDatabase(MongoDataConstant.OP2_DATABASE);

        // BUGFIX: this check used to live inside the try, so its BaseException
        // was caught by the blanket catch and re-wrapped with a misleading
        // "获取记录失败" message. Validate before entering the try.
        if (!collectionExists(database, collectionName)) {
            throw new BaseException("表未创建！: " + collectionName);
        }

        try {
            MongoCollection<Document> collection = database.getCollection(collectionName);

            Bson filter = Filters.and(
                Filters.eq("resultName", resultName),
                Filters.eq("loadCase", subCaseId)
            );

            // Page via skip/limit and stream the cursor straight into the list.
            List<Document> result = new ArrayList<>();
            collection.find(filter)
                .skip((page - 1) * pageSize)
                .limit(pageSize)
                .into(result);

            return result;
        } catch (Exception e) {
            log.error("获取记录失败，集合名：" + collectionName, e);
            throw new BaseException("获取记录失败，集合名：" + collectionName, e.getMessage());
        }
    }

    /**
     * Returns, per result name, one page of element ids sampled from that
     * result's FIRST subcase.
     *
     * @throws BaseException when no parse record exists for the upload
     */
    @Override
    public Map<String, List<Integer>> getSampleElements(String uploadId, int page, int pageSize) {
        // Resolve the parse info for this upload.
        MongoDatabase database = mongoClient.getDatabase(MongoDataConstant.OP2_DATABASE);
        MongoCollection<Document> parseInfoCollection = database.getCollection(MongoDataConstant.OP2_PARSE_INFO_COLLECTION);
        Document infoDoc = parseInfoCollection.find(Filters.eq("uploadId", uploadId)).first();
        if (infoDoc == null) {
            throw new BaseException("未找到解析记录！id: " + uploadId);
        }
        IsfdMeshOp2Info op2ParseInfo = mongoTemplate.getConverter().read(IsfdMeshOp2Info.class, infoDoc);

        // One representative subcase id (the first) per result name.
        Map<String, Integer> resultNameIdMap = op2ParseInfo.getOp2Results().stream()
            .collect(Collectors.toMap(
                IsfdMeshOp2Info.Op2Result::getName,
                r -> r.getOp2Subcases().get(0).getId()
            ));

        Map<String, List<Integer>> sampleElements = new HashMap<>();
        resultNameIdMap.forEach((resultName, subCaseId) -> {
            // The record collection is named after the uploadId.
            List<Document> records = getAllRecords(uploadId, resultName, subCaseId, page, pageSize);
            List<Integer> entityIds = records.stream()
                .map(r -> r.getInteger("entity"))
                .filter(Objects::nonNull)
                .toList();
            sampleElements.put(resultName, entityIds);
        });

        return sampleElements;
    }

    /**
     * Schedules/executes structuring for the given uploads: syncs status for
     * groups that already reached SUCCESS/STRUCTURING, then dispatches the
     * remaining uploads to the configured parser (SimRight remote or local).
     *
     * @param uploadIds uploads to process; no-op when empty
     */
    @Override
    public void executeParse(List<String> uploadIds) {
        if (CollectionUtils.isEmpty(uploadIds)) {
            log.info("上传列表为空，任务结束！");
            return;
        }

        // Load all mesh-model records for the requested uploads.
        String collectionName = isfdMeshOp2Service.getMeshModelTableName();
        IsfdDatabaseMongoQuery mongoQuery = new IsfdDatabaseMongoQuery();

        IsfdDatabaseMongoQuery.IsfdDatabaseMongoQueryCondition idsCondition =
            new IsfdDatabaseMongoQuery.IsfdDatabaseMongoQueryCondition();
        idsCondition.setFieldName("uploadId");
        idsCondition.setOperator("in");
        idsCondition.setValue(String.join(",", uploadIds));
        mongoQuery.setConditions(List.of(idsCondition));

        List<Document> results = isfdDatabaseMongoService.findAll(collectionName, mongoQuery);
        if (CollectionUtils.isEmpty(results)) {
            log.info("所有文件都已结构化，任务调度执行完成！");
            return;
        }

        // Resolve the configured parser from the dictionary.
        // BUGFIX: constant-first equalsIgnoreCase also rejects a null dictionary
        // value instead of throwing an NPE.
        String parser = iSysDictDataService.selectDictValueByTypeAndLabel(IsfdMeshModelConstants.MODEL_MESH, IsfdMeshModelConstants.MESH_OP2_PARSER_SWITCH);
        if (!IsfdMeshModelConstants.MESH_OP2_PARSER_SIMRIGHT.equalsIgnoreCase(parser)
            && !IsfdMeshModelConstants.MESH_OP2_PARSER_SFSOFT.equalsIgnoreCase(parser)) {
            log.error("不支持的解析器：" + parser);
            return;
        }

        // Group records by uploadId (records without one are skipped).
        Map<String, List<Document>> uploadIdGroup = new HashMap<>();
        for (Document doc : results) {
            String uploadId = (String) doc.get("uploadId");
            if (StringUtils.isBlank(uploadId)) continue;

            uploadIdGroup.computeIfAbsent(uploadId, k -> new ArrayList<>()).add(doc);
        }

        for (Map.Entry<String, List<Document>> entry : uploadIdGroup.entrySet()) {
            String uploadId = entry.getKey();
            List<Document> docs = entry.getValue();

            // If any record already reached SUCCESS (preferred) or STRUCTURING,
            // propagate that status to the whole group and skip re-parsing.
            Document referenceDoc = docs.stream()
                .filter(d -> IsfdMeshOp2BoStatusEnum.STRUCTURE_SUCCESS.getValue().equals(d.getInteger("status")))
                .findFirst()
                .orElseGet(() -> docs.stream()
                    .filter(d -> IsfdMeshOp2BoStatusEnum.STRUCTURING.getValue().equals(d.getInteger("status")))
                    .findFirst()
                    .orElse(null));

            if (referenceDoc != null) {

                Integer statusValue = referenceDoc.getInteger("status");
                IsfdMeshOp2BoStatusEnum targetStatus = IsfdMeshOp2BoStatusEnum.fromValue(statusValue);

                // BUGFIX: use locals here — the old code assigned into the shared
                // `parser` variable, leaking one group's recorded parser into all
                // later iterations of this loop.
                String refParser = referenceDoc.getString("parser");
                String refParseTaskId = referenceDoc.getString("parseTaskId");

                for (Document doc : docs) {
                    this.updateStatusByUploadId(
                        doc.getString("uploadId"),
                        targetStatus,
                        refParser,
                        refParseTaskId,
                        "统一更新为 " + targetStatus.getDesc()
                    );
                }

                log.info("uploadId={} 存在 {} 状态，全部更新为 {}，跳过解析",
                    uploadId, targetStatus.getDesc(), targetStatus.getDesc());
                continue;
            }

            // Use the first record of the group as the parse target.
            Document doc = docs.get(0);
            String objectId = (String) doc.get("_id");
            String fileHash = (String) doc.get("fileHash");
            String filePath = (String) doc.get("fileUrl");

            if (StringUtils.isBlank(objectId) || StringUtils.isBlank(fileHash) || StringUtils.isBlank(filePath)) {
                log.error("结果数据存在问题，uploadId={}", uploadId);
                this.updateStatusByUploadId(uploadId, IsfdMeshOp2BoStatusEnum.STRUCTURE_FAILURE, parser, null, "结果数据存在问题");
                continue;
            }

            IsfdMeshOp2InfoBo infoBo = new IsfdMeshOp2InfoBo();
            infoBo.setObjectId(objectId);
            infoBo.setUploadId(uploadId);
            infoBo.setFilePath(filePath);
            infoBo.setFileHash(fileHash);
            infoBo.setParser(parser);

            // Mark as STRUCTURING before starting so other schedulers skip this
            // upload. NOTE(review): this read-then-write is not truly atomic —
            // a concurrent scheduler could still race; confirm whether the
            // underlying update applies a conditional filter.
            this.updateStatusByUploadId(uploadId, IsfdMeshOp2BoStatusEnum.STRUCTURING, parser, null, null);

            try {
                log.info("开始执行结构化任务，uploadId：" + infoBo.getUploadId());
                if (parser.equalsIgnoreCase(IsfdMeshModelConstants.MESH_OP2_PARSER_SIMRIGHT)) {

                    // SimRight parses remotely and returns a task id to poll later.
                    String taskId = executeSimRightParser(infoBo);
                    this.updateStatusByUploadId(uploadId, IsfdMeshOp2BoStatusEnum.STRUCTURING, parser, taskId, null);

                } else if (parser.equalsIgnoreCase(IsfdMeshModelConstants.MESH_OP2_PARSER_SFSOFT)) {

                    // Local parse + save, synchronous.
                    this.parseAndSaveResultByType(infoBo);

                } else {
                    throw new BaseException("不支持的解析器：" + parser);
                }

            } catch (Exception e) {
                log.error("任务提交失败！uploadId: " + infoBo.getUploadId(), e);
                // Roll the status back to INITIAL so a later run can retry.
                String message = "任务提交失败！uploadId: " + infoBo.getUploadId();
                this.updateStatusByUploadId(uploadId, IsfdMeshOp2BoStatusEnum.INITIAL, null, null, message);
            }
        }
    }

    /**
     * Submits the file to the SimRight remote parser and returns its task id.
     *
     * @throws BaseException when the response cannot be parsed or has no taskId
     */
    @Override
    public String executeSimRightParser(IsfdMeshOp2InfoBo infoBo ) {
        // Normalize the local path for the RPC: forward slashes only, strip a
        // leading "/data/" (or "data/") prefix, then any remaining leading slashes.
        String localFilePath = infoBo.getFilePath()
            .replace("\\", "/")
            .replaceFirst("^/*data/", "")
            .replaceFirst("^/+", "");

        String data = simrightRpcUtils.convertFromLocal(localFilePath);
        JSONObject json;
        try {
            json = JSONUtil.parseObj(data);
        } catch (Exception e) {
            log.error("解析 convert_from_local 接口返回 data 失败, data={}", data, e);
            throw new BaseException("解析 convert_from_local 接口返回 data 失败: " + e.getMessage());
        }

        String taskId = json.getStr("taskId");
        if (StringUtils.isBlank(taskId)) {
            log.error("调用 convert_from_local 接口失败，返回的 taskId 为空, data={}", data);
            throw new BaseException("调用 convert_from_local 接口失败，返回的 taskId 为空");
        }

        return taskId;
    }

    /**
     * Drops the structured-record collection for an upload and removes its
     * parse-info documents.
     *
     * @return number of parse-info documents deleted (0 when none existed)
     */
    @Override
    public int deleteOp2ParseRecords(String uploadId) {
        MongoDatabase database = mongoClient.getDatabase(MongoDataConstant.OP2_DATABASE);
        MongoCollection<Document> parseInfoCollection = database.getCollection(MongoDataConstant.OP2_PARSE_INFO_COLLECTION);
        Document infoDoc = parseInfoCollection.find(Filters.eq("uploadId", uploadId)).first();
        if (infoDoc == null) {
            log.error("未找到解析记录！id: " + uploadId);
            return 0;
        }

        // Drop the data collection first, then delete the bookkeeping records.
        IsfdMeshOp2Info op2ParseInfo = mongoTemplate.getConverter().read(IsfdMeshOp2Info.class, infoDoc);
        database.getCollection(op2ParseInfo.getCollectionName()).drop();

        DeleteResult deleteResult = parseInfoCollection.deleteMany(Filters.eq("uploadId", uploadId));
        return (int) deleteResult.getDeletedCount();
    }


}
