package com.tsy.pointcloud.service.impl;

import com.baomidou.mybatisplus.core.conditions.query.LambdaQueryWrapper;
import com.baomidou.mybatisplus.core.toolkit.Wrappers;
import com.baomidou.mybatisplus.extension.service.impl.ServiceImpl;
import com.tsy.common.constant.CacheConstants;
import com.tsy.common.constant.PointCloudConstants;
import com.tsy.common.core.redis.RedisCache;
import com.tsy.common.enums.UploadStatus;
import com.tsy.common.exception.ServiceException;
import com.tsy.common.utils.AntZipUtil;
import com.tsy.common.utils.SecurityUtils;
import com.tsy.common.utils.converter.PdalUtil;
import com.tsy.common.utils.converter.PointCloudUtil;
import com.tsy.common.utils.file.FileUtils;
import com.tsy.framework.config.ServerConfig;
import com.tsy.pointcloud.domain.FileInfo;
import com.tsy.pointcloud.domain.dto.HandleResultDto;
import com.tsy.pointcloud.domain.dto.UploadResultDto;
import com.tsy.pointcloud.service.FileInfoService;
import com.tsy.pointcloud.mapper.FileInfoMapper;
import com.tsy.pointcloud.sse.SseService;
import lombok.extern.slf4j.Slf4j;
import org.apache.commons.lang3.RandomStringUtils;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.beans.factory.annotation.Value;
import org.springframework.context.annotation.Lazy;
import org.springframework.data.redis.core.RedisTemplate;
import org.springframework.data.redis.core.script.DefaultRedisScript;
import org.springframework.scheduling.annotation.Async;
import org.springframework.stereotype.Service;
import org.springframework.transaction.annotation.Transactional;
import org.springframework.transaction.support.TransactionSynchronization;
import org.springframework.transaction.support.TransactionSynchronizationManager;
import org.springframework.web.multipart.MultipartFile;

import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.StandardCopyOption;
import java.util.Collections;
import java.util.Date;
import java.util.Objects;
import java.util.Set;
import java.util.concurrent.TimeUnit;

/**
* @author Administrator
* @description 针对表【file_info(文件信息表)】的数据库操作Service实现
* @createDate 2025-03-25 14:54:52
*/
@Service
@Slf4j
public class FileInfoServiceImpl extends ServiceImpl<FileInfoMapper, FileInfo>
    implements FileInfoService{


    @Value(value = "${tsy.profile}")
    private String basePath;

    @Autowired
    private RedisCache redisCache;

    private RedisTemplate<Object, Object> redisTemplate;
    @Autowired
    public void setRedisTemplate(RedisTemplate<Object, Object> redisTemplate)
    {
        this.redisTemplate = redisTemplate;
    }

    @Autowired
    private ServerConfig serverConfig;

    @Autowired
    @Lazy
    private FileInfoServiceImpl fileInfoService;

    @Autowired
    private SseService sseService;

    public static final String VECTOR_FOLDER = "vector";

    /**
     * 文件临时目录
     */
    public static final String FILE_TEMP_FOLDER = "temp";

    /**
     * 点云切片文件路径
     */
    private static final String CONVERTER_FOLDER = "converter";

    /**
     * 点云文件路径
     */
    private static final String POINT_CLOUD_FOLDER = "pointcloud";

    /**
     * 处理点云文件路径
     */
    private static final String HANDLE_POINT_CLOUD_FOLDER = "handle";

    /**
     * 文件处理锁前缀
     */
    private static final String FILE_PROCESS_LOCK_PREFIX = "file_process_lock:";
    
    // 分布式锁的获取脚本
    private static final String LOCK_SCRIPT = 
            "if redis.call('setnx', KEYS[1], ARGV[1]) == 1 then " +
            "  redis.call('expire', KEYS[1], tonumber(ARGV[2])) " +
            "  return 1 " +
            "else " +
            "  return 0 " +
            "end";
    
    // 分布式锁的释放脚本
    private static final String UNLOCK_SCRIPT = 
            "if redis.call('get', KEYS[1]) == ARGV[1] then " +
            "  return redis.call('del', KEYS[1]) " +
            "else " +
            "  return 0 " +
            "end";

    private static final String PYTHON_WINDOWS = "python";
    private static final String PYTHON_LINUX = "python3";
    // 根据当前系统选择合适的工具路径
    private static final String PYTHON_TOOL = System.getProperty("os.name").startsWith("Windows") ? PYTHON_WINDOWS : PYTHON_LINUX;

    /**
     * Receives one chunk of a chunked upload. Each chunk is written to a per-file temp
     * directory keyed by the file MD5 and its index is recorded in a Redis set; once all
     * {@code chunks} indices are present, a distributed lock guards a single thread that
     * inserts the FileInfo record (status 0 = awaiting transcode) and, after the
     * transaction commits, kicks off the async merge/transcode task.
     *
     * @param file       payload of the current chunk
     * @param fileName   original file name; suffixed with a random string on name collision
     * @param fileMd5    MD5 of the complete file (temp folder, cache key and lock key)
     * @param chunkIndex index of this chunk, as sent by the client
     * @param chunks     total number of chunks expected
     * @return UPLOADING while chunks remain (or another thread is merging), COMPLETE after
     *         the last chunk is accepted
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public UploadResultDto fragmentUpload(MultipartFile file, String fileName, String fileMd5, Integer chunkIndex, Integer chunks) {
        UploadResultDto resultDto = new UploadResultDto();
        log.info("文件分片开始上传第{}片", chunkIndex);
        String lockKey = FILE_PROCESS_LOCK_PREFIX + fileMd5;
        String requestId = Thread.currentThread().getId() + ":" + System.currentTimeMillis();
        try {
            // Per-file temp directory: {basePath}/temp/{md5}
            String tempFolder = basePath + File.separator + FILE_TEMP_FOLDER;
            File tempFileFolderFile = new File(tempFolder + File.separator + fileMd5);
            synchronized (fileMd5.intern()) {
                // Guard directory creation against concurrent chunk requests (same-JVM only).
                if (!tempFileFolderFile.exists()) {
                    tempFileFolderFile.mkdirs();
                }
            }

            // Store this chunk in a file named by its index.
            File newFile = new File(tempFileFolderFile.getPath() + "/" + chunkIndex);
            file.transferTo(newFile);

            // Record the chunk index in Redis atomically.
            String cacheKey = getCacheKey(fileMd5);
            Long currentSize = redisTemplate.opsForSet().add(cacheKey, chunkIndex);
            
            // First chunk for this file: attach an expiry so abandoned uploads age out.
            if (currentSize != null && currentSize == 1) {
                redisTemplate.expire(cacheKey, PointCloudConstants.FRAGMENT_UPLOAD_EXPIRATION, TimeUnit.DAYS);
            }

            // How many distinct chunks have arrived so far.
            Long setSize = redisTemplate.opsForSet().size(cacheKey);

            // Not all chunks are in yet.
            if (setSize == null || setSize < chunks) {
                log.info("文件分片结束上传第{}片", chunkIndex);
                resultDto.setStatus(UploadStatus.UPLOADING.getStatus());
                return resultDto;
            }

            // Distributed lock so exactly one thread performs the completion bookkeeping.
            boolean lockAcquired = acquireLock(lockKey, requestId, 30);
            
            if (!lockAcquired) {
                // Another thread is already finishing this upload; report still-uploading.
                resultDto.setStatus(UploadStatus.UPLOADING.getStatus());
                return resultDto;
            }
            
            // Resolve display-name collisions with a random 5-char suffix.
            FileInfo getFileInfo = getOne(new LambdaQueryWrapper<FileInfo>().eq(FileInfo::getOriginalName, fileName));
            if (getFileInfo != null) {
                // Rename: keep base name, append "_<random5>" before the extension.
                fileName = FileUtils.getNameNotSuffix(fileName) + "_" + RandomStringUtils.random(5, true, true) + FileUtils.getFileType(fileName);
            }
            // Insert the record; status 0 = uploaded but not yet transcoded.
            Long userId = SecurityUtils.getUserId();
            FileInfo fileInfo = new FileInfo();
            fileInfo.setStatus(0);
            fileInfo.setMd5(fileMd5);
            fileInfo.setOriginalName(fileName);
            fileInfo.setType(FileUtils.getFileType(fileName));
            fileInfo.setCreateTime(new Date());
            fileInfo.setUpdateTime(new Date());
            fileInfo.setUserId(userId);
            save(fileInfo);


            log.info("文件分片最后一片结束上传第{}片", chunkIndex);
            resultDto.setStatus(UploadStatus.COMPLETE.getStatus());

            // Run the merge/transcode only after the transaction commits so the async worker
            // can see the new row:
            // HTTP thread → tx begins → logic → register afterCommit → tx commits →
            // afterCommit runs → transferFile → task submitted to pool → HTTP response.
            String finalFileName = fileName;
            TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
                @Override
                public void afterCommit() {
                    // Call through the injected proxy so @Async is honored.
//                        String urlPrefix = serverConfig.getUrl();
                    fileInfoService.transferFile(fileMd5, finalFileName, userId);
                }
            });


        } catch (Exception e) {
            // NOTE(review): the exception is swallowed and a dto with a null status is
            // returned; this also prevents @Transactional(rollbackFor) from ever firing.
            // Consider rethrowing or setting an explicit failure status.
            log.error("文件上传失败", e);
        } finally {
            // Safe even when the lock was never acquired: the release script only deletes
            // the key when its value matches this requestId.
            releaseLock(lockKey, requestId);
        }
        return resultDto;
    }


    /**
     * Imports an already-on-disk file (batch upload path): copies it into the point-cloud
     * directory, converts/tiles it, then inserts a FileInfo record.
     *
     * NOTE(review): the returned status is COMPLETE_URL even when conversion failed (only
     * the DB insert is skipped), and the insert itself runs inside a finally block —
     * callers cannot distinguish success from failure; consider surfacing an error status.
     *
     * @param file     source file already present on the local filesystem
     * @param fileName original file name (an existing record with this name short-circuits with OK)
     * @return upload status dto
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public UploadResultDto batchFileUpload(File file, String fileName) {
        UploadResultDto resultDto = new UploadResultDto();
        try {

            // A record with the same original name already exists: treat as already uploaded.
            FileInfo getFileInfo = getOne(new LambdaQueryWrapper<FileInfo>().eq(FileInfo::getOriginalName, fileName));
            if (getFileInfo != null) {
                resultDto.setStatus(UploadStatus.OK.getStatus());
                return resultDto;
                // (disabled) rename instead of skipping:
//                fileName = FileUtils.getNameNotSuffix(fileName) + "_" + RandomStringUtils.random(5, true, true) + FileUtils.getFileType(fileName);
            }

            String fileMd5 = FileMD5Calculator.calculateFileMD5(file);

            String fileType = "";
            boolean transferSuccess = true; // whether copy + conversion succeeded
            String targetFilePath = "";
            try {

                // Destination directory for the raw point-cloud file.
                String targetFolder = basePath + File.separator + POINT_CLOUD_FOLDER;
                File targetFolderFile = new File(targetFolder);
                if (!targetFolderFile.exists()) {
                    targetFolderFile.mkdirs();
                }

                fileType = FileUtils.getFileType(fileName);

                targetFilePath = targetFolderFile.getPath() + File.separator + fileMd5 + fileType;

                // Copy the source file to {md5}{ext}, replacing any stale copy.
                Path source = Paths.get(file.getPath());
                Path target = Paths.get(targetFilePath);
                Files.copy(source, target, StandardCopyOption.REPLACE_EXISTING);

                // Tile output directory: {basePath}/converter/{md5}
                String converterFolder = basePath + File.separator + CONVERTER_FOLDER + File.separator + fileMd5;
                File converterFolderFile = new File(converterFolder);
                if (!converterFolderFile.exists()) {
                    converterFolderFile.mkdirs();
                }

                // Format conversion via PDAL: pcd/ply -> las, before tiling.
                if (fileType.equals(".pcd") || fileType.equals(".ply")) {
                    String translateFilePath = targetFilePath.replace(fileType, ".las");
                    PdalUtil.pintCloudTypeTranslate(targetFilePath, translateFilePath);
                    targetFilePath = translateFilePath;
                    fileType = ".las";
                }

                if (fileType.equals(".las") || fileType.equals(".laz")) {
                    PointCloudUtil.pintCloudConverter(targetFilePath, converterFolder);
                }

            } catch (Exception e) {
                log.error("文件转码失败", e);
                transferSuccess = false;

            } finally {
                if (transferSuccess) {

                    // Insert the record; status 1 = converted, tiles ready.
                    FileInfo fileInfo = new FileInfo();
                    fileInfo.setMd5(fileMd5);
                    fileInfo.setOriginalName(fileName);
                    fileInfo.setType(FileUtils.getFileType(fileName));
                    fileInfo.setCreateTime(new Date());
                    fileInfo.setUpdateTime(new Date());
                    // NOTE(review): hard-coded owner id — presumably a batch/system account; confirm.
                    fileInfo.setUserId(100L);
                    fileInfo.setStatus(1);
                    fileInfo.setNewName(fileMd5 + fileType);
                    String converterPath = "/profile/" + CONVERTER_FOLDER + "/" + fileMd5 + "/metadata.json";
                    fileInfo.setPath(converterPath);
                    fileInfo.setSize(new File(targetFilePath).length());
                    save(fileInfo);
                    log.info("=========url:{}===========", fileInfo.getPath());
                }

            }
            resultDto.setStatus(UploadStatus.COMPLETE_URL.getStatus());

        } catch (Exception e) {
            log.error("文件上传失败", e);
        }
        return resultDto;
    }



    /**
     * Merges uploaded chunks into the final point-cloud file, converts it to the viewer
     * tile format, updates the DB record and notifies the client over SSE.
     * Runs asynchronously; invoked through the self-injected proxy after the upload
     * transaction commits so the pending record is visible to this worker thread.
     *
     * @param fileMd5  MD5 of the complete file (temp folder name, SSE client id, cache key)
     * @param fileName original file name (used for the extension and the SSE payload)
     * @param userId   owner of the pending (status 0) record
     */
    @Async(value = "threadPoolTaskExecutor")
    public void transferFile(String fileMd5, String fileName, Long userId) {
        // Look up the pending record created by fragmentUpload (status 0 = not transcoded).
        LambdaQueryWrapper<FileInfo> queryWrapper = Wrappers.lambdaQuery(FileInfo.class)
                .eq(FileInfo::getMd5, fileMd5)
                .eq(FileInfo::getUserId, userId)
                .eq(FileInfo::getStatus, 0)
                .last("limit 1");
        FileInfo info = getOne(queryWrapper);
        if (info == null) return;

        String fileType = "";
        boolean transferSuccess = true; // whether merge + conversion succeeded
        String targetFilePath = "";
        UploadResultDto resultDto = new UploadResultDto();
        try {
            // Temp directory holding the uploaded chunks.
            String tempFolder = basePath + File.separator + FILE_TEMP_FOLDER + File.separator + fileMd5;
            // Destination directory for the merged file.
            String targetFolder = basePath + File.separator + POINT_CLOUD_FOLDER;
            File targetFolderFile = new File(targetFolder);
            if (!targetFolderFile.exists()) {
                targetFolderFile.mkdirs();
            }

            fileType = FileUtils.getFileType(fileName);

            targetFilePath = targetFolderFile.getPath() + File.separator + fileMd5 + fileType;

            // Concatenate the chunks into the target file (third arg presumably removes
            // the temp chunks afterwards — confirm against FileUtils.unionFile).
            FileUtils.unionFile(tempFolder, targetFilePath, true);

            // Tile output directory: {basePath}/converter/{md5}
            String converterFolder = basePath + File.separator + CONVERTER_FOLDER + File.separator + fileMd5;
            File converterFolderFile = new File(converterFolder);
            if (!converterFolderFile.exists()) {
                converterFolderFile.mkdirs();
            }

            // Format conversion via PDAL: pcd/ply -> las, before tiling.
            if (fileType.equals(".pcd") || fileType.equals(".ply")) {
                String translateFilePath = targetFilePath.replace(fileType, ".las");
                PdalUtil.pintCloudTypeTranslate(targetFilePath, translateFilePath);
                targetFilePath = translateFilePath;
                fileType = ".las";
            }

            if (fileType.equals(".las") || fileType.equals(".laz")) {
                PointCloudUtil.pintCloudConverter(targetFilePath, converterFolder, progress -> {
                    try {
                        // Push tiling progress to the client over SSE.
                        resultDto.setStatus(UploadStatus.WAITING.getStatus());
                        resultDto.setProgress(progress);
                        sseService.sendMessage(fileMd5, resultDto);
                    } catch (IOException e) {
                        log.error("sse消息发送失败", e);
                    }
                });
            }

        } catch (Exception e) {
            log.error("文件转码失败", e);
            transferSuccess = false;

        } finally {
            if (transferSuccess) {
                // Mark the record as transcoded and point it at the tile metadata.
                info.setStatus(1);
                info.setNewName(fileMd5 + fileType);
                String converterPath = "/profile/" + CONVERTER_FOLDER + "/" + fileMd5 + "/metadata.json";
                info.setPath(converterPath);
                info.setSize(new File(targetFilePath).length());
                info.setUpdateTime(new Date());
                updateById(info);

                // Drop the chunk-tracking set from Redis.
                String cacheKey = getCacheKey(fileMd5);
                // Fetch the members so they can be removed in one call.
                Set<Object> members = redisTemplate.opsForSet().members(cacheKey);
                if (members != null && !members.isEmpty()) {
                    // Pass the members as varargs.
                    redisTemplate.opsForSet().remove(cacheKey, members.toArray());
                }

                // Final SSE push: transcoding finished, URL available.
                resultDto.setStatus(UploadStatus.COMPLETE_URL.getStatus());
                resultDto.setProgress(100);
                resultDto.setFileId(info.getId());
                resultDto.setFileUrl(info.getPath());
                resultDto.setFileMd5(fileMd5);
                resultDto.setFileSize(info.getSize());
                resultDto.setFileName(fileName);

                try {
                    sseService.sendMessage(fileMd5, resultDto);
                } catch (IOException e) {
                    log.error("sse消息发送失败", e);
                } finally {
                    // Close the SSE channel for this upload.
                    sseService.disconnect(fileMd5);
                }

            }
            // NOTE(review): on failure no SSE message is sent and the channel stays open —
            // the client is never told the transcode failed; consider pushing a failure status.
            log.info("=========url:{}===========", info.getPath());
        }
    }

    /**
     * Looks up a file by MD5 to support instant ("second-pass") and resumed uploads.
     * Status legend used across this service:
     * 1 instant upload, 2 some chunks uploaded, 0 not uploaded, 3 upload unfinished,
     * 4 upload complete, 5 server-side tiling in progress, 6 tiling done (URL available).
     *
     * @param fileMd5  MD5 of the file the client wants to upload
     * @param fileName desired original file name
     * @return OK (with URL) when the file already exists, PARTIAL (with chunk indices)
     *         when some chunks were previously uploaded, NOT_UPLOAD otherwise
     */
    @Override
    public UploadResultDto findByMd5(String fileMd5, String fileName) {
        UploadResultDto result = new UploadResultDto();

        // 1. An existing record with this MD5 means the physical file is already on disk.
        FileInfo existing = fileInfoService.getOne(
                Wrappers.lambdaQuery(FileInfo.class).eq(FileInfo::getMd5, fileMd5).last("limit 1"));
        if (existing != null) {
            // Status 0 means another process is still uploading/transcoding this file.
            if (existing.getStatus() == 0) {
                throw new ServiceException("该文件已有进程正在上传，请稍后再试！");
            }
            // 2. Resolve display-name collisions with a random 5-char suffix.
            FileInfo sameName = getOne(new LambdaQueryWrapper<FileInfo>().eq(FileInfo::getOriginalName, fileName));
            if (sameName != null) {
                fileName = FileUtils.getNameNotSuffix(fileName) + "_"
                        + RandomStringUtils.random(5, true, true) + FileUtils.getFileType(fileName);
            }
            // Clone the record for the current user; clearing the id forces an insert.
            Long currentUserId = SecurityUtils.getUserId();
            Date now = new Date();
            existing.setOriginalName(fileName);
            existing.setUserId(currentUserId);
            existing.setCreateTime(now);
            existing.setUpdateTime(now);
            existing.setId(null);
            fileInfoService.save(existing);

            result.setStatus(UploadStatus.OK.getStatus());
            result.setFileMd5(fileMd5);
            result.setFileName(fileName);
            result.setFileSize(existing.getSize());
            result.setFileUrl(existing.getPath());
            return result;
        }

        // 3. No record: check Redis for chunks left by a previous, unfinished upload.
        Set<Integer> uploadedChunks = redisCache.getCacheSet(getCacheKey(fileMd5));
        if (uploadedChunks != null && !uploadedChunks.isEmpty()) {
            result.setStatus(UploadStatus.PARTIAL.getStatus());
            result.setCacheList(uploadedChunks);
            return result;
        }

        // Nothing uploaded yet.
        result.setStatus(UploadStatus.NOT_UPLOAD.getStatus());
        return result;
    }

    /**
     * Runs semantic segmentation on an uploaded point cloud via the external KPConv
     * Python script, re-tiles the segmented result and updates the DB record.
     * Outcome is pushed to the client over SSE on channel {@code <md5>_handle}.
     * Executed asynchronously on the shared task executor.
     *
     * Fix: the original finally block sent the "success" SSE message unconditionally,
     * so a failed run produced BOTH a failure and a success notification; the success
     * message is now guarded by a {@code success} flag.
     *
     * @param fileName original file name (must match an unsegmented record)
     * @param fileMd5  file MD5 (lock key, SSE client id, output folder name)
     * @param userId   owner of the file record
     */
    @Override
    @Async(value = "threadPoolTaskExecutor")
    public void handle(String fileName, String fileMd5, Long userId) {
        // 1. Locate the transcoded (status=1) but not yet segmented (handle IS NULL) record.
        LambdaQueryWrapper<FileInfo> queryWrapper = Wrappers.lambdaQuery(FileInfo.class)
                .eq(FileInfo::getOriginalName, fileName)
                .eq(FileInfo::getMd5, fileMd5)
                .eq(FileInfo::getStatus, 1)
                .eq(FileInfo::getUserId, userId)
                .isNull(FileInfo::getHandle);
        FileInfo fileInfo = fileInfoService.getOne(queryWrapper);

        HandleResultDto resultDto = new HandleResultDto();
        String lockKey = FILE_PROCESS_LOCK_PREFIX + fileMd5 + "_handle";
        String requestId = Thread.currentThread().getId() + ":" + System.currentTimeMillis();
        String clientId = fileMd5 + "_handle";
        boolean success = false; // only send the success SSE message when the whole pipeline completed
        try {
            if (fileInfo == null) {
                throw new ServiceException("文件不存在或已分割处理！");
            }

            // Distributed lock: only one process may segment a given file at a time.
            boolean lockAcquired = acquireLock(lockKey, requestId, 30);
            if (!lockAcquired) {
                throw new ServiceException("文件处理中，请稍后再试！");
            }

            String newName = fileInfo.getNewName();

            // Source point-cloud file.
            String pointCloudFolder = basePath + File.separator + POINT_CLOUD_FOLDER;
            String pointCloudFilePath = pointCloudFolder + File.separator + newName;

            // Output folder for the segmented cloud.
            String handleFolder = pointCloudFolder + File.separator + HANDLE_POINT_CLOUD_FOLDER;
            if (!new File(handleFolder).exists()) {
                new File(handleFolder).mkdirs();
            }

            // The Python script writes results under an extra "predictions" sub-directory.
            String handleFilePath = handleFolder + File.separator + "predictions" + File.separator + newName;

            // 2. Invoke the inference script.
            log.info("开始执行Python脚本处理点云");
            String pythonScript = "KPConv-Railway" + File.separator + "inference.py";

            // Fail fast if the script is missing.
            if (!new File(pythonScript).exists()) {
                throw new ServiceException("——找不到Python处理脚本！");
            }

            // Use ProcessBuilder with an argument list so paths containing spaces survive
            // (Runtime.exec(String) tokenizes on whitespace). "-u" disables Python output buffering.
            Process proc = new ProcessBuilder(
                    PYTHON_TOOL, "-u", pythonScript,
                    "--input_file", pointCloudFilePath,
                    "--saving_path", handleFolder).start();
            log.info("执行命令: {} -u {} --input_file {} --saving_path {}",
                    PYTHON_TOOL, pythonScript, pointCloudFilePath, handleFolder);

            // Drain stdout/stderr concurrently so the process cannot block on full pipes.
            PointCloudUtil.ProgressThread errorThread = new PointCloudUtil.ProgressThread(proc.getErrorStream(), null, "Python脚本错误输出");
            PointCloudUtil.ProgressThread outputThread = new PointCloudUtil.ProgressThread(proc.getInputStream(), null, "Python脚本标准输出");
            errorThread.start();
            outputThread.start();

            // Wait for completion.
            int exitCode = proc.waitFor();
            log.info("Python脚本执行完成，退出码: {}", exitCode);

            if (exitCode != 0) {
                throw new ServiceException("——Python脚本执行失败！");
            }

            // 3. Re-tile the segmented cloud, replacing the previous tiles.
            String handleConverterFolder = basePath + File.separator + CONVERTER_FOLDER + File.separator + HANDLE_POINT_CLOUD_FOLDER + File.separator + fileMd5;
            if (!new File(handleConverterFolder).exists()) {
                new File(handleConverterFolder).mkdirs();
            }
            PointCloudUtil.pintCloudConverter(handleFilePath, handleConverterFolder, progress -> {
                // Progress SSE push intentionally disabled for the segmentation step.
            });

            // 4. Mark the record as segmented and point it at the new tile metadata.
            fileInfo.setHandle(1);
            fileInfo.setPath("/profile/" + CONVERTER_FOLDER  + "/" + HANDLE_POINT_CLOUD_FOLDER + "/" + fileMd5 + "/metadata.json");
            fileInfo.setUpdateTime(new Date());
            updateById(fileInfo);
            success = true;

        } catch (Exception e) {
            // Push the failure to the client; the success branch below is skipped.
            resultDto.setFileMd5(fileMd5);
            resultDto.setFileName(fileName);
            resultDto.setStatus("0");
            resultDto.setMessage(e.getMessage());
            try {
                sseService.sendMessage(clientId, resultDto);
            } catch (IOException ex) {
                log.error("sse消息发送失败！");
            }
            log.error("点云处理失败：{}", e.getMessage());
        } finally {
            try {
                if (success) {
                    resultDto.setFileMd5(fileMd5);
                    resultDto.setFileName(fileName);
                    resultDto.setStatus("1");
                    resultDto.setFileUrl("/profile/" + CONVERTER_FOLDER  + "/" + HANDLE_POINT_CLOUD_FOLDER + "/" + fileMd5 + "/metadata.json");
                    resultDto.setMessage("点云分割成功！");
                    sseService.sendMessage(clientId, resultDto);
                }
            } catch (IOException e) {
                log.error("sse消息发送失败", e);
            } finally {
                // Release the lock (no-op if never acquired: the script checks requestId).
                releaseLock(lockKey, requestId);
                // Close the SSE channel.
                sseService.disconnect(clientId);
            }

        }
    }

    /**
     * Exports the railway track from a segmented point cloud as vector files, zipped for
     * download. Runs asynchronously; the result or failure is pushed over SSE on channel
     * {@code <md5>_vector}.
     *
     * @param fileName original file name (must match a segmented record, handle=1)
     * @param fileMd5  file MD5 (lock key, SSE client id, output folder name)
     * @param userId   owner of the record
     */
    @Override
    @Async(value = "threadPoolTaskExecutor")
    public void exportRailwayVector(String fileName, String fileMd5, Long userId) {
        log.info("开始导出轨道点云为矢量，文件名：{}，MD5：{}", fileName, fileMd5);
        // 1. Find the segmented (handle=1, status=1) record for this user.
        LambdaQueryWrapper<FileInfo> queryWrapper = Wrappers.lambdaQuery(FileInfo.class)
                .eq(FileInfo::getOriginalName, fileName)
                .eq(FileInfo::getMd5, fileMd5)
                .eq(FileInfo::getUserId, userId)
                .eq(FileInfo::getHandle, 1)
                .eq(FileInfo::getStatus, 1);
        FileInfo fileInfo = fileInfoService.getOne(queryWrapper);

        HandleResultDto resultDto = new HandleResultDto();
        String lockKey = FILE_PROCESS_LOCK_PREFIX + fileMd5 + "_vector";
        String requestId = Thread.currentThread().getId() + ":" + System.currentTimeMillis();
        String clientId = fileMd5 + "_vector";

        try {
            if (fileInfo == null) {
                throw new ServiceException("点云不存在或未进行点云分割！");
            }

            // Distributed lock: one vector export per file at a time.
            boolean lockAcquired = acquireLock(lockKey, requestId, 30);

            if (!lockAcquired) {
                throw new ServiceException("点云已有进程处理中，请稍后再试！");
            }

            // 2. Prepare the vector output directory: {basePath}/vector/{md5}/vector
            String vectorParentFolder = basePath + File.separator + VECTOR_FOLDER + File.separator + fileMd5;
            String vectorFolder = vectorParentFolder + File.separator + "vector";
            File vectorFolderFile = new File(vectorFolder);
            if (!vectorFolderFile.exists()) {
                vectorFolderFile.mkdirs();
            }
            // Output vector file prefix (without extension).
            String vectorFilePrefix = FileUtils.getNameNotSuffix(fileName) + "_railway";
            // Zip output directory: {basePath}/vector/{md5}/zip
            String zipFolder = vectorParentFolder + File.separator + "zip";
            File zipFolderFile = new File(zipFolder);
            if (!zipFolderFile.exists()) {
                zipFolderFile.mkdirs();
            }
            String zipFilePath = zipFolder + File.separator + vectorFilePrefix + ".zip";
            // (disabled) short-circuit when the zip already exists:
//            if (new File(zipFilePath).exists()) {
//                log.info("矢量文件已存在，直接返回！");
//                resultDto.setFileMd5(fileMd5);
//                resultDto.setFileName(fileName);
//                // 矢量提取 1 已有处理完成的矢量文件！
//                resultDto.setStatus("1");
//                resultDto.setMessage("已有处理完成的矢量文件！");
//                resultDto.setFileUrl("/profile/" + VECTOR_FOLDER + "/" + fileMd5 + "/" + vectorFilePrefix + ".zip");
//                sseService.sendMessage(clientId, resultDto);
//                return;
//            }

            // 3. Locate the segmented point-cloud file.
            String handleFolder = basePath + File.separator + POINT_CLOUD_FOLDER + File.separator + HANDLE_POINT_CLOUD_FOLDER;
            String newName = fileInfo.getNewName();
            String handleFilePath = handleFolder + File.separator + "predictions" + File.separator + newName;

            // 4. Run the vector-extraction Python script.
            log.info("开始执行Python脚本处理点云");
//            String pythonScript = basePath + File.separator + "script" + File.separator + "4.las_to_railway_vector.py";
            String pythonScript = "VectorScript" + File.separator + "4.las_to_railway_vector.py";

            // Fail fast if the script is missing.
            if (!new File(pythonScript).exists()) {
                throw new ServiceException("——找不到Python处理脚本！");
            }
            
            // Build the command line; "-u" disables Python output buffering.
            StringBuilder cmd = new StringBuilder();
//            String pythonExe = "python -u";  // 使用-u参数禁用Python输出缓冲
//            cmd.append(pythonExe).append(" ");
            cmd.append(PYTHON_TOOL).append(" ");
            cmd.append("-u").append(" ");
            cmd.append(pythonScript).append(" ");
            cmd.append("--input ").append(handleFilePath).append(" ");
            cmd.append("--output ").append(vectorFolder).append(" ");
            cmd.append("--output_filename ").append(vectorFilePrefix);
            
            log.info("执行命令: {}", cmd);
            
            // NOTE(review): Runtime.exec(String) tokenizes on whitespace — paths containing
            // spaces will break the command; consider ProcessBuilder with an argument list.
            Process proc = Runtime.getRuntime().exec(cmd.toString());
            
            // Drain stdout/stderr concurrently so the process cannot block on full pipes.
            PointCloudUtil.ProgressThread errorThread = new PointCloudUtil.ProgressThread(proc.getErrorStream(), null, "Python脚本错误输出");
            PointCloudUtil.ProgressThread outputThread = new PointCloudUtil.ProgressThread(proc.getInputStream(), null, "Python脚本标准输出");
            errorThread.start();
            outputThread.start();

            // Wait for completion.
            int exitCode = proc.waitFor();
            log.info("Python脚本执行完成，退出码: {}", exitCode);
            
            if (exitCode != 0) {
                throw new ServiceException("——Python脚本执行失败！");
            }
            
            // 5. Zip the vector directory and push the download URL to the client.
            AntZipUtil.zip(vectorFolderFile, zipFilePath);
            resultDto.setFileMd5(fileMd5);
            resultDto.setFileName(fileName);
            // Vector extraction succeeded (status "1").
            resultDto.setStatus("1");
            resultDto.setMessage("完成点云矢量文件提取！");
            resultDto.setFileUrl("/profile/" + VECTOR_FOLDER + "/" + fileMd5 + "/" + "zip" + "/" + vectorFilePrefix + ".zip");
            sseService.sendMessage(clientId, resultDto);

        } catch (Exception e) {
            resultDto.setFileMd5(fileMd5);
            resultDto.setFileName(fileName);
            resultDto.setStatus("0");
            resultDto.setMessage(fileName + e.getMessage());
            try {
                sseService.sendMessage(clientId, resultDto);
            } catch (IOException ex) {
                log.error("sse消息发送失败！");
            } finally {
                // Close the SSE channel.
                sseService.disconnect(clientId);
            }
            log.error("导出轨道点云为矢量失败：{}", e.getMessage());
        } finally {
            // Release the lock (no-op if never acquired: the script checks requestId).
            releaseLock(lockKey, requestId);
            // Close the SSE channel (also called from the catch block — presumably idempotent; confirm).
            sseService.disconnect(clientId);
        }
    }

    /**
     * Deletes a file record and, once the transaction commits, asynchronously removes the
     * physical files that belong to it.
     *
     * Fix: {@code getById} can return null for an unknown id; previously this still
     * scheduled {@code deleteFile(null)}, which NPEs inside the async task.
     *
     * @param id primary key of the {@link FileInfo} record
     */
    @Override
    @Transactional(rollbackFor = Exception.class)
    public void delete(Long id) {
        FileInfo fileInfo = fileInfoService.getById(id);
        // Unknown id: nothing to delete.
        if (fileInfo == null) {
            return;
        }

        // Remove the DB row first; the physical cleanup only runs if the commit succeeds.
        fileInfoService.removeById(id);

        // afterCommit: the async cleanup must not run if the transaction rolls back.
        TransactionSynchronizationManager.registerSynchronization(new TransactionSynchronization() {
            @Override
            public void afterCommit() {
                fileInfoService.deleteFile(fileInfo);
            }
        });
    }

    /**
     * Asynchronously deletes the physical artifacts of a removed file record: segmentation
     * outputs (segmented cloud, its tiles, vector exports) only when no surviving record is
     * still marked as handled, and the raw cloud plus its tiles only when no other record
     * shares the same MD5.
     *
     * Fix: the shared-handle check previously read {@code fileInfo.getHandle()} (the deleted
     * record) instead of the surviving record's handle, and could NPE by unboxing a null
     * {@code Integer} handle in {@code fileInfo1.getHandle() != 1}.
     *
     * @param fileInfo the already-deleted DB record describing the files on disk
     */
    @Async(value = "threadPoolTaskExecutor")
    public void deleteFile(FileInfo fileInfo) {
        // Another record with the same MD5 means the physical files are still shared.
        LambdaQueryWrapper<FileInfo> queryWrapper = Wrappers.lambdaQuery(FileInfo.class)
                .eq(FileInfo::getMd5, fileInfo.getMd5())
                .last("limit 1");
        FileInfo other = fileInfoService.getOne(queryWrapper);
        log.info("查询md5是否有其他文件关联：{}", other);
        // Segmentation artifacts may go unless some OTHER record is still marked handled.
        boolean noOtherHandled = other == null
                || other.getHandle() == null
                || other.getHandle() != 1;

        if (fileInfo.getHandle() != null && fileInfo.getHandle() == 1 && noOtherHandled) {
            // Segmented point-cloud file.
            String handleFolder = basePath + File.separator + POINT_CLOUD_FOLDER + File.separator + HANDLE_POINT_CLOUD_FOLDER;
            String handleFilePath = handleFolder + File.separator + "predictions"  + File.separator + fileInfo.getNewName();
            log.info("删除处理的点云文件：{}", handleFilePath);
            FileUtils.deleteFile(handleFilePath);
            // Tiles generated from the segmented cloud.
            String converterFolder = basePath + File.separator + CONVERTER_FOLDER + File.separator + HANDLE_POINT_CLOUD_FOLDER;
            String converterFilePath = converterFolder + File.separator + fileInfo.getMd5();
            FileUtils.deleteDirectoryRecursively(converterFilePath);
            log.info("删除处理的切片文件：{}", converterFilePath);
            // Exported vector files.
            String vectorFolder = basePath + File.separator + VECTOR_FOLDER + File.separator + fileInfo.getMd5();
            FileUtils.deleteDirectoryRecursively(vectorFolder);
            log.info("删除矢量文件：{}", vectorFolder);
        }

        // The raw cloud and its tiles stay as long as any record still references this MD5.
        if (other != null) {
            return;
        }

        // Original point-cloud file.
        String pointcloudFolder = basePath + File.separator + POINT_CLOUD_FOLDER;
        String pointcloudPath = pointcloudFolder + File.separator + fileInfo.getNewName();
        FileUtils.deleteFile(pointcloudPath);
        log.info("删除点云原始文件：{}", pointcloudPath);

        // If the stored type differs from the original, a pre-conversion (PDAL) source also exists.
        if (!fileInfo.getType().equals(FileUtils.getFileType(fileInfo.getNewName()))) {
            pointcloudPath = pointcloudFolder + File.separator + fileInfo.getMd5() + fileInfo.getType();
            FileUtils.deleteFile(pointcloudPath);
            log.info("删除pdal转格式原文件：{}", pointcloudPath);
        }

        // Tiles generated from the raw cloud.
        String converterFolder = basePath + File.separator + CONVERTER_FOLDER + File.separator + fileInfo.getMd5();
        FileUtils.deleteDirectoryRecursively(converterFolder);
        log.info("删除点云切片文件：{}", converterFolder);

    }

    /**
     * Builds the namespaced Redis key under which the uploaded chunk indices
     * of a file are tracked.
     *
     * @param configKey the file MD5 identifying the upload
     * @return the fully-qualified cache key
     */
    private String getCacheKey(String configKey) {
        return CacheConstants.FRAGMENT_UPLOAD_KEY + configKey;
    }

    /**
     * Attempts to take a Redis-backed distributed lock via the SETNX+EXPIRE Lua script.
     * The {@code requestId} is stored as the lock value so only the owner can release it.
     *
     * @param lockKey    key of the lock
     * @param requestId  owner token for this caller
     * @param expireTime lock TTL in seconds
     * @return {@code true} if this caller now holds the lock
     */
    private boolean acquireLock(String lockKey, String requestId, int expireTime) {
        try {
            DefaultRedisScript<Long> script = new DefaultRedisScript<>();
            script.setScriptText(LOCK_SCRIPT);
            script.setResultType(Long.class);
            Long outcome = redisTemplate.execute(
                    script, Collections.singletonList(lockKey), requestId, expireTime);
            // The Lua script yields 1 on success, 0 when the lock is already held.
            return Long.valueOf(1L).equals(outcome);
        } catch (Exception e) {
            log.error("获取分布式锁异常", e);
            return false;
        }
    }

    /**
     * Releases a distributed lock via a compare-and-delete Lua script: the key is removed
     * only when its stored value matches {@code requestId}, so a caller can never release
     * a lock held by someone else.
     *
     * @param lockKey   key of the lock
     * @param requestId owner token supplied at acquisition time
     * @return {@code true} if the lock was owned by this caller and deleted
     */
    private boolean releaseLock(String lockKey, String requestId) {
        try {
            DefaultRedisScript<Long> script = new DefaultRedisScript<>();
            script.setScriptText(UNLOCK_SCRIPT);
            script.setResultType(Long.class);
            Long outcome = redisTemplate.execute(
                    script, Collections.singletonList(lockKey), requestId);
            // The Lua script yields 1 when the key was deleted, 0 otherwise.
            return Long.valueOf(1L).equals(outcome);
        } catch (Exception e) {
            log.error("释放分布式锁异常", e);
            return false;
        }
    }

    /**
     * Ad-hoc manual test for {@code AntZipUtil.zip}.
     * NOTE(review): leftover developer scratch code with hard-coded Windows paths —
     * consider removing it from this production service class (or moving it to a test).
     */
    public static void main(String[] args) {
        File vectorFolderFile = new File("C:\\Users\\Administrator\\Desktop\\test");
        String zipFilePath = "C:\\Users\\Administrator\\Desktop\\test.zip";
        AntZipUtil.zip(vectorFolderFile, zipFilePath);
    }
}




