package com.detection.yolo.service;

import ai.onnxruntime.OnnxTensor;
import ai.onnxruntime.OrtEnvironment;
import ai.onnxruntime.OrtException;
import ai.onnxruntime.OrtSession;
import cn.hutool.core.io.FileUtil;
import cn.hutool.core.lang.Snowflake;
import cn.hutool.core.util.IdUtil;
import com.dahuatech.icc.exception.ClientException;
import com.dahuatech.icc.oauth.utils.HttpUtils;
import com.detection.common.config.DetectionConfig;
import com.detection.common.domain.*;
import com.detection.common.utils.spring.SpringUtils;
import com.detection.dahua.config.OauthConfigUtil;
import com.detection.hikvision.domain.CameraPreviewResultView;
import com.detection.hikvision.service.ArtemisCamerasService;
import com.detection.tcp.client.NettyTcpClient;
import com.detection.tcp.domain.TCPMessage;
import com.detection.tcp.filetransfer.client.FileUploadClient;
import com.detection.tcp.utils.CameraDetectionStatusUtils;
import com.detection.tcp.variable.YoloVariables;
import com.detection.yolo.cache.LocalCache;
import com.detection.yolo.config.ODConfig;
import com.detection.yolo.constant.Constants;
import com.detection.yolo.domain.*;
import com.detection.yolo.utils.*;
import com.example.network.demos.tcp.domain.FileUploadFile;
import lombok.extern.slf4j.Slf4j;
import org.opencv.core.Point;
import org.opencv.core.*;
import org.opencv.highgui.HighGui;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;
import org.opencv.videoio.VideoCapture;
import org.opencv.videoio.VideoWriter;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Service;

import java.awt.*;
import java.io.File;
import java.io.IOException;
import java.nio.FloatBuffer;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.nio.file.attribute.BasicFileAttributes;
import java.nio.file.attribute.FileTime;
import java.time.LocalDateTime;
import java.time.format.DateTimeFormatter;
import java.util.List;
import java.util.*;
import java.util.concurrent.*;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.stream.Collectors;
import java.util.stream.Stream;

import static com.detection.tcp.utils.DockingUtils.postForTCP;

@Service
@Slf4j
public class CameraDetectionService {

    /**
     * Number of threads in each stream-processing pool.
     */
    private static final int THREAD_POOL_SIZE = 8;

    /**
     * Frame-skip interval: only every DETECT_SKIP-th frame is run through inference.
     */
    private static final int DETECT_SKIP = 6;

    /**
     * Confidence threshold for fire detection.
     */
    private static final double FIRE_CONFIDENCE_THRESHOLD = 0.5;

    /**
     * Confidence threshold for smoking detection.
     */
    private static final double SMOKE_CONFIDENCE_THRESHOLD = 0.5;

    /**
     * Confidence threshold for phone detection.
     */
    private static final double PHONE_CONFIDENCE_THRESHOLD = 0.5;

    /**
     * Confidence threshold for casual-clothing detection.
     */
    private static final double CLOTHING_CONFIDENCE_THRESHOLD = 0.5;

    /**
     * Length of each recorded event clip, in seconds.
     */
    private static final int VIDEO_TIME = 5;

    /**
     * Font used when drawing detection labels (SimHei renders Chinese glyphs).
     */
    private static final Font DEFAULT_FONT = new Font("SimHei", Font.BOLD, 12);
    private static final int PRE_RECORD_SECONDS = 2;     // seconds of video buffered before an event
    private static final int FPS = 30;
    // Ring-buffer capacity in frames (pre-event backlog)
    private static final int BUFFER_FRAMES = FPS * PRE_RECORD_SECONDS;
    private static final int TOTAL_RECORD_FRAMES = FPS * VIDEO_TIME;
    /**
     * Person counter keyed by box label.
     * NOTE(review): static and shared across all camera threads — concurrent streams
     * will clobber each other's counts; verify whether this is intentional.
     */
    static Map<String, Integer> current = new ConcurrentHashMap<>();
    /**
     * Pool running the per-camera stream pipelines.
     */
    private final ExecutorService threadPool = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
    /**
     * Pool used for asynchronous event delivery after a clip is recorded.
     */
    private final ExecutorService sendThreadPool = Executors.newFixedThreadPool(THREAD_POOL_SIZE);
    /**
     * Whether to use the local camera instead of the configured remote streams.
     */

    private final boolean isLocal = false;
    /**
     * Delay before cross-network delivery of detected events (milliseconds).
     */
    private final long sleepTime = 10000;

    /**
     * Scheduled playback clip duration (seconds).
     */
    private static final int SCHEDULED_VIDEO_DURATION = 5;

    /**
     * Scheduled playback buffer size in frames (5 s * 30 FPS).
     */
    private static final int SCHEDULED_BUFFER_FRAMES = FPS * SCHEDULED_VIDEO_DURATION;

    @Autowired
    private FileUploadClient fileUploadClient;

    // Per-stream communication components are now created per camera inside
    // runStream/runLocalStream; the shared versions below are kept for reference.
//    private final BlockingQueue<Mat> frameQueue = new LinkedBlockingQueue<>(10);
//    private final BlockingQueue<RecordTask> recordTriggerQueue = new LinkedBlockingQueue<>();
//    private final CircularFrameBuffer circularBuffer = new CircularFrameBuffer(BUFFER_FRAMES);
//    private final AtomicBoolean running = new AtomicBoolean(true);

    /**
     * Consecutive-detection counters, keyed per stream.
     */

    private Map<String, Integer> frameCounters = new ConcurrentHashMap<>();

    /**
     * Starts detection for every configured stream.
     * <p>
     * In local mode a single pipeline is started for the built-in test camera
     * (index code "000"); otherwise one pipeline per camera returned by
     * {@link #getCameraList()} is submitted to the thread pool. The per-stream
     * stop-flag bookkeeping is registered before each pipeline is submitted.
     */
    public void startDetection() {
        if (isLocal) {
            initStreamStatus("000");
            threadPool.submit(() -> {
                try {
                    runLocalStream();
                } catch (Exception e) {
                    // Log with the throwable so the stack trace goes to the log,
                    // not stderr (original used printStackTrace()).
                    log.error("处理视频流时发生错误: {}", e.getMessage(), e);
                }
            });
        } else {
            List<YqCameraInfo> cameraList = getCameraList();
            if (cameraList.isEmpty()) {
                log.error("没有启用的视频流");
                return;
            }

            for (YqCameraInfo cameraInfo : cameraList) {
                initStreamStatus(cameraInfo.getIndexCode());
                threadPool.submit(() -> {
                    try {
                        runStream(cameraInfo);
                    } catch (Exception e) {
                        log.error("处理视频流时发生错误: {}", e.getMessage(), e);
                    }
                });
            }
        }
    }

    /**
     * Registers the three per-thread stop flags and the per-camera stop marker
     * for a stream. Extracted from the duplicated init code in both
     * {@code startDetection} paths.
     *
     * @param indexCode unique camera index code used as bookkeeping key
     */
    private void initStreamStatus(String indexCode) {
        Map<String, Boolean> streamThreadMap = new HashMap<>(16);
        streamThreadMap.put("isCaptureThreadStop", false);
        streamThreadMap.put("isInferenceThreadStop", false);
        streamThreadMap.put("isRecordThreadStop", false);
        CameraDetectionStatusUtils.CameraDetectionStatusMap.put(indexCode, false);
        CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.put(indexCode, streamThreadMap);
    }

    /**
     * Starts a detection pipeline for one dynamically supplied camera.
     *
     * @param indexCode         unique camera index code
     * @param cameraName        display name of the camera
     * @param hlsPath           stream URL (HLS/RTSP)
     * @param supportAlgorithms comma-separated algorithm codes this camera supports
     */
    public void startDetection(String indexCode, String cameraName, String hlsPath, String supportAlgorithms) {
        YqCameraInfo camera = YqCameraInfo.builder()
                .indexCode(indexCode)
                .name(cameraName)
                .streamUrl(hlsPath)
                .supportAlgorithms(supportAlgorithms)
                .build();

        // Register the per-thread stop flags for this stream before launching it.
        Map<String, Boolean> threadStopFlags = new HashMap<>(16);
        threadStopFlags.put("isCaptureThreadStop", false);
        threadStopFlags.put("isInferenceThreadStop", false);
        threadStopFlags.put("isRecordThreadStop", false);
        CameraDetectionStatusUtils.CameraDetectionStatusMap.put(indexCode, false);
        CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.put(indexCode, threadStopFlags);

        threadPool.submit(() -> {
            try {
                runStream(camera);
            } catch (Exception e) {
                log.error("处理视频流时发生错误: {}", e.getMessage());
                e.printStackTrace();
            }
        });
    }

    /**
     * Builds the list of camera streams to run detection on.
     * <p>
     * Currently returns a single hard-coded camera; the database-backed lookup
     * (yqCameraInfoService + preview-URL resolution) is not yet wired in.
     *
     * @return cameras to process; never {@code null}
     */
    private List<YqCameraInfo> getCameraList() {
        YqCameraInfo camera = YqCameraInfo.builder()
                .name("camera 3")
                .indexCode("000")
                .streamUrl("rtsp://192.168.10.4:554/openUrl/QUA46pW")
                .supportAlgorithms("111,222,333,444,555")
                .build();

        List<YqCameraInfo> cameras = new ArrayList<>();
        cameras.add(camera);
        return cameras;
    }

    /**
     * 启动视频监控系统
     */
    /**
     * Runs detection against the local/test camera stream.
     * <p>
     * Spawns one capture, one inference and one record thread that share
     * per-stream queues and a ring buffer, registers a JVM shutdown hook that
     * clears the run flag, and blocks until all three threads exit.
     */
    public void runLocalStream() {

        log.info("启动多线程视频监控系统...");

        YqCameraInfo cameraInfo = new YqCameraInfo();
        cameraInfo.setName("本机摄像头");
        cameraInfo.setIndexCode("000");
        cameraInfo.setSupportAlgorithms("111,222,333,444,555");
        // NOTE(review): the original branched on DetectionConfig.getIsVideo() but both
        // branches assigned the exact same URL, so the check was dead; collapsed here.
        cameraInfo.setStreamUrl("rtsp://192.168.10.4:554/openUrl/WfAJjy0");

        // Thread-safe plumbing — one set per camera stream.
        BlockingQueue<Mat> frameQueue = new LinkedBlockingQueue<>(10);
        BlockingQueue<RecordTask> recordTriggerQueue = new LinkedBlockingQueue<>();
        CircularFrameBuffer circularBuffer = new CircularFrameBuffer(BUFFER_FRAMES);
        AtomicBoolean running = new AtomicBoolean(true);
        ArrayBlockingQueue<List<YqCameraEvent>> eventQueue = new ArrayBlockingQueue<>(10);

        // Capture, inference and record worker threads.
        Thread captureThread = createRemoteCaptureThread(cameraInfo, frameQueue, circularBuffer, running);
        Thread inferenceThread = createInferenceThread(cameraInfo, frameQueue, recordTriggerQueue, running, eventQueue);
        Thread recordThread = createRecordThread(cameraInfo, recordTriggerQueue, running, frameQueue, circularBuffer, eventQueue);

        captureThread.start();
        inferenceThread.start();
        recordThread.start();

        // Shutdown hook: clear the run flag and give each thread 3s to wind down.
        Runtime.getRuntime().addShutdownHook(new Thread(() -> {
            log.info("收到关闭信号，正在停止...");
            running.set(false);
            try {
                captureThread.join(3000);
                inferenceThread.join(3000);
                recordThread.join(3000);
            } catch (InterruptedException e) {
                Thread.currentThread().interrupt();
            }
            log.info("所有线程已退出。");
        }));

        // Block the caller until all three worker threads terminate.
        try {
            captureThread.join();
            inferenceThread.join();
            recordThread.join();
        } catch (InterruptedException e) {
            log.warn("主线程被中断");
            Thread.currentThread().interrupt();
            running.set(false);
        }
    }

    /**
     * 创建推理线程（支持指定摄像头）
     */
    /**
     * Creates the inference thread for one camera stream.
     * <p>
     * Pulls frames from {@code frameQueue}, runs every {@link #DETECT_SKIP}-th frame
     * through the five ONNX models, and on any detected event pushes the events to
     * {@code eventQueue} and wakes the record thread via {@code recordTriggerQueue}.
     * Exits when {@code running} is cleared or the per-camera stop flag is set.
     *
     * @param cameraInfo         camera being processed
     * @param frameQueue         frames handed over by the capture thread
     * @param recordTriggerQueue queue used to trigger the record thread
     * @param running            run flag shared by the three stream threads
     * @param eventQueue         detected events awaiting the recorded video URL
     * @return the (not yet started) inference thread
     */
    private Thread createInferenceThread(YqCameraInfo cameraInfo,
                                         BlockingQueue<Mat> frameQueue,
                                         BlockingQueue<RecordTask> recordTriggerQueue,
                                         AtomicBoolean running,
                                         ArrayBlockingQueue<List<YqCameraEvent>> eventQueue) {
        return new Thread(() -> {
            try (OrtEnvironment env = OrtEnvironment.getEnvironment()) {
                OrtSession.SessionOptions sessionOptions = new OrtSession.SessionOptions();
                // sessionOptions.addCUDA(0); // enable GPU if available

                try (OrtSession personSession = env.createSession(DetectionConfig.getPersonModelPath(), sessionOptions);
                     OrtSession smokeSession = env.createSession(DetectionConfig.getSmokeModelPath(), sessionOptions);
                     OrtSession fireSession = env.createSession(DetectionConfig.getFireModelPath(), sessionOptions);
                     OrtSession phoneSession = env.createSession(DetectionConfig.getPhoneModelPath(), sessionOptions);
                     OrtSession clothingSession = env.createSession(DetectionConfig.getClothingModelPath(), sessionOptions)) {

                    Letterbox letterbox = new Letterbox();
                    int detectSkipIndex = 1;
                    int minDwDh = 0; // could be sourced from configuration

                    while (running.get()) {
                        // Boolean.TRUE.equals guards against auto-unboxing NPE: a sibling
                        // thread may already have removed this camera's map entry,
                        // making get() return null (the original unboxed it directly).
                        if (Boolean.TRUE.equals(CameraDetectionStatusUtils.CameraDetectionStatusMap.get(cameraInfo.getIndexCode()))) {
                            Map<String, Boolean> streamThreadMap = CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.get(cameraInfo.getIndexCode());
                            streamThreadMap.replace("isInferenceThreadStop", true);
                            CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.replace(cameraInfo.getIndexCode(), streamThreadMap);
                            // Last thread out removes the per-camera bookkeeping entries.
                            if (streamThreadMap.get("isCaptureThreadStop") && streamThreadMap.get("isRecordThreadStop")) {
                                CameraDetectionStatusUtils.CameraDetectionStatusMap.remove(cameraInfo.getIndexCode());
                                CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.remove(cameraInfo.getIndexCode());
                            }
                            break;
                        }
                        Mat img = frameQueue.poll(1, TimeUnit.SECONDS);
                        if (img == null || img.empty()) continue;

                        // Frame skipping: only every DETECT_SKIP-th frame is inferred.
                        if (detectSkipIndex % DETECT_SKIP != 0) {
                            img.release();
                            detectSkipIndex++;
                            continue;
                        }

                        try {
                            List<YqCameraEvent> events = processFrame(
                                    img, letterbox, personSession, smokeSession, fireSession,
                                    phoneSession, clothingSession, cameraInfo, minDwDh,
                                    null, null, null, null, null
                            );

                            if (!events.isEmpty()) {
                                eventQueue.offer(events);
                                Size frameSize = new Size(img.width(), img.height());
                                recordTriggerQueue.offer(new RecordTask(System.currentTimeMillis(), frameSize));
                                log.info("[{}] 检测到事件，触发录像", cameraInfo.getName());
                            }

                            detectSkipIndex++;
                        } catch (Exception e) {
                            log.error("推理异常: {}", cameraInfo.getName(), e);
                        } finally {
                            img.release(); // always free the native frame
                        }
                    }
                }
            } catch (Exception e) {
                log.error("推理线程初始化失败: {}", cameraInfo.getName(), e);
            }
            log.info("推理线程退出: {}", cameraInfo.getName());
        }, "Inference-" + cameraInfo.getIndexCode());
    }

    /**
     * 创建录像线程（支持指定摄像头）
     */
    /**
     * Creates the record thread for one camera stream.
     * <p>
     * Blocks on {@code recordTriggerQueue}; on each trigger writes the ring-buffered
     * pre-event frames plus live frames (up to {@link #TOTAL_RECORD_FRAMES} or a
     * ~5.1s wall-clock budget) to an H.264 MP4, sends the file over TCP, then
     * dispatches the pending events with the video URL asynchronously.
     *
     * @param cameraInfo         camera being processed
     * @param recordTriggerQueue triggers produced by the inference thread
     * @param running            run flag shared by the three stream threads
     * @param frameQueue         live frames from the capture thread
     * @param circularBuffer     ring buffer holding the pre-event frames
     * @param eventQueue         events waiting for the recorded video URL
     * @return the (not yet started) record thread
     */
    private Thread createRecordThread(YqCameraInfo cameraInfo,
                                      BlockingQueue<RecordTask> recordTriggerQueue,
                                      AtomicBoolean running,
                                      BlockingQueue<Mat> frameQueue,
                                      CircularFrameBuffer circularBuffer,
                                      ArrayBlockingQueue<List<YqCameraEvent>> eventQueue) {
        return new Thread(() -> {
            int fourcc = VideoWriter.fourcc('a', 'v', 'c', '1'); // H.264

            while (running.get()) {
                try {
                    RecordTask task = recordTriggerQueue.take();

                    Size frameSize = task.frameSize;
                    String yyMMdd = LocalDateTime.now().format(DateTimeFormatter.ofPattern("yyMMdd"));
                    String videoName = cameraInfo.getIndexCode() + "_" +
                            LocalDateTime.now().format(DateTimeFormatter.ofPattern("HHmmss")) + ".mp4";
                    String dirPath = DetectionConfig.getSaveVideoPath() + "/" + yyMMdd;
                    String outputPath = dirPath + "/" + videoName;

                    Files.createDirectories(Paths.get(dirPath));

                    VideoWriter writer = new VideoWriter(outputPath, fourcc, 30, frameSize, true);
                    try {
                        if (!writer.isOpened()) {
                            log.error("VideoWriter 打开失败: {}", outputPath);
                            continue; // finally still releases the writer
                        }

                        log.info("开始录制: {} -> {}", cameraInfo.getName(), outputPath);

                        // Write the pre-event frames captured in the ring buffer.
                        // Release every clone, including size-mismatched ones the
                        // original leaked.
                        List<Mat> preFrames = circularBuffer.getRecentFrames(BUFFER_FRAMES);
                        for (Mat mat : preFrames) {
                            if (mat.size().equals(frameSize)) {
                                writer.write(mat);
                            }
                            mat.release();
                        }

                        // Write live frames until the clip is full or the time budget expires.
                        long start = System.currentTimeMillis();
                        int recorded = preFrames.size();
                        while (recorded < TOTAL_RECORD_FRAMES && (System.currentTimeMillis() - start) < 5100) {
                            Mat next = frameQueue.poll(100, TimeUnit.MILLISECONDS);
                            if (next != null && !next.empty()) {
                                writer.write(next);
                                recorded++;
                                next.release();
                            }
                        }
                    } finally {
                        // The original never released the writer, leaving the MP4
                        // container unfinalized and leaking native resources.
                        writer.release();
                    }

                    log.info("录像保存: {}", outputPath);
                    sendTcpFile(outputPath);
                    // Deliver the pending events (with the video URL) asynchronously.
                    sendThreadPool.submit(() -> {
                        try {
                            sendRecordedEvent(outputPath, dirPath, videoName, eventQueue);
                        } catch (IOException | InterruptedException e) {
                            throw new RuntimeException(e);
                        }
                    });
                    // Boolean.TRUE.equals guards against auto-unboxing NPE after a
                    // sibling thread removed this camera's entry.
                    if (Boolean.TRUE.equals(CameraDetectionStatusUtils.CameraDetectionStatusMap.get(cameraInfo.getIndexCode()))) {
                        Map<String, Boolean> streamThreadMap = CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.get(cameraInfo.getIndexCode());
                        streamThreadMap.replace("isRecordThreadStop", true);
                        CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.replace(cameraInfo.getIndexCode(), streamThreadMap);
                        if (streamThreadMap.get("isCaptureThreadStop") && streamThreadMap.get("isInferenceThreadStop")) {
                            CameraDetectionStatusUtils.CameraDetectionStatusMap.remove(cameraInfo.getIndexCode());
                            CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.remove(cameraInfo.getIndexCode());
                        }
                        break;
                    }
                } catch (Exception e) {
                    if (running.get()) {
                        log.error("录制线程异常: {}", cameraInfo.getName(), e);
                    }
                }
            }
            log.info("录制线程退出: {}", cameraInfo.getName());
        }, "Record-" + cameraInfo.getIndexCode());
    }

    /**
     * Pushes a recorded file across the TCP file-transfer channel.
     *
     * @param fileFullPath absolute path of the file to send
     * @throws Exception if the transfer client fails to connect/send
     */
    private void sendTcpFile(String fileFullPath) throws Exception {
        File file = new File(fileFullPath);

        FileUploadFile uploadFile = new FileUploadFile();
        uploadFile.setFile(file);
        // NOTE(review): the "md5" field actually carries the file name, not a digest.
        uploadFile.setFile_md5(file.getName());
        uploadFile.setStarPos(0); // start transfer at the beginning of the file
        // Strip the local profile prefix so the remote side gets a relative path.
        uploadFile.setFilePath(fileFullPath.replace(DetectionConfig.getProfile(), ""));

        fileUploadClient.connect(uploadFile);
        log.info("发送文件成功: {}", fileFullPath);
    }

    // ------------------ 辅助方法 ------------------

    /**
     * Attaches the recorded video's URL to all queued events and sends them out.
     * Sleeps {@code sleepTime} ms first so the video file has time to cross the
     * network gap before the events referencing it arrive.
     *
     * @param outputPath local path of the recorded video (currently unused here)
     * @param dirPath    directory the video was written to
     * @param videoName  file name of the recorded video
     * @param eventQueue pending event batches to deliver
     * @throws IOException          if building the file URL fails
     * @throws InterruptedException if the delay sleep is interrupted
     */
    private void sendRecordedEvent(String outputPath, String dirPath, String videoName, ArrayBlockingQueue<List<YqCameraEvent>> eventQueue) throws IOException, InterruptedException {
        Thread.sleep(sleepTime);

        List<List<YqCameraEvent>> batches = new ArrayList<>();
        eventQueue.drainTo(batches);

        String videoUrl = DetectionConfig.getSendAddress() + FileUploadUtils.getPathFileName(dirPath, videoName);

        for (List<YqCameraEvent> batch : batches) {
            for (YqCameraEvent event : batch) {
                event.setVideoUrl(videoUrl);
                sendCameraEvent(event);
            }
        }
    }


    // ------------------ 内部类 ------------------

    /**
     * 创建远程视频流采集线程
     */
    /**
     * Creates the capture thread for one camera stream.
     * <p>
     * Connects to the stream URL (or device 0 when the URL is "0"), reads frames,
     * stores a clone in the ring buffer and offers the same clone to the inference
     * queue (dropped when the queue is full). On connect failure it refreshes the
     * URL from the IOT platform (Dahua token refresh or Hikvision preview URL)
     * and retries after 5s; on read failure it reconnects after 3s.
     *
     * @param cameraInfo     camera being captured
     * @param frameQueue     queue feeding the inference thread
     * @param circularBuffer ring buffer of pre-event frames
     * @param running        run flag shared by the three stream threads
     * @return the (not yet started) capture thread
     */
    private Thread createRemoteCaptureThread(YqCameraInfo cameraInfo,
                                             BlockingQueue<Mat> frameQueue,
                                             CircularFrameBuffer circularBuffer,
                                             AtomicBoolean running) {
        return new Thread(() -> {
            VideoCapture video = new VideoCapture();
            String streamUrl = cameraInfo.getStreamUrl();

            // Optional capture tuning for RTSP:
            // video.set(CAP_PROP_BUFFERSIZE, 1); // minimize latency
            // video.set(CAP_PROP_FPS, TARGET_FPS);

            boolean connected = false;
            Mat frame = new Mat();

            while (running.get()) {
                // Boolean.TRUE.equals guards against auto-unboxing NPE: a sibling
                // thread may already have removed this camera's map entry.
                if (Boolean.TRUE.equals(CameraDetectionStatusUtils.CameraDetectionStatusMap.get(cameraInfo.getIndexCode()))) {
                    Map<String, Boolean> streamThreadMap = CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.get(cameraInfo.getIndexCode());
                    streamThreadMap.replace("isCaptureThreadStop", true);
                    CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.replace(cameraInfo.getIndexCode(), streamThreadMap);
                    if (streamThreadMap.get("isRecordThreadStop") && streamThreadMap.get("isInferenceThreadStop")) {
                        CameraDetectionStatusUtils.CameraDetectionStatusMap.remove(cameraInfo.getIndexCode());
                        CameraDetectionStatusUtils.CameraDetectionStatusThreadMap.remove(cameraInfo.getIndexCode());
                    }
                    break;
                }
                if (!connected) {
                    log.info("正在连接视频流: {}", streamUrl);

                    if ("0".equals(streamUrl) ? video.open(0) : video.open(streamUrl)) {
                        log.info("成功连接视频流: {}", cameraInfo.getName());
                        connected = true;
                    } else {
                        log.warn("连接失败，5秒后重试: {}", streamUrl);
                        try {
                            if ("dahua".equals(DetectionConfig.getIOTplatform())) {
                                // Refresh the token portion of the Dahua stream URL.
                                String[] zStreamUrl = streamUrl.split("token=");
                                String token = HttpUtils.getToken(OauthConfigUtil.getOauthConfig()).getAccess_token();
                                streamUrl = zStreamUrl[0] + "token=" + token;
                            } else if ("hikvision".equals(DetectionConfig.getIOTplatform())) {
                                // Ask the Hikvision platform for a fresh preview URL.
                                streamUrl = startHaiKangDevice(cameraInfo.getIndexCode());
                            }

                            Thread.sleep(5000);
                        } catch (InterruptedException e) {
                            Thread.currentThread().interrupt(); // preserve interrupt status
                            break;
                        } catch (ClientException e) {
                            throw new RuntimeException(e);
                        }
                        continue;
                    }
                }

                if (!video.read(frame) || frame.empty()) {
                    log.warn("读取帧失败，重新连接: {}", cameraInfo.getName());
                    connected = false;
                    frame.release();
                    video.release();
                    try {
                        Thread.sleep(3000);
                    } catch (InterruptedException e) {
                        Thread.currentThread().interrupt(); // preserve interrupt status
                        break;
                    }
                    continue;
                }

                // Clone the frame; the clone is shared by the ring buffer and the
                // inference queue (released by whoever drops it last).
                Mat clone = new Mat();
                frame.copyTo(clone);
                circularBuffer.add(clone);

                if (!frameQueue.offer(clone)) {
                    log.debug("推理队列满，跳过一帧");
                    clone.release();
                }
            }

            // Release native resources (frame/video are never null here).
            if (!frame.empty()) frame.release();
            video.release();
            log.info("采集线程退出: {}", cameraInfo.getName());
        }, "Capture-" + cameraInfo.getIndexCode());
    }

    /**
     * Resolves a fresh Hikvision preview URL for the given camera index code.
     * Synchronized so concurrent capture threads do not hit the platform API at once.
     *
     * @param indexCode camera index code on the Hikvision platform
     * @return preview stream URL
     */
    private synchronized String startHaiKangDevice(String indexCode) {
        ArtemisCamerasService camerasService = SpringUtils.getBean(ArtemisCamerasService.class);
        CameraPreviewResultView preview = camerasService.getCameraPreviewUrls(indexCode, null, null, null);
        return preview.getData().getUrl();
    }

    /**
     * 处理单个远程视频流（RTSP/RTMP/HTTP等）
     *
     * @param cameraInfo 摄像头配置信息
     */
    /**
     * Processes a single remote video stream (RTSP/RTMP/HTTP etc.).
     * Spins up the capture/inference/record trio with per-camera queues and a
     * ring buffer, then blocks until all three threads finish.
     *
     * @param cameraInfo camera configuration for this stream
     */
    private void runStream(YqCameraInfo cameraInfo) {
        log.info("开始处理视频流: {} (ID: {})", cameraInfo.getName(), cameraInfo.getIndexCode());

        // Per-camera, thread-safe plumbing shared by the three worker threads.
        BlockingQueue<Mat> frames = new LinkedBlockingQueue<>(10);
        BlockingQueue<RecordTask> recordTriggers = new LinkedBlockingQueue<>();
        CircularFrameBuffer ringBuffer = new CircularFrameBuffer(BUFFER_FRAMES);
        AtomicBoolean active = new AtomicBoolean(true);
        ArrayBlockingQueue<List<YqCameraEvent>> pendingEvents = new ArrayBlockingQueue<>(10);

        Thread capture = createRemoteCaptureThread(cameraInfo, frames, ringBuffer, active);
        Thread inference = createInferenceThread(cameraInfo, frames, recordTriggers, active, pendingEvents);
        Thread recorder = createRecordThread(cameraInfo, recordTriggers, active, frames, ringBuffer, pendingEvents);

        capture.start();
        inference.start();
        recorder.start();

        // Lifecycle could also be managed via a global registry of run flags
        // instead of blocking here; kept simple for now.
        try {
            capture.join();
            inference.join();
            recorder.join();
        } catch (InterruptedException e) {
            log.warn("视频流处理线程被中断: {}", cameraInfo.getIndexCode());
            active.set(false);
            Thread.currentThread().interrupt();
        }

        log.info("视频流处理结束: {}", cameraInfo.getIndexCode());
    }

    /**
     * Runs one frame through all five detection models and converts the raw
     * outputs into camera events.
     * <p>
     * NOTE(review): the five trailing "output data" parameters were assigned inside
     * the original method, but Java passes references by value, so those assignments
     * never reached the caller (which always passes null). They are kept only for
     * interface compatibility; locals are used instead.
     *
     * @return the detected events for this frame, or an empty list on failure
     * @throws OrtException declared for interface compatibility
     * @throws IOException  declared for interface compatibility
     */
    private List<YqCameraEvent> processFrame(Mat img, Letterbox letterbox, OrtSession personSession, OrtSession smokeSession, OrtSession fireSession, OrtSession phoneSession, OrtSession clothingSession, YqCameraInfo yqCameraInfo, int minDwDh, float[][] personOutputData, float[][][] smokeOutputData, float[][][] fireOutputData, float[][][] phoneOutputData, float[][][] clothingOutputData) throws OrtException, IOException {
        OnnxTensor tensor = null;
        OrtSession.Result smokeOutput = null;
        OrtSession.Result personOutput = null;
        OrtSession.Result fireOutput = null;
        OrtSession.Result phoneOutput = null;
        OrtSession.Result clothingOutput = null;
        Mat cloned = null;
        Mat image = null;
        try {
            // Preprocess: letterbox to 640x640, BGR -> RGB, scale to [0,1].
            cloned = img.clone();
            image = letterbox.letterbox(cloned);
            Imgproc.cvtColor(image, image, Imgproc.COLOR_BGR2RGB);

            image.convertTo(image, CvType.CV_32FC1, 1. / 255);
            float[] whc = new float[3 * 640 * 640];
            image.get(0, 0, whc);
            float[] chw = ImageUtil.whc2cwh(whc); // HWC -> CHW layout for ONNX

            FloatBuffer inputBuffer = FloatBuffer.wrap(chw);
            tensor = OnnxTensor.createTensor(OrtEnvironment.getEnvironment(), inputBuffer, new long[]{1, 3, 640, 640});

            Map<String, OnnxTensor> inputMap = new HashMap<>();
            // Assumes all five models share the same input name — TODO confirm.
            inputMap.put(personSession.getInputInfo().keySet().iterator().next(), tensor);

            personOutput = personSession.run(inputMap);
            smokeOutput = smokeSession.run(inputMap);
            fireOutput = fireSession.run(inputMap);
            phoneOutput = phoneSession.run(inputMap);
            clothingOutput = clothingSession.run(inputMap);

            float[][] personData = (float[][]) personOutput.get(0).getValue();
            float[][][] smokeData = (float[][][]) smokeOutput.get(0).getValue();
            float[][][] fireData = (float[][][]) fireOutput.get(0).getValue();
            float[][][] phoneData = (float[][][]) phoneOutput.get(0).getValue();
            float[][][] clothingData = (float[][][]) clothingOutput.get(0).getValue();

            return processDetectionResults(img, letterbox, personData, smokeData, fireData, phoneData, clothingData, yqCameraInfo, minDwDh);
        } catch (Exception e) {
            log.error("推理出错: {}", e.getMessage(), e);
        } finally {
            // Release native resources; the original leaked the cloned/letterboxed Mat
            // on every inferred frame.
            if (image != null) {
                image.release();
            }
            if (cloned != null && cloned != image) {
                cloned.release();
            }
            if (personOutput != null) {
                personOutput.close();
            }
            if (smokeOutput != null) {
                smokeOutput.close();
            }
            if (fireOutput != null) {
                fireOutput.close();
            }
            if (phoneOutput != null) {
                phoneOutput.close();
            }
            if (clothingOutput != null) {
                clothingOutput.close();
            }
            if (tensor != null) {
                tensor.close();
            }
        }
        return new ArrayList<>();
    }

    /**
     * Dispatches the per-model detection outputs according to the algorithms this
     * camera supports and collects the resulting events.
     * <p>
     * Algorithm codes: 111 person, 222 smoking, 333 fire, 444 phone, 555 casual clothing.
     *
     * @return events produced by the supported detectors (possibly empty)
     * @throws Exception if any individual detector fails
     */
    private List<YqCameraEvent> processDetectionResults(Mat img, Letterbox letterbox, float[][] personOutputData, float[][][] smokeOutputData, float[][][] fireOutputData, float[][][] phoneOutputData, float[][][] clothingOutputData, YqCameraInfo yqCameraInfo, int minDwDh) throws Exception {
        List<YqCameraEvent> events = new ArrayList<>();
        for (String algorithm : yqCameraInfo.getSupportAlgorithms().split(",")) {
            YqCameraEvent event;
            switch (algorithm) {
                case "111":
                    event = processPersonDetection(img, letterbox, personOutputData, minDwDh, yqCameraInfo);
                    break;
                case "222":
                    event = processSmokeDetection(img, letterbox, smokeOutputData, minDwDh, yqCameraInfo);
                    break;
                case "333":
                    event = processFireDetection(img, letterbox, fireOutputData, minDwDh, yqCameraInfo);
                    break;
                case "444":
                    event = processPhoneDetection(img, letterbox, phoneOutputData, minDwDh, yqCameraInfo);
                    break;
                case "555":
                    event = processClothingDetection(img, letterbox, clothingOutputData, minDwDh, yqCameraInfo);
                    break;
                default:
                    event = null; // unknown algorithm code — ignore
                    break;
            }
            if (Objects.nonNull(event)) {
                events.add(event);
            }
        }
        return events;
    }

    /**
     * Processes person-model output: draws a box per detected person on {@code img},
     * tracks the per-frame person count, records important-carrier / materials-equipment
     * sightings for dictionary-configured cameras, and raises a person-gather event
     * (eventType 111) once the count reaches the configured threshold.
     *
     * @param img              frame to annotate (mutated in place)
     * @param letterbox        letterbox transform used at inference time, for mapping
     *                         model coordinates back to the original frame
     * @param personOutputData flat detection rows; each row is parsed by {@link ODResult}
     * @param minDwDh          scale basis for line thickness / font size
     * @param yqCameraInfo     source camera metadata
     * @return the gather event to publish, or {@code null} when nothing qualifies
     */
    private YqCameraEvent processPersonDetection(Mat img, Letterbox letterbox, float[][] personOutputData, int minDwDh, YqCameraInfo yqCameraInfo) throws Exception {
        current.clear();
        for (float[] x : personOutputData) {
            ODResult odResult = new ODResult(x);
            // Only class id 0 ("person") is relevant to this model.
            if (odResult.getClsId() != 0) {
                continue;
            }

            String boxName = "person";
            // Idiomatic increment-or-insert (replaces the containsKey/put pair).
            current.merge(boxName, 1, Integer::sum);

            // Undo letterbox padding/scaling to map the box back onto the original frame.
            Point topLeft = new Point((odResult.getX0() - letterbox.getDw()) / letterbox.getRatio(), (odResult.getY0() - letterbox.getDh()) / letterbox.getRatio());
            Point bottomRight = new Point((odResult.getX1() - letterbox.getDw()) / letterbox.getRatio(), (odResult.getY1() - letterbox.getDh()) / letterbox.getRatio());
            Scalar color = new Scalar(0, 255, 0); // BGR green

            Imgproc.rectangle(img, topLeft, bottomRight, color, minDwDh / ODConfig.lineThicknessRatio);

            // Chinese label drawn above the box (OpenCV putText cannot render CJK glyphs).
            String chineseText = "人员检测";
            Point textOrigin = new Point((odResult.getX0() - letterbox.getDw()) / letterbox.getRatio(), (odResult.getY0() - letterbox.getDh()) / letterbox.getRatio() - 3 - 10);
            OpenCVChineseUtil.drawText(img, chineseText, "person", textOrigin, color, DEFAULT_FONT, (int) (minDwDh / ODConfig.fontSizeRatio * 6));

            // NOTE(review): debug preview window; will fail on headless servers — confirm it is intended.
            HighGui.imshow("result", img);

            // `current` only ever holds the "person" key, so this loop runs at most once
            // (note the unconditional break at the bottom).
            for (Map.Entry<String, Integer> entry : current.entrySet()) {
                int currentCount = entry.getValue();
                if (currentCount >= 1) {
                    // 重要载体监测: cameras listed in the dictionary trigger a carrier record,
                    // throttled by a per-camera cache entry.
                    for (SysDictData dict : getImportantCarrierCameraName()) {
                        String carrierKey = Constants.CAMERA_DETECTION + yqCameraInfo.getStreamUrl() + ":" + "IMPORTANT_CARRIER";
                        if (dict.getDictValue().equals(yqCameraInfo.getName()) && LocalCache.CAMERA_CACHE.getIfPresent(carrierKey) == null) {
                            String fileName = System.currentTimeMillis() + ".png";
                            FileUtil.mkdir(DetectionConfig.getPersonDetectionPathForCarrier());
                            String outputPath = DetectionConfig.getPersonDetectionPathForCarrier() + "/" + fileName;
                            Imgcodecs.imwrite(outputPath, img);
                            sendTcpFile(outputPath);
                            YqImportantCarrierRecord yqImportantCarrierRecord = YqImportantCarrierRecord.builder()
                                    .eventTime(new Date())
                                    .campCode(DetectionConfig.getCampCode())
                                    .campName(DetectionConfig.getCampName())
                                    .backImageUrl(FileUploadUtils.getPathFileName(DetectionConfig.getPersonDetectionPathForCarrier(), fileName))
                                    .name(yqCameraInfo.getName())
                                    .identifyFlag(yqCameraInfo.getIndexCode())
                                    .build();
                            sendImportantCarrier(yqImportantCarrierRecord);
                            LocalCache.CAMERA_CACHE.put(carrierKey, true);
                        }
                    }
                    // 物资器材监测: analogous record for equipment cameras.
                    for (SysDictData dict : getMaterialsEquipmentCameraName()) {
                        String equipmentKey = Constants.CAMERA_DETECTION + yqCameraInfo.getStreamUrl() + ":" + "MATERIALS_EQUIPMENT";
                        if (dict.getDictValue().equals(yqCameraInfo.getName()) && LocalCache.CAMERA_CACHE.getIfPresent(equipmentKey) == null) {
                            String fileName = System.currentTimeMillis() + ".png";
                            FileUtil.mkdir(DetectionConfig.getPersonDetectionPathForEquipment());
                            String outputPath = DetectionConfig.getPersonDetectionPathForEquipment() + "/" + fileName;
                            Imgcodecs.imwrite(outputPath, img);
                            sendTcpFile(outputPath);
                            YqMaterialsEquipmentRecord yqMaterialsEquipmentMovement = YqMaterialsEquipmentRecord.builder()
                                    .eventTime(new Date())
                                    .campCode(DetectionConfig.getCampCode())
                                    .campName(DetectionConfig.getCampName())
                                    .backImageUrl(FileUploadUtils.getPathFileName(DetectionConfig.getPersonDetectionPathForEquipment(), fileName))
                                    .name(yqCameraInfo.getName())
                                    .identifyFlag(yqCameraInfo.getIndexCode())
                                    .build();
                            sendMaterialsEquipment(yqMaterialsEquipmentMovement);
                            LocalCache.CAMERA_CACHE.put(equipmentKey, true);
                        }
                    }
                }
                // 人员聚集: one gather event per cooldown window once the threshold is met.
                String gatherKey = Constants.CAMERA_DETECTION + yqCameraInfo.getStreamUrl() + ":" + "PERSON_GATHER";
                if (currentCount >= getpersonGather() && LocalCache.CAMERA_CACHE.getIfPresent(gatherKey) == null) {
                    String fileName = System.currentTimeMillis() + ".png";
                    FileUtil.mkdir(DetectionConfig.getPersonDetectionPath());
                    String outputPath = DetectionConfig.getPersonDetectionPath() + "/" + fileName;
                    Imgcodecs.imwrite(outputPath, img);
                    sendTcpFile(outputPath);
                    log.info("{}个 {} 出现了", currentCount, entry.getKey());
                    Snowflake snowflake = IdUtil.getSnowflake(1L, 1L);
                    String eventId = snowflake.nextIdStr();
                    // Note: the original set .eventStatus("0") twice; the duplicate was removed.
                    YqCameraEvent yqCameraEvent = YqCameraEvent.builder()
                            .eventId(eventId)
                            .eventStatus("0")
                            .ignoreFlag("0")
                            .parkCode("010001")
                            .happenTime(new Date())
                            .imageUrl(DetectionConfig.getSendAddress() + FileUploadUtils.getPathFileName(DetectionConfig.getPersonDetectionPath(), fileName))
                            .eventType(111)
                            .srcName(yqCameraInfo.getName())
                            .srcType("carema")
                            .campCode(DetectionConfig.getCampCode())
                            .campName(DetectionConfig.getCampName())
                            .build();
                    LocalCache.CAMERA_CACHE.put(gatherKey, true);
                    return yqCameraEvent;
                }
                break;
            }
        }
        return null;
    }

    /**
     * Scans smoking-model output for boxes above {@code SMOKE_CONFIDENCE_THRESHOLD},
     * annotates {@code img}, and raises a smoke event (eventType 222) once the
     * detection has persisted for 3 consecutive frames and the per-camera cooldown
     * cache has no entry.
     *
     * @param smokeOutputData model output laid out as [1][cx,cy,w,h,conf][numBoxes]
     * @return the event to publish, or {@code null} when debounced, suppressed, or nothing found
     */
    private YqCameraEvent processSmokeDetection(Mat img, Letterbox letterbox, float[][][] smokeOutputData, int minDwDh, YqCameraInfo yqCameraInfo) throws Exception {
        int numBoxes = smokeOutputData[0][0].length;
        for (int i = 0; i < numBoxes; i++) {
            float confidence = smokeOutputData[0][4][i];
            if (confidence < SMOKE_CONFIDENCE_THRESHOLD) continue;

            float centerX = smokeOutputData[0][0][i];
            float centerY = smokeOutputData[0][1][i];
            float width = smokeOutputData[0][2][i];
            float height = smokeOutputData[0][3][i];

            // Undo letterbox padding/scaling to map the box back onto the original frame.
            double x0 = (centerX - width / 2 - letterbox.getDw()) / letterbox.getRatio();
            double y0 = (centerY - height / 2 - letterbox.getDh()) / letterbox.getRatio();
            double x1 = (centerX + width / 2 - letterbox.getDw()) / letterbox.getRatio();
            double y1 = (centerY + height / 2 - letterbox.getDh()) / letterbox.getRatio();

            Point topLeft = new Point(x0, y0);
            Point bottomRight = new Point(x1, y1);
            Scalar color = new Scalar(42, 42, 165); // BGR order (OpenCV default): a brown tone

            Imgproc.rectangle(img, topLeft, bottomRight, color, minDwDh / ODConfig.lineThicknessRatio);

            // Chinese label drawn above the box (OpenCV putText cannot render CJK glyphs).
            String chineseText = String.format("吸烟检测：%.2f", confidence);
            Point textOrigin = new Point(x0, y0 - 10);
            OpenCVChineseUtil.drawText(img, chineseText, "smoke", textOrigin, color, DEFAULT_FONT, (int) (minDwDh / ODConfig.fontSizeRatio * 6));

            log.info("Detected smoke object with confidence: {} at coordinates: ({}, {}, {}, {})", confidence, x0, y0, x1, y1);

            String key = Constants.CAMERA_DETECTION + yqCameraInfo.getStreamUrl() + ":" + "smoke";
            // Debounce: require the detection to persist for 3 consecutive frames.
            if (frameCounters.getOrDefault(key, 0) < 3) {
                frameCounters.merge(key, 1, Integer::sum);
                return null;
            }
            frameCounters.put(key, 0);
            // Cooldown: at most one event per camera while the cache entry is alive.
            if (LocalCache.CAMERA_CACHE.getIfPresent(key) == null) {
                YqCameraEvent yqCameraEvent = saveDetectionImage(img, DetectionConfig.getSmokeDetectionPath(), 222, yqCameraInfo);
                LocalCache.CAMERA_CACHE.put(key, true);
                return yqCameraEvent;
            }
            return null;
        }
        return null;
    }

    /**
     * Scans fire-model output for boxes above {@code FIRE_CONFIDENCE_THRESHOLD},
     * annotates {@code img}, and raises a fire event (eventType 333) once the
     * detection has persisted for 3 consecutive frames and the per-camera cooldown
     * cache has no entry.
     *
     * @param fireOutputData model output laid out as [1][cx,cy,w,h,conf][numBoxes]
     * @return the event to publish, or {@code null} when debounced, suppressed, or nothing found
     */
    private YqCameraEvent processFireDetection(Mat img, Letterbox letterbox, float[][][] fireOutputData, int minDwDh, YqCameraInfo yqCameraInfo) throws Exception {
        int numBoxes = fireOutputData[0][0].length;
        for (int i = 0; i < numBoxes; i++) {
            float confidence = fireOutputData[0][4][i];
            if (confidence < FIRE_CONFIDENCE_THRESHOLD) continue;

            float centerX = fireOutputData[0][0][i];
            float centerY = fireOutputData[0][1][i];
            float width = fireOutputData[0][2][i];
            float height = fireOutputData[0][3][i];

            // Undo letterbox padding/scaling to map the box back onto the original frame.
            double x0 = (centerX - width / 2 - letterbox.getDw()) / letterbox.getRatio();
            double y0 = (centerY - height / 2 - letterbox.getDh()) / letterbox.getRatio();
            double x1 = (centerX + width / 2 - letterbox.getDw()) / letterbox.getRatio();
            double y1 = (centerY + height / 2 - letterbox.getDh()) / letterbox.getRatio();

            Point topLeft = new Point(x0, y0);
            Point bottomRight = new Point(x1, y1);
            Scalar color = new Scalar(0, 0, 255); // BGR red

            Imgproc.rectangle(img, topLeft, bottomRight, color, minDwDh / ODConfig.lineThicknessRatio);

            // Chinese label drawn above the box (OpenCV putText cannot render CJK glyphs).
            String chineseText = String.format("火焰检测：%.2f", confidence);
            Point textOrigin = new Point(x0, y0 - 10);
            OpenCVChineseUtil.drawText(img, chineseText, "fire", textOrigin, color, DEFAULT_FONT, (int) (minDwDh / ODConfig.fontSizeRatio * 6));

            log.info("Detected fire object with confidence: {} at coordinates: ({}, {}, {}, {})", confidence, x0, y0, x1, y1);

            String key = Constants.CAMERA_DETECTION + yqCameraInfo.getStreamUrl() + ":" + "fire";
            // Debounce: require the detection to persist for 3 consecutive frames.
            if (frameCounters.getOrDefault(key, 0) < 3) {
                frameCounters.merge(key, 1, Integer::sum);
                return null;
            }
            frameCounters.put(key, 0);
            // Cooldown: at most one event per camera while the cache entry is alive.
            if (LocalCache.CAMERA_CACHE.getIfPresent(key) == null) {
                YqCameraEvent yqCameraEvent = saveDetectionImage(img, DetectionConfig.getFireDetectionPath(), 333, yqCameraInfo);
                LocalCache.CAMERA_CACHE.put(key, true);
                return yqCameraEvent;
            }
            return null;
        }
        return null;
    }

    /**
     * Scans phone-model output for boxes above {@code PHONE_CONFIDENCE_THRESHOLD},
     * annotates {@code img}, and raises a phone event (eventType 444) once the
     * detection has persisted for 3 consecutive frames and the per-camera cooldown
     * cache has no entry.
     *
     * @param phoneOutputData model output laid out as [1][cx,cy,w,h,conf][numBoxes]
     * @return the event to publish, or {@code null} when debounced, suppressed, or nothing found
     */
    private YqCameraEvent processPhoneDetection(Mat img, Letterbox letterbox, float[][][] phoneOutputData, int minDwDh, YqCameraInfo yqCameraInfo) throws Exception {
        int numBoxes = phoneOutputData[0][0].length;
        for (int i = 0; i < numBoxes; i++) {
            float confidence = phoneOutputData[0][4][i];
            if (confidence < PHONE_CONFIDENCE_THRESHOLD) continue;

            float centerX = phoneOutputData[0][0][i];
            float centerY = phoneOutputData[0][1][i];
            float width = phoneOutputData[0][2][i];
            float height = phoneOutputData[0][3][i];

            // Undo letterbox padding/scaling to map the box back onto the original frame.
            double x0 = (centerX - width / 2 - letterbox.getDw()) / letterbox.getRatio();
            double y0 = (centerY - height / 2 - letterbox.getDh()) / letterbox.getRatio();
            double x1 = (centerX + width / 2 - letterbox.getDw()) / letterbox.getRatio();
            double y1 = (centerY + height / 2 - letterbox.getDh()) / letterbox.getRatio();

            Point topLeft = new Point(x0, y0);
            Point bottomRight = new Point(x1, y1);
            Scalar color = new Scalar(255, 0, 0); // BGR blue

            Imgproc.rectangle(img, topLeft, bottomRight, color, minDwDh / ODConfig.lineThicknessRatio);

            // Chinese label drawn above the box (OpenCV putText cannot render CJK glyphs).
            String chineseText = String.format("手机检测：%.2f", confidence);
            Point textOrigin = new Point(x0, y0 - 10);
            OpenCVChineseUtil.drawText(img, chineseText, "phone", textOrigin, color, DEFAULT_FONT, (int) (minDwDh / ODConfig.fontSizeRatio * 6));

            log.info("Detected phone object with confidence: {} at coordinates: ({}, {}, {}, {})", confidence, x0, y0, x1, y1);

            String key = Constants.CAMERA_DETECTION + yqCameraInfo.getStreamUrl() + ":" + "phone";
            // Debounce: require the detection to persist for 3 consecutive frames.
            if (frameCounters.getOrDefault(key, 0) < 3) {
                frameCounters.merge(key, 1, Integer::sum);
                return null;
            }
            frameCounters.put(key, 0);
            // Cooldown: at most one event per camera while the cache entry is alive.
            if (LocalCache.CAMERA_CACHE.getIfPresent(key) == null) {
                YqCameraEvent yqCameraEvent = saveDetectionImage(img, DetectionConfig.getPhoneDetectionPath(), 444, yqCameraInfo);
                LocalCache.CAMERA_CACHE.put(key, true);
                return yqCameraEvent;
            }
            return null;
        }
        return null;
    }

    /**
     * Scans clothing-model output for boxes above {@code CLOTHING_CONFIDENCE_THRESHOLD},
     * annotates {@code img}, and raises a clothing event (eventType 555) once the
     * detection has persisted for 3 consecutive frames and the per-camera cooldown
     * cache has no entry.
     *
     * @param clothingOutputData model output; box geometry is read from channels 0-3.
     *                           NOTE(review): confidence is read from channel 5 here,
     *                           unlike the other models' channel 4 — presumably this
     *                           model emits an extra channel; confirm against the model.
     * @return the event to publish, or {@code null} when debounced, suppressed, or nothing found
     */
    private YqCameraEvent processClothingDetection(Mat img, Letterbox letterbox, float[][][] clothingOutputData, int minDwDh, YqCameraInfo yqCameraInfo) throws Exception {
        int numBoxes = clothingOutputData[0][0].length;
        for (int i = 0; i < numBoxes; i++) {
            float confidence = clothingOutputData[0][5][i];
            if (confidence < CLOTHING_CONFIDENCE_THRESHOLD) continue;

            float centerX = clothingOutputData[0][0][i];
            float centerY = clothingOutputData[0][1][i];
            float width = clothingOutputData[0][2][i];
            float height = clothingOutputData[0][3][i];

            // Undo letterbox padding/scaling to map the box back onto the original frame.
            double x0 = (centerX - width / 2 - letterbox.getDw()) / letterbox.getRatio();
            double y0 = (centerY - height / 2 - letterbox.getDh()) / letterbox.getRatio();
            double x1 = (centerX + width / 2 - letterbox.getDw()) / letterbox.getRatio();
            double y1 = (centerY + height / 2 - letterbox.getDh()) / letterbox.getRatio();

            Point topLeft = new Point(x0, y0);
            Point bottomRight = new Point(x1, y1);
            Scalar color = new Scalar(0, 255, 255); // BGR yellow

            Imgproc.rectangle(img, topLeft, bottomRight, color, minDwDh / ODConfig.lineThicknessRatio);

            // Chinese label drawn above the box (OpenCV putText cannot render CJK glyphs).
            String chineseText = String.format("常服检测：%.2f", confidence);
            Point textOrigin = new Point(x0, y0 - 10);
            OpenCVChineseUtil.drawText(img, chineseText, "clothing", textOrigin, color, DEFAULT_FONT, (int) (minDwDh / ODConfig.fontSizeRatio * 6));

            log.info("Detected clothing object with confidence: {} at coordinates: ({}, {}, {}, {})", confidence, x0, y0, x1, y1);

            String key = Constants.CAMERA_DETECTION + yqCameraInfo.getStreamUrl() + ":" + "clothing";
            // Debounce: require the detection to persist for 3 consecutive frames.
            if (frameCounters.getOrDefault(key, 0) < 3) {
                frameCounters.merge(key, 1, Integer::sum);
                return null;
            }
            frameCounters.put(key, 0);
            // Cooldown: at most one event per camera while the cache entry is alive.
            if (LocalCache.CAMERA_CACHE.getIfPresent(key) == null) {
                YqCameraEvent yqCameraEvent = saveDetectionImage(img, DetectionConfig.getClothingDetectionPath(), 555, yqCameraInfo);
                LocalCache.CAMERA_CACHE.put(key, true);
                return yqCameraEvent;
            }
            return null;
        }
        return null;
    }

    /**
     * Persists the annotated frame under {@code detectionPath} (named by current
     * epoch millis), ships the file over TCP, and assembles a new camera event
     * whose image URL points at the stored file and whose video URL points at a
     * companion clip expected under {@code detectionPath}/video.
     *
     * @param img           annotated frame to write
     * @param detectionPath target directory (created if absent)
     * @param eventType     numeric event type code (222 smoke, 333 fire, ...)
     * @param yqCameraInfo  source camera metadata
     * @return the populated event, ready to send
     */
    private YqCameraEvent saveDetectionImage(Mat img, String detectionPath, int eventType, YqCameraInfo yqCameraInfo) throws Exception {
        String fileName = System.currentTimeMillis() + ".png";
        FileUtil.mkdir(detectionPath);
        String outputPath = detectionPath + "/" + fileName;
        Imgcodecs.imwrite(outputPath, img);
        sendTcpFile(outputPath);
        String eventId = IdUtil.getSnowflake(1L, 1L).nextIdStr();
        return YqCameraEvent.builder()
                .eventId(eventId)
                .eventStatus("0")
                .ignoreFlag("0")
                .parkCode("010001")
                .happenTime(new Date())
                .imageUrl(DetectionConfig.getSendAddress() + FileUploadUtils.getPathFileName(detectionPath, fileName))
                .videoUrl(DetectionConfig.getSendAddress() + FileUploadUtils.getPathFileName(detectionPath + "/video", fileName))
                .eventType(eventType)
                .srcName(yqCameraInfo.getName())
                .srcType("carema")
                .campCode(DetectionConfig.getCampCode())
                .campName(DetectionConfig.getCampName())
                .build();
    }



    /**
     * Returns the configured person-gather threshold: the minimum person count in a
     * frame that triggers a PERSON_GATHER event.
     * Note: the "penson"/"getperson" spellings are kept as-is because callers and
     * {@code YoloVariables} already use them.
     */
    private Integer getpersonGather() {
        return YoloVariables.pensonNum;
    }

    /**
     * Returns the dictionary entries naming the cameras that participate in
     * important-carrier monitoring (matched against the camera name in
     * {@code processPersonDetection}).
     */
    private List<SysDictData> getImportantCarrierCameraName() {
        return YoloVariables.importantCarrierCameraName;
    }

    /**
     * Returns the dictionary entries naming the cameras that participate in
     * materials-equipment monitoring (matched against the camera name in
     * {@code processPersonDetection}).
     */
    private List<SysDictData> getMaterialsEquipmentCameraName() {
        return YoloVariables.materialsEquipmentCameraName;
    }

    /**
     * Publishes a camera event twice: first with absolute URLs over the Netty TCP
     * channel, then again via {@code postForTCP} with the public send-address prefix
     * stripped so relative paths are stored.
     * Side effect: mutates the URLs on the passed-in event.
     */
    private void sendCameraEvent(YqCameraEvent yqCameraEvent) {
        TCPMessage tcpMessage = TCPMessage.builder()
                .flag("2")
                .data(yqCameraEvent)
                .campCode(DetectionConfig.getCampCode())
                .build();
        NettyTcpClient nettyTcpClient = new NettyTcpClient();
        nettyTcpClient.sendMessage(tcpMessage);
        String sendAddress = DetectionConfig.getSendAddress();
        if (yqCameraEvent.getImageUrl() != null) {
            yqCameraEvent.setImageUrl(yqCameraEvent.getImageUrl().replace(sendAddress, ""));
        }
        // Person-gather events are built without a video URL; guard against NPE.
        if (yqCameraEvent.getVideoUrl() != null) {
            yqCameraEvent.setVideoUrl(yqCameraEvent.getVideoUrl().replace(sendAddress, ""));
        }
        TCPMessage tcpMessage1 = TCPMessage.builder()
                .flag("2")
                .data(yqCameraEvent)
                .campCode(DetectionConfig.getCampCode())
                .build();
        postForTCP(tcpMessage1);
    }

    /**
     * Publishes a materials-equipment record twice: first with the absolute image URL
     * over the Netty TCP channel, then via {@code postForTCP} with the public
     * send-address prefix stripped.
     * Side effect: mutates the back-image URL on the passed-in record.
     */
    private void sendMaterialsEquipment(YqMaterialsEquipmentRecord yqMaterialsEquipmentRecord) {
        TCPMessage tcpMessage = TCPMessage.builder()
                .flag("3")
                .data(yqMaterialsEquipmentRecord)
                .campCode(DetectionConfig.getCampCode())
                .build();
        NettyTcpClient nettyTcpClient = new NettyTcpClient();
        nettyTcpClient.sendMessage(tcpMessage);
        String backImageUrl = yqMaterialsEquipmentRecord.getBackImageUrl();
        // Guard against a record built without an image.
        if (backImageUrl != null) {
            yqMaterialsEquipmentRecord.setBackImageUrl(backImageUrl.replace(DetectionConfig.getSendAddress(), ""));
        }
        TCPMessage tcpMessage1 = TCPMessage.builder()
                .flag("3")
                .data(yqMaterialsEquipmentRecord)
                // campCode was missing on the second message, unlike sendCameraEvent;
                // added for consistency (apparent copy-paste omission).
                .campCode(DetectionConfig.getCampCode())
                .build();
        postForTCP(tcpMessage1);
    }

    /**
     * Publishes an important-carrier record twice: first with the absolute image URL
     * over the Netty TCP channel, then via {@code postForTCP} with the public
     * send-address prefix stripped.
     * Side effect: mutates the back-image URL on the passed-in record.
     */
    private void sendImportantCarrier(YqImportantCarrierRecord yqImportantCarrierRecord) {
        TCPMessage tcpMessage = TCPMessage.builder()
                .flag("4")
                .data(yqImportantCarrierRecord)
                .campCode(DetectionConfig.getCampCode())
                .build();
        NettyTcpClient nettyTcpClient = new NettyTcpClient();
        nettyTcpClient.sendMessage(tcpMessage);
        String backImageUrl = yqImportantCarrierRecord.getBackImageUrl();
        // Guard against a record built without an image.
        if (backImageUrl != null) {
            yqImportantCarrierRecord.setBackImageUrl(backImageUrl.replace(DetectionConfig.getSendAddress(), ""));
        }
        TCPMessage tcpMessage1 = TCPMessage.builder()
                .flag("4")
                .data(yqImportantCarrierRecord)
                // campCode was missing on the second message, unlike sendCameraEvent;
                // added for consistency (apparent copy-paste omission).
                .campCode(DetectionConfig.getCampCode())
                .build();
        postForTCP(tcpMessage1);
    }

    /**
     * Immutable pairing of a capture timestamp (epoch millis) with the frame
     * dimensions of the clip to be produced.
     */
    static class RecordTask {
        final long timestamp;   // moment the recording was requested
        final Size frameSize;   // dimensions of the frames to record

        public RecordTask(long timestamp, Size frameSize) {
            this.frameSize = frameSize;
            this.timestamp = timestamp;
        }
    }

    /**
     * Fixed-capacity ring buffer of deep-copied video frames.
     * <p>Thread-safe via synchronized methods. Stored Mats are owned by the buffer
     * and released on eviction and {@link #clear()}; {@link #getRecentFrames(int)}
     * returns clones the caller is responsible for releasing.
     */
    static class CircularFrameBuffer {
        // ArrayDeque instead of LinkedList: same Deque semantics, less allocation.
        private final Deque<Mat> buffer = new ArrayDeque<>();
        private final int capacity;

        public CircularFrameBuffer(int capacity) {
            this.capacity = capacity;
        }

        /** Appends a deep copy of {@code frame}, evicting and releasing the oldest frame when full. */
        public synchronized void add(Mat frame) {
            Mat evicted = null;
            if (buffer.size() >= capacity) {
                evicted = buffer.removeFirst();
            }
            Mat copy = new Mat();
            frame.copyTo(copy);
            buffer.addLast(copy);
            // Release only after the copy succeeded, mirroring the original ordering.
            if (evicted != null) {
                evicted.release();
            }
        }

        /**
         * Returns clones of the newest {@code count} frames in oldest-to-newest order
         * (fewer if the buffer holds less). Caller must release the returned Mats.
         */
        public synchronized List<Mat> getRecentFrames(int count) {
            List<Mat> snapshot = new ArrayList<>(buffer);
            int n = Math.min(count, snapshot.size());
            List<Mat> result = new ArrayList<>(n);
            for (int i = snapshot.size() - n; i < snapshot.size(); i++) {
                result.add(snapshot.get(i).clone());
            }
            return result;
        }

        /** Releases every buffered frame and empties the buffer. */
        public synchronized void clear() {
            buffer.forEach(Mat::release);
            buffer.clear();
        }
    }

}
