package uav.util.deepsort;



import org.opencv.calib3d.Calib3d;
import org.opencv.core.*;
import org.opencv.features2d.DescriptorMatcher;
import org.opencv.features2d.SIFT;
import org.opencv.imgcodecs.Imgcodecs;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

import java.util.*;




// 目标跟踪系统主类
/**
 * SORT/DeepSORT-style multi-object tracker.
 *
 * Associates per-frame detections with Kalman-filtered tracks using the
 * Hungarian algorithm on center distance, gated by IoU, with an optional
 * SIFT-appearance re-identification fallback for long-range matches.
 *
 * NOTE(review): not thread-safe — the matcher scratch buffers and track lists
 * are shared mutable state; confirm single-threaded use by callers.
 */
public class ObjectTracker {

    private static final Logger logger = LoggerFactory.getLogger(ObjectTracker.class);

    /** Tracks currently being updated every frame. */
    private final List<Track> activeTracks = new ArrayList<>();
    /** Tracks retired after exceeding the lost threshold. */
    private final List<Track> lostTracks = new ArrayList<>();
    /** Detections of the most recent frame (set by {@link #processDetections}). */
    private List<double[]> detections = new ArrayList<>();
    /** Per-class box storage, i.e. the detector's float[][] output grouped by class id. */
    private final Map<Integer, ArrayList<float[]>> clsBoxHashMap = new HashMap<>();
    /** NOTE(review): never read anywhere in this class — confirm it is still needed. */
    private String trackerCls = "";
    /** Next track id to hand out; ids are never reused. */
    private int nextId = 1;

    /** Scratch buffer reused across knnMatch calls (another reason this class is not thread-safe). */
    private final List<MatOfDMatch> knnMatches = new ArrayList<>();
    private final DescriptorMatcher matcher =
            DescriptorMatcher.create(DescriptorMatcher.FLANNBASED);
    /** "Y" enables SIFT appearance features; anything else disables them. */
    private String sortFeatureFlag = "N";
    private final SIFT sift = SIFT.create(
            500,  // max feature count (0 = unlimited)
            3,    // nOctaveLayers
            0.04, // contrastThreshold
            10.0, // edgeThreshold
            1.6   // initial sigma
    );

    private ArrayList<float[]> detList = new ArrayList<>();

    /** Internal per-target state: Kalman filter plus last matched box and appearance features. */
    private static class Track {
        final int trackId;
        final KalmanFilter kf;
        int age = 1;                 // frames since creation
        int timeSinceUpdate = 0;     // frames since the last successful match
        int detLocation = -1;        // index of the matched detection in the current frame
        double topX = 0;             // last matched box, letterboxed coordinates
        double topY = 0;
        double bottomX = 0;
        double bottomY = 0;
        MatOfKeyPoint keyPoint;      // SIFT keypoints of the last matched crop (nullable)
        Mat descriptorsMat;          // SIFT descriptors of the last matched crop (nullable)

        Track(int id, double x, double y) {
            this.trackId = id;
            this.kf = new KalmanFilter((float) x, (float) y);
        }
    }

    public ArrayList<float[]> getDetList() {
        return detList;
    }

    public void setDetList(ArrayList<float[]> detList) {
        this.detList = detList;
    }

    /**
     * Entry point for raw detector output; currently only resets the per-frame
     * buffers. NOTE(review): grouping detectorData into clsBoxHashMap appears
     * unimplemented — confirm intent against callers before relying on it.
     */
    public void update(float[][] detectorData, Mat img, List<Integer> trackerClsList) {
        clsBoxHashMap.clear();
        detList.clear();
    }

    /**
     * Runs one tracking step against the current frame's detections.
     *
     * Each detection is a 6-element vector (letterboxed-image coordinates):
     *   [0] center x, [1] center y,
     *   [2] top-left x, [3] top-left y,
     *   [4] bottom-right x, [5] bottom-right y
     *
     * @param detections       detection boxes for this frame
     * @param img              original frame, used for SIFT feature extraction
     * @param iouMathThreshold minimum IoU for a spatial association to be accepted
     * @param sortFeatureFlag  "Y" to enable SIFT-based re-identification
     * @param letterbox        mapping from letterboxed coordinates back to the original image
     */
    public void processDetections(List<double[]> detections, Mat img, float iouMathThreshold,
                                  String sortFeatureFlag, Letterbox letterbox) {
        // Step 1: predict every active track one frame forward.
        this.sortFeatureFlag = sortFeatureFlag;
        boolean useFeatures = "Y".equals(sortFeatureFlag); // null-safe flag check
        for (Track track : activeTracks) {
            track.kf.predict();
            track.age++;
            track.timeSinceUpdate++;
        }
        this.detections = detections;

        // Step 2: cost matrix from predicted-to-detected center distance.
        // NOTE(review): plain Euclidean distance; the original comment says
        // Mahalanobis distance was intended — confirm before changing metrics.
        double[][] costMatrix = new double[activeTracks.size()][detections.size()];
        for (int i = 0; i < activeTracks.size(); i++) {
            double[] predicted = activeTracks.get(i).kf.getPosition();
            for (int j = 0; j < detections.size(); j++) {
                double dx = predicted[0] - detections.get(j)[0];
                double dy = predicted[1] - detections.get(j)[1];
                costMatrix[i][j] = Math.sqrt(dx * dx + dy * dy);
            }
        }

        // Step 3: optimal track-to-detection assignment.
        int[] matches = new HungarianAlgorithm(costMatrix).execute();

        // Step 4: update matched tracks.
        Set<Integer> matchedDetIndices = new HashSet<>();
        for (int i = 0; i < matches.length; i++) {
            if (matches[i] == -1) {
                continue;
            }
            Track track = activeTracks.get(i);
            double[] det = detections.get(matches[i]);

            float iouRate = UavUtil.calculateIoU(
                    new Detection(det[2], det[3], det[4], det[5]),
                    new Detection(track.topX, track.topY, track.bottomX, track.bottomY));

            if (iouRate > iouMathThreshold && iouMathThreshold > -1) {
                // Strong spatial overlap: accept the assignment directly.
                acceptMatch(track, det, matches[i], img, letterbox, useFeatures, matchedDetIndices);
            } else {
                logger.info("HungarianAlgorithm costMatrix[{}]@[matches[{}]]:{}",
                        i, i, costMatrix[i][matches[i]]);
                if (costMatrix[i][matches[i]] < 30) {
                    // Low IoU but centers are close: still accept.
                    acceptMatch(track, det, matches[i], img, letterbox, useFeatures, matchedDetIndices);
                } else if (useFeatures) {
                    // Centers far apart: fall back to appearance re-identification.
                    tryFeatureReId(track, det, matches[i], img, letterbox, matchedDetIndices);
                }
            }
        }

        // Step 5: unmatched detections — merge duplicate boxes or spawn new tracks.
        for (int j = 0; j < detections.size(); j++) {
            if (matchedDetIndices.contains(j)) {
                continue;
            }
            double[] det = detections.get(j);

            // Guard against one physical object yielding two detector boxes:
            // the Hungarian step picks a single optimum, so re-check unmatched
            // detections against already-updated tracks with a stricter IoU.
            boolean merged = false;
            for (Track track : activeTracks) {
                float iouRate = UavUtil.calculateIoU(
                        new Detection(det[2], det[3], det[4], det[5]),
                        new Detection(track.topX, track.topY, track.bottomX, track.bottomY));
                if (iouRate > 0.80f) {
                    applyDetection(track, det);
                    if (useFeatures) {
                        refreshFeatures(track, img, letterbox);
                    }
                    matchedDetIndices.add(j);
                    merged = true;
                    break;
                }
            }
            // FIX: the original only skipped creation when IoU > 0.95, so a
            // detection merged at 0.80 < IoU <= 0.95 ALSO spawned a duplicate
            // track. Any successful merge now suppresses the new track.
            if (merged) {
                continue;
            }

            // Genuinely new target: start a fresh track.
            Track track = new Track(nextId++, det[0], det[1]);
            track.detLocation = j;
            track.topX = det[2];
            track.topY = det[3];
            track.bottomX = det[4];
            track.bottomY = det[5];
            if (useFeatures) {
                refreshFeatures(track, img, letterbox);
            }
            activeTracks.add(track);
        }

        // Step 6: retire tracks that have gone unmatched too long.
        Iterator<Track> it = activeTracks.iterator();
        while (it.hasNext()) {
            Track track = it.next();
            if (track.timeSinceUpdate > 80) { // lost threshold, in frames
                lostTracks.add(track);
                it.remove();
            }
        }
    }

    /** Commits an accepted assignment: Kalman update, box bookkeeping, optional features. */
    private void acceptMatch(Track track, double[] det, int detIndex, Mat img,
                             Letterbox letterbox, boolean useFeatures,
                             Set<Integer> matchedDetIndices) {
        applyDetection(track, det);
        if (useFeatures) {
            refreshFeatures(track, img, letterbox);
        }
        track.detLocation = detIndex;
        matchedDetIndices.add(detIndex);
    }

    /** Applies a detection onto a track: Kalman update plus last-box bookkeeping. */
    private void applyDetection(Track track, double[] det) {
        track.kf.update(det);
        track.timeSinceUpdate = 0;
        track.topX = det[2];
        track.topY = det[3];
        track.bottomX = det[4];
        track.bottomY = det[5];
    }

    /** Maps a letterboxed box back to original-image space and returns the ROI view. */
    private Mat cropOriginal(Mat img, Letterbox lb, double x1, double y1, double x2, double y2) {
        Point topLeft = new Point((x1 - lb.getDw()) / lb.getRatio(), (y1 - lb.getDh()) / lb.getRatio());
        Point bottomRight = new Point((x2 - lb.getDw()) / lb.getRatio(), (y2 - lb.getDh()) / lb.getRatio());
        return new Mat(img, new Rect(topLeft, bottomRight));
    }

    /** Runs SIFT over the whole ROI, filling keyPoints/descriptors; releases the scratch mask. */
    private void computeFeatures(Mat roi, MatOfKeyPoint keyPoints, Mat descriptors) {
        Mat mask = new Mat(); // empty mask = whole ROI
        try {
            sift.detectAndCompute(roi, mask, keyPoints, descriptors);
        } finally {
            mask.release(); // FIX: the scratch mask previously leaked native memory
        }
    }

    /** Re-extracts appearance features for the track's current box and swaps them in. */
    private void refreshFeatures(Track track, Mat img, Letterbox letterbox) {
        MatOfKeyPoint keyPoints = new MatOfKeyPoint();
        Mat descriptors = new Mat();
        Mat roi = cropOriginal(img, letterbox, track.topX, track.topY, track.bottomX, track.bottomY);
        try {
            computeFeatures(roi, keyPoints, descriptors);
        } finally {
            roi.release(); // the ROI is a refcounted view; releasing the header is safe
        }
        storeFeatures(track, keyPoints, descriptors);
    }

    /** Replaces the track's features, releasing the previously held native buffers. */
    private void storeFeatures(Track track, MatOfKeyPoint keyPoints, Mat descriptors) {
        if (track.descriptorsMat != null) {
            track.descriptorsMat.release(); // FIX: old descriptors previously leaked
        }
        if (track.keyPoint != null) {
            track.keyPoint.release();
        }
        track.descriptorsMat = descriptors;
        track.keyPoint = keyPoints;
    }

    /**
     * Appearance-based fallback: SIFT-matches the detection crop against the
     * track's stored descriptors and accepts the assignment when more than 70%
     * of the knn matches survive Lowe's ratio test.
     */
    private void tryFeatureReId(Track track, double[] det, int detIndex, Mat img,
                                Letterbox letterbox, Set<Integer> matchedDetIndices) {
        try {
            MatOfKeyPoint keyPoints = new MatOfKeyPoint();
            Mat descriptors = new Mat();
            Mat roi = cropOriginal(img, letterbox, det[2], det[3], det[4], det[5]);
            try {
                computeFeatures(roi, keyPoints, descriptors);
            } finally {
                roi.release();
            }

            knnMatches.clear();
            matcher.knnMatch(descriptors, track.descriptorsMat, knnMatches, 2);

            // Lowe's ratio test (0.7 threshold).
            float ratioThresh = 0.7f;
            List<DMatch> goodMatches = new ArrayList<>();
            for (MatOfDMatch knnMatch : knnMatches) {
                List<DMatch> pair = knnMatch.toList();
                if (pair.size() >= 2 && pair.get(0).distance < ratioThresh * pair.get(1).distance) {
                    goodMatches.add(pair.get(0));
                }
            }
            // NOTE(review): the original also ran a RANSAC findFundamentalMat
            // geometric check here but discarded both its result and the inlier
            // mask; that dead computation was removed — the decision below is
            // unchanged and still based purely on the ratio-test survival rate.

            // FIX: guard the division — an empty knnMatches list used to
            // produce NaN (logged as "NaN%" and silently rejected).
            double quality = knnMatches.isEmpty()
                    ? 0.0
                    : goodMatches.size() / (double) knnMatches.size();
            logger.info("match qua: {}%", quality * 100);
            if (quality > 0.7) {
                applyDetection(track, det);
                storeFeatures(track, keyPoints, descriptors);
                track.detLocation = detIndex;
                matchedDetIndices.add(detIndex);
            } else {
                // Rejected: free the scratch features instead of leaking them.
                descriptors.release();
                keyPoints.release();
            }
        } catch (Exception ex) {
            logger.info("deepsort ex:{}", ex.toString());
        }
    }

    /** Returns the tracks that were matched in the current frame. */
    public List<TrackInfo> getActiveTracks() {
        List<TrackInfo> infos = new ArrayList<>();
        for (Track track : activeTracks) {
            if (track.timeSinceUpdate == 0) {
                double[] pos = track.kf.getPosition();
                infos.add(new TrackInfo(track.trackId, pos[0], pos[1], track.age));
            }
        }
        return infos;
    }

    /** Clears all tracker state and frees the native buffers held by tracks. */
    public void release() {
        releaseTrackFeatures(activeTracks); // FIX: track Mats previously leaked on release
        releaseTrackFeatures(lostTracks);
        activeTracks.clear();
        lostTracks.clear();
        detections.clear();
        clsBoxHashMap.clear();
        detList.clear(); // FIX: detList was the only buffer release() forgot
    }

    /** Frees the native OpenCV buffers held by the given tracks. */
    private void releaseTrackFeatures(List<Track> tracks) {
        for (Track t : tracks) {
            if (t.descriptorsMat != null) {
                t.descriptorsMat.release();
                t.descriptorsMat = null;
            }
            if (t.keyPoint != null) {
                t.keyPoint.release();
                t.keyPoint = null;
            }
        }
    }

    /** Returns current-frame tracks ordered by the index of their matched detection. */
    public List<TrackInfo> getDetTracks() {
        List<TrackInfo> infos = new ArrayList<>();
        int detCount = detections.size();
        for (int i = 0; i < detCount; i++) {
            for (Track track : activeTracks) {
                if (track.timeSinceUpdate == 0 && track.detLocation == i) {
                    double[] pos = track.kf.getPosition();
                    infos.add(new TrackInfo(track.trackId, pos[0], pos[1], track.age));
                }
            }
        }
        return infos;
    }

    /** Immutable snapshot of one track: id, filtered position, and age in frames. */
    public static class TrackInfo {
        public final int id;
        public final double x, y;
        public final int age;

        TrackInfo(int id, double x, double y, int age) {
            this.id = id;
            this.x = x;
            this.y = y;
            this.age = age;
        }
    }
}

