package dd.lo.flann;

import dd.lo.HelloCV;
import org.opencv.calib3d.Calib3d;
import org.opencv.core.Core;
import org.opencv.core.CvType;
import org.opencv.core.DMatch;
import org.opencv.core.KeyPoint;
import org.opencv.core.Mat;
import org.opencv.core.MatOfDMatch;
import org.opencv.core.MatOfKeyPoint;
import org.opencv.core.MatOfPoint;
import org.opencv.core.MatOfPoint2f;
import org.opencv.core.Point;
import org.opencv.core.Rect;
import org.opencv.core.Scalar;
import org.opencv.core.Size;
import org.opencv.features2d.BFMatcher;
import org.opencv.features2d.ORB;
import org.opencv.imgcodecs.Imgcodecs;
import org.opencv.imgproc.Imgproc;

import java.text.DecimalFormat;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.Map;

/**
 * 模仿聂鹏的代码的几个步骤：
 * 1、第一次比较，找到效果最好的关键点匹配
 * 2、对匹配的偏移进行归类，先按x偏移归类再按y偏移归类
 * 3、对大于4个匹配的集合计算关键点的分布外矩形的面积，取面积最大的集合作为好的匹配集合
 * 4、用好的匹配集合计算变换矩阵，并对待比较的图做变换，并将变换的匹配范围显示到queryImg上
 * 5、将最好的关键点匹配用变换矩阵进行变换，以使匹配点跟随图片，用变换后的关键点绘制点线匹配图
 * 6、将变换后的图片*0作为第1个通道，将变换后的图片作为第2个通道，trainImg作为第3个通道，生成比较合成图
 * 7、拼图，完成最终的结果图
 */
public class OrbMatcher {

    /** Minimum number of matches a cluster must contain before it can seed a homography. */
    private static final int TRANSFORM_PT_CNT = 4;

    static {
        // Load the bundled native OpenCV library from the classpath.
        System.load(HelloCV.class.getResource("/libopencv_java481.dylib").getPath());
    }

    // Color pool, red-ish on the left through purple-ish on the right; the intent is
    // that better-ranked matches are drawn with colors closer to red.
    private static final List<Scalar> COLOR_POOL = Arrays.asList(new Scalar(49, 48, 214), new Scalar(85, 112, 225), new Scalar(110, 203, 253), new Scalar(227, 132, 9), new Scalar(231, 92, 108), new Scalar(54, 52, 45));

    private final ORB detector;
    private final BFMatcher matcher;

    // Tolerance (in pixels) used when clustering match offsets: 1% of the (resized)
    // query image height, with a lower bound of 3.
    private final int deviation;

    private final Mat queryImg;
    private final Mat warpedQueryImg;
    private final Mat trainImg;

    private final Mat grayQueryImg;
    private final Mat grayWarpedQueryImg;
    private final Mat grayTrainImg;

    /**
     * Normalizes {@code img} to exactly 3 channels in place: images with fewer than
     * 3 channels get their first channel replicated; images with more (e.g. BGRA)
     * are truncated to the first 3 channels.
     */
    private void adaptImgChannel(Mat img) {
        if (img.channels() < 3) {
            List<Mat> channels = new ArrayList<>(3);
            Core.split(img, channels);
            while (channels.size() < 3) {
                Mat newChannel = new Mat();
                channels.get(0).copyTo(newChannel);
                channels.add(newChannel);
            }
            Core.merge(channels, img);
        }
        if (img.channels() > 3) {
            List<Mat> channels = new ArrayList<>();
            Core.split(img, channels);
            Core.merge(channels.subList(0, 3), img);
        }
    }

    /**
     * Detects ORB keypoints on {@code img} and computes their descriptors into
     * {@code descriptor}.
     * <p>
     * FIX: the previous implementation converted the descriptors to {@code CV_32F},
     * a leftover from a FLANN-based matcher (cf. the package name). ORB descriptors
     * are binary ({@code CV_8U}), and {@link BFMatcher} created with
     * {@code BRUTEFORCE_HAMMING} requires 8-bit descriptors — float descriptors make
     * {@code match()} throw at runtime. Descriptors are therefore kept in their
     * native 8-bit form.
     */
    private void findKeypointAndDescriptor(ORB detector, Mat img, MatOfKeyPoint keyPoint, Mat descriptor) {
        detector.detectAndCompute(img, new Mat(), keyPoint, descriptor);
    }

    /**
     * Concatenates {@code queryImg} (left) and {@code trainImg} (right) side by side
     * and draws each matched keypoint pair as two circles joined by a line, colored
     * from the pool according to the match's position in the sorted array.
     *
     * @param ascSortedBestMatches matches in ascending order; the rank decides the
     *                             color picked from {@link #COLOR_POOL}
     * @return the annotated side-by-side image
     */
    private Mat drawMatches(KeyPoint[] queryKp, KeyPoint[] trainKp, DMatch[] ascSortedBestMatches, Mat queryImg, Mat trainImg) {
        Mat dst = new Mat();
        Core.hconcat(Arrays.asList(queryImg, trainImg), dst);
        int i = 1;
        for (DMatch bestMatch : ascSortedBestMatches) {
            // Map rank i/n onto pool indices 0..5 (pool has exactly 6 colors).
            Scalar color = COLOR_POOL.get((int) Math.round(((double) i / ascSortedBestMatches.length) * 5));
            Point qp = queryKp[bestMatch.queryIdx].pt;
            Point tp = trainKp[bestMatch.trainIdx].pt;
            Point queryPt = new Point(qp.x, qp.y);
            // The train image sits on the right, so its points shift right by the query width.
            Point trainPt = new Point(tp.x + queryImg.width(), tp.y);
            Imgproc.circle(dst, queryPt, 8, color);
            Imgproc.circle(dst, trainPt, 8, color);
            Imgproc.line(dst, queryPt, trainPt, color, 2, Imgproc.LINE_AA);
            i++;
        }
        return dst;
    }

    /**
     * Loads both images, normalizes them to 3 channels, scales the shorter one so
     * both have equal height, and prepares the ORB detector and the Hamming
     * brute-force matcher (with cross-check enabled).
     *
     * @param queryImgPath path of the image to be compared (the "query")
     * @param trainImgPath path of the reference image (the "train")
     * @throws RuntimeException if either image cannot be read
     */
    public OrbMatcher(String queryImgPath, String trainImgPath) {
        queryImg = Imgcodecs.imread(queryImgPath, Imgcodecs.IMREAD_UNCHANGED);
        if (queryImg.empty()) {
            throw new RuntimeException("待比对图片读取失败");
        }
        trainImg = Imgcodecs.imread(trainImgPath, Imgcodecs.IMREAD_UNCHANGED);
        if (trainImg.empty()) {
            throw new RuntimeException("基准图片读取失败");
        }
        adaptImgChannel(queryImg);
        adaptImgChannel(trainImg);
        warpedQueryImg = new Mat();
        grayQueryImg = new Mat();
        grayWarpedQueryImg = new Mat();
        grayTrainImg = new Mat();
        detector = ORB.create(1000);
        matcher = BFMatcher.create(BFMatcher.BRUTEFORCE_HAMMING, true);
        // Height adaptation: scale the shorter image up to the taller one's height,
        // preserving its aspect ratio.
        if (queryImg.height() < trainImg.height()) {
            int targetWidth = trainImg.width() * queryImg.height() / trainImg.height();
            Imgproc.resize(queryImg, queryImg, new Size(targetWidth, trainImg.height()));
        } else if (trainImg.height() < queryImg.height()) {
            int targetWidth = queryImg.width() * trainImg.height() / queryImg.height();
            Imgproc.resize(trainImg, trainImg, new Size(targetWidth, queryImg.height()));
        }
        deviation = Math.max(3, (int) Math.round(queryImg.height() * 0.01));
    }

    /**
     * Runs the whole pipeline described in the class comment and returns the final
     * composite: [annotated query image | point/line match drawing | channel-merged
     * comparison image], all concatenated horizontally.
     * <p>
     * Also prints a similarity score (0.00–0.99) to stdout.
     *
     * @return the composite result image
     * @throws RuntimeException if no cluster of at least {@link #TRANSFORM_PT_CNT}
     *                          coherent matches is found, or homography estimation fails
     */
    public Mat getMatchResult() {
        // Step 1: first pass — detect, describe and brute-force match keypoints.
        Imgproc.cvtColor(queryImg, grayQueryImg, Imgproc.COLOR_BGR2GRAY);
        Imgproc.cvtColor(trainImg, grayTrainImg, Imgproc.COLOR_BGR2GRAY);
        MatOfKeyPoint queryKeyPointMat = new MatOfKeyPoint(), trainKeyPointMat = new MatOfKeyPoint();
        Mat queryDescriptor = new Mat(), trainDescriptor = new Mat();
        findKeypointAndDescriptor(detector, grayQueryImg, queryKeyPointMat, queryDescriptor);
        findKeypointAndDescriptor(detector, grayTrainImg, trainKeyPointMat, trainDescriptor);
        MatOfDMatch matches = new MatOfDMatch();
        matcher.match(queryDescriptor, trainDescriptor, matches);
        List<DMatch> matchList = matches.toList();
        // Step 2: cluster matches by their translation offset, first by dx then by dy.
        KeyPoint[] queryKeyPoints = queryKeyPointMat.toArray();
        KeyPoint[] trainKeyPoints = trainKeyPointMat.toArray();
        Map<Long, Map<Long, List<DMatch>>> offsetGroups = new HashMap<>();
        for (DMatch match : matchList) {
            long dx = Math.round(queryKeyPoints[match.queryIdx].pt.x - trainKeyPoints[match.trainIdx].pt.x);
            Map<Long, List<DMatch>> dxOffsetGroups = null;
            // Probe keys within ±deviation of dx so rounding noise / small jitter
            // does not split one cluster into several.
            for (int i = -deviation; i <= deviation; ++i) {
                dxOffsetGroups = offsetGroups.get(dx + i);
                if (null != dxOffsetGroups) break;
            }
            if (null == dxOffsetGroups) {
                dxOffsetGroups = new HashMap<>();
                offsetGroups.put(dx, dxOffsetGroups);
            }
            long dy = Math.round(queryKeyPoints[match.queryIdx].pt.y - trainKeyPoints[match.trainIdx].pt.y);
            List<DMatch> dyOffsetGroups = null;
            // Same ±deviation probe for the dy bucket inside the chosen dx bucket.
            for (int i = -deviation; i <= deviation; ++i) {
                dyOffsetGroups = dxOffsetGroups.get(dy + i);
                if (null != dyOffsetGroups) break;
            }
            if (null == dyOffsetGroups) {
                dyOffsetGroups = new ArrayList<>();
                dxOffsetGroups.put(dy, dyOffsetGroups);
            }
            dyOffsetGroups.add(match);
        }
        // Step 3: among clusters with at least TRANSFORM_PT_CNT matches, keep the one
        // whose query keypoints span the largest bounding rectangle.
        List<DMatch> bestMatches = null;
        long mxCoverArea = 0;
        Rect mxQueryRect = null, mxTrainRect = null;
        for (Map<Long, List<DMatch>> dyOffsetGroups : offsetGroups.values()) {
            for (List<DMatch> group : dyOffsetGroups.values()) {
                if (group.size() < TRANSFORM_PT_CNT) continue;
                Point[] queryMatchPts = new Point[group.size()], trainMatchPts = new Point[group.size()];
                for (int i = 0; i < group.size(); ++i) {
                    queryMatchPts[i] = queryKeyPoints[group.get(i).queryIdx].pt;
                    trainMatchPts[i] = trainKeyPoints[group.get(i).trainIdx].pt;
                }
                Rect rect = Imgproc.boundingRect(new MatOfPoint(queryMatchPts));
                long area = (long) rect.width * rect.height;
                if (area > mxCoverArea) {
                    mxCoverArea = area;
                    bestMatches = group;
                    mxQueryRect = rect;
                    mxTrainRect = Imgproc.boundingRect(new MatOfPoint(trainMatchPts));
                }
            }
        }
        if (null == bestMatches) {
            throw new RuntimeException("没有找到好的匹配");
        }
        // Step 4: estimate the homography from the corresponding corners of the two
        // bounding rectangles, warp the query image onto the train image's frame,
        // and mark the matched region on queryImg.
        Point[] transQueryPts = new Point[4], transTrainPts = new Point[4];
        transQueryPts[0] = new Point(mxQueryRect.x, mxQueryRect.y);
        transQueryPts[1] = new Point(mxQueryRect.x + mxQueryRect.width, mxQueryRect.y);
        transQueryPts[2] = new Point(mxQueryRect.x + mxQueryRect.width, mxQueryRect.y + mxQueryRect.height);
        transQueryPts[3] = new Point(mxQueryRect.x, mxQueryRect.y + mxQueryRect.height);
        transTrainPts[0] = new Point(mxTrainRect.x, mxTrainRect.y);
        transTrainPts[1] = new Point(mxTrainRect.x + mxTrainRect.width, mxTrainRect.y);
        transTrainPts[2] = new Point(mxTrainRect.x + mxTrainRect.width, mxTrainRect.y + mxTrainRect.height);
        transTrainPts[3] = new Point(mxTrainRect.x, mxTrainRect.y + mxTrainRect.height);
        Mat M = Calib3d.findHomography(new MatOfPoint2f(transQueryPts), new MatOfPoint2f(transTrainPts), Calib3d.RANSAC);
        if (M.empty()) {
            throw new RuntimeException("没有找到好的匹配变换");
        }
        Imgproc.warpPerspective(queryImg, warpedQueryImg, M, trainImg.size());
        Imgproc.rectangle(queryImg, mxQueryRect, new Scalar(0, 0, 255), 5, Imgproc.LINE_AA);
        // Similarity score: covered area relative to half of queryImg's area, capped at 0.99.
        double similarity = ((double) mxCoverArea / ((long) queryImg.height() * queryImg.width())) * 2;
        similarity = Math.min(0.99, similarity);
        DecimalFormat df = new DecimalFormat("#.##");
        System.out.printf("相似度：%s\n", df.format(similarity));
        // Step 5: transform the best matches' query keypoints with M so the drawn
        // match lines follow the warped image, then render the point/line drawing.
        Point[] queryBestKeyPoints = new Point[bestMatches.size()];
        int i = 0;
        for (DMatch bestMatch : bestMatches) {
            Point pt = queryKeyPoints[bestMatch.queryIdx].pt;
            queryBestKeyPoints[i] = new Point(pt.x, pt.y);
            i++;
        }
        MatOfPoint queryBestKeyPointMat = new MatOfPoint(queryBestKeyPoints);
        int oriType = queryBestKeyPointMat.type();
        queryBestKeyPointMat.convertTo(queryBestKeyPointMat, CvType.CV_32FC1); // perspectiveTransform needs 32F
        Core.perspectiveTransform(queryBestKeyPointMat, queryBestKeyPointMat, M);
        queryBestKeyPointMat.convertTo(queryBestKeyPointMat, oriType);
        queryBestKeyPoints = queryBestKeyPointMat.toArray();
        i = 0;
        // Write the warped coordinates back into the shared KeyPoint objects so
        // drawMatches sees them.
        for (DMatch bestMatch : bestMatches) {
            Point pt = queryKeyPoints[bestMatch.queryIdx].pt;
            pt.x = queryBestKeyPoints[i].x;
            pt.y = queryBestKeyPoints[i].y;
            i++;
        }
        Mat matchImg = drawMatches(queryKeyPoints, trainKeyPoints, bestMatches.toArray(new DMatch[0]), warpedQueryImg, trainImg);
        // Step 6: comparison composite — channel 1 is all zeros, channel 2 the warped
        // query image (gray), channel 3 the train image (gray).
        // FIX: reuse grayTrainImg (already computed in step 1) instead of converting
        // the trainImg field to grayscale in place, which destroyed the field and
        // made this method single-use.
        Imgproc.cvtColor(warpedQueryImg, grayWarpedQueryImg, Imgproc.COLOR_BGR2GRAY);
        Mat mergeImg = new Mat();
        Core.merge(Arrays.asList(
                Mat.zeros(warpedQueryImg.size(), CvType.CV_8UC1),
                grayWarpedQueryImg,
                grayTrainImg
        ), mergeImg);
        // Step 7: concatenate everything into the final result image.
        Mat dst = new Mat();
        Core.hconcat(Arrays.asList(queryImg, matchImg, mergeImg), dst);
        return dst;
    }
}
