﻿using System;
using System.Collections.Generic;
using System.Diagnostics;
using SDR = System.Drawing;
using System.Runtime.InteropServices;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Features2D;
using Emgu.CV.Structure;
using Emgu.CV.Util;
using Emgu.CV.WPF;


using System.Linq;
using System.Text;
using System.Windows;
using System.Windows.Controls;
using System.IO;
using Microsoft.Win32;
using System.Windows.Media.Imaging;

namespace jsmag
{
    /// <summary>
    /// Benchmarks SIFT feature detection and matching using the Emgu CV 2.x API.
    /// The first image of the list passed to <see cref="RunSIFT"/> is matched against
    /// every other image; per-pair detection/description/matching timings, feature
    /// counts and a rendered match visualisation are collected into <see cref="Result"/>
    /// objects. Call <see cref="SetSIFTParameters"/> before running.
    /// </summary>
    class Examinator_SIFT
    {
        // SIFT parameters, forwarded verbatim to the SIFTDetector constructor.
        private int nFeatures;
        private int nOctaveLayers;
        private double contrastThreshold;
        private double edgeThreshold;
        private double sigma;

        /// <summary>
        /// Stores the SIFT detector parameters used by subsequent matching runs.
        /// The values are passed unchanged to <c>SIFTDetector</c>.
        /// </summary>
        public void SetSIFTParameters(int nFeatures, int nOctaveLayers, double contrastThreshold, double edgeThreshold, double sigma)
        {
            this.nFeatures = nFeatures;
            this.nOctaveLayers = nOctaveLayers;
            this.contrastThreshold = contrastThreshold;
            this.edgeThreshold = edgeThreshold;
            this.sigma = sigma;
        }

        /// <summary>
        /// Detects keypoints and computes descriptors for one image, reporting the
        /// elapsed milliseconds of each step separately.
        /// </summary>
        /// <param name="sift">Configured SIFT detector to use.</param>
        /// <param name="image">Grayscale input image.</param>
        /// <param name="keyPoints">Receives the detected keypoints (caller disposes).</param>
        /// <param name="detectMs">Receives the keypoint-detection time in ms.</param>
        /// <param name="describeMs">Receives the descriptor-computation time in ms.</param>
        /// <returns>Descriptor matrix (caller disposes); may be null when no keypoints were found.</returns>
        private Matrix<float> ExtractFeatures(SIFTDetector sift, Image<Gray, Byte> image,
            out VectorOfKeyPoint keyPoints, out long detectMs, out long describeMs)
        {
            Stopwatch watch = Stopwatch.StartNew();
            keyPoints = sift.DetectKeyPointsRaw(image, null);
            watch.Stop();
            detectMs = watch.ElapsedMilliseconds;

            watch = Stopwatch.StartNew();
            Matrix<float> descriptors = sift.ComputeDescriptorsRaw(image, null, keyPoints);
            watch.Stop();
            describeMs = watch.ElapsedMilliseconds;
            return descriptors;
        }

        /// <summary>
        /// Extracts SIFT features from both images, brute-force KNN-matches the first
        /// image's descriptors against the second's, filters matches (uniqueness ratio,
        /// size/orientation vote, homography estimation) and fills a <see cref="Result"/>
        /// with timings, counts and a side-by-side match image.
        /// </summary>
        /// <param name="firstImage">Observed (query) grayscale image.</param>
        /// <param name="secondImage">Model grayscale image matched against.</param>
        /// <returns>The populated per-pair <see cref="Result"/>.</returns>
        private Result SiftSingleMatch(Image<Gray, Byte> firstImage, Image<Gray, Byte> secondImage)
        {
            Result singleResult = new Result();
            const int k = 2;                        // nearest neighbours per query descriptor
            const double uniquenessThreshold = 0.8; // Lowe-style ratio-test threshold

            using (SIFTDetector siftCPU = new SIFTDetector(nFeatures, nOctaveLayers, contrastThreshold, edgeThreshold, sigma))
            {
                VectorOfKeyPoint firstImageKeyPoints;
                VectorOfKeyPoint secondImageKeyPoints;
                long detectMs, describeMs;

                // Features + descriptors of the first (observed) image.
                Matrix<float> firstImageDescriptors = ExtractFeatures(siftCPU, firstImage,
                    out firstImageKeyPoints, out detectMs, out describeMs);
                singleResult.firstImageFeaturesFindingTime = detectMs;
                singleResult.firstImageDescriptorsCountingTime = describeMs;
                singleResult.firstImageNumberOfGeneratedFeatures = firstImageKeyPoints.Size;
                if (firstImageKeyPoints.Size > 0) // guard: the original divided by zero here
                {
                    singleResult.firstImageTimePer1Feature = (float)detectMs / firstImageKeyPoints.Size;
                    singleResult.firstImageTimePer1Descriptor = (float)describeMs / firstImageKeyPoints.Size;
                }

                // Features + descriptors of the second (model) image.
                Matrix<float> secondImageDescriptors = ExtractFeatures(siftCPU, secondImage,
                    out secondImageKeyPoints, out detectMs, out describeMs);
                singleResult.secondImageFeaturesFindingTime = detectMs;
                singleResult.secondImageDescriptorsCountingTime = describeMs;
                singleResult.secondImageNumberOfGeneratedFeatures = secondImageKeyPoints.Size;
                if (secondImageKeyPoints.Size > 0)
                {
                    singleResult.secondImageTimePer1Feature = (float)detectMs / secondImageKeyPoints.Size;
                    singleResult.secondImageTimePer1Descriptor = (float)describeMs / secondImageKeyPoints.Size;
                }

                try
                {
                    if (firstImageDescriptors == null || secondImageDescriptors == null)
                    {
                        // Nothing to match when either image produced no descriptors
                        // (NOTE(review): Emgu 2.x yields no descriptor matrix without
                        // keypoints — confirm against the installed version). Returning
                        // the timings collected so far avoids a null dereference.
                        return singleResult;
                    }

                    Stopwatch matchWatch = Stopwatch.StartNew();
                    using (BruteForceMatcher<float> matcher = new BruteForceMatcher<float>(DistanceType.L2))
                    using (Matrix<int> indices = new Matrix<int>(firstImageDescriptors.Rows, k))
                    using (Matrix<byte> mask = new Matrix<byte>(firstImageDescriptors.Rows, 1))
                    {
                        matcher.Add(secondImageDescriptors);

                        using (Matrix<float> dist = new Matrix<float>(firstImageDescriptors.Rows, k))
                        {
                            matcher.KnnMatch(firstImageDescriptors, indices, dist, k, null);
                            mask.SetValue(255); // start with every match accepted
                            Features2DToolbox.VoteForUniqueness(dist, uniquenessThreshold, mask);
                        }

                        int nonZeroCount = CvInvoke.cvCountNonZero(mask);
                        if (nonZeroCount >= 4)
                        {
                            // Reject matches with inconsistent scale/rotation; if enough
                            // survive, also estimate a homography. The matrix itself is
                            // discarded — only its computation cost is deliberately kept
                            // inside matchingTime.
                            nonZeroCount = Features2DToolbox.VoteForSizeAndOrientation(secondImageKeyPoints, firstImageKeyPoints, indices, mask, 1.5, 20);
                            if (nonZeroCount >= 4)
                            {
                                HomographyMatrix homography = Features2DToolbox.GetHomographyMatrixFromMatchedFeatures(secondImageKeyPoints, firstImageKeyPoints, indices, mask, 2);
                                if (homography != null)
                                    homography.Dispose();
                            }
                        }

                        matchWatch.Stop();
                        singleResult.matchingTime = matchWatch.ElapsedMilliseconds;
                        singleResult.numberOfMatchedFeatures = nonZeroCount;
                        // Guard: the original integer division threw DivideByZeroException
                        // when the first image had no features.
                        singleResult.percentOfMatching = singleResult.firstImageNumberOfGeneratedFeatures > 0
                            ? 100 * nonZeroCount / singleResult.firstImageNumberOfGeneratedFeatures
                            : 0;

                        // Render the matched keypoints side by side; the image is handed
                        // to the Result, so it must NOT be disposed here.
                        Image<Bgr, Byte> result = Features2DToolbox.DrawMatches(secondImage, secondImageKeyPoints, firstImage, firstImageKeyPoints,
                            indices, new Bgr(255, 255, 255), new Bgr(255, 255, 255), mask, Features2DToolbox.KeypointDrawType.DEFAULT);
                        singleResult.matchedImages = result;
                        singleResult.matchedImages_toDisplay = BitmapSourceConvert.ToBitmapSource(result);
                    }
                }
                finally
                {
                    // Emgu matrices/vectors wrap native OpenCV memory; release explicitly.
                    if (firstImageDescriptors != null)
                        firstImageDescriptors.Dispose();
                    if (secondImageDescriptors != null)
                        secondImageDescriptors.Dispose();
                    firstImageKeyPoints.Dispose();
                    secondImageKeyPoints.Dispose();
                }
            }

            return singleResult;
        }

        /// <summary>
        /// Matches the first image of the list against every other image.
        /// </summary>
        /// <param name="imageList">Reference image first, then the candidate images.</param>
        /// <returns>
        /// One <see cref="Result"/> per candidate, in list order; empty when the list
        /// is null or contains fewer than two images.
        /// </returns>
        public List<Result> RunSIFT(List<Image<Gray, Byte>> imageList)
        {
            List<Result> resultList = new List<Result>();
            if (imageList == null)
                return resultList; // nothing to compare

            for (int i = 1; i < imageList.Count; i++)
            {
                resultList.Add(SiftSingleMatch(imageList[0], imageList[i]));
            }
            return resultList;
        }
    }
}
