﻿using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Runtime.InteropServices;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Features2D;
using Emgu.CV.Structure;
using Emgu.CV.UI;
using System.IO;
using System.Threading;

namespace MAVI.BillRecognition.BillDetection.source
{
    public class SURFBillDetection
    {
        // Fixed location Run() reads the camera-captured image from.
        private const string CapturedImagePath = @"C:/ptProiect/imagineLuata/image.jpg";

        // Guards concurrent access to DaList when several detection threads
        // report their results. readonly so the lock target can never be
        // reassigned out from under a waiting thread.
        public static readonly object lockL = new object();

        // Accumulated results, one "matchCount|billName" entry per bill.
        public static List<string> DaList = new List<string>();

        // The image on which the detection is made.
        // NOTE(review): Run() currently ignores this field and always reloads
        // the captured image from CapturedImagePath — confirm whether the
        // ImagineLuata property is still needed. (The original code even
        // shadowed this field with a local of the same name.)
        private Bitmap imagineLuata;

        // Paths to the reference (standard) images of the bill's front and back.
        private string testImageFront;
        private string testImageBack;

        // Display name of the bill under test ("numele bancnotei" = bill name).
        private string numeleBancnotei;

        // Maximum number of matched features found between the captured image
        // and either side (front/back) of the reference bill.
        int max = 0;

        // Setter for the image to be processed (see NOTE on the field above).
        public Bitmap ImagineLuata
        {
            set { this.imagineLuata = value; }
        }

        // Setter for the path of the reference bill's front image.
        public string TestImageFront
        {
            set { this.testImageFront = value; }
        }

        // Setter for the path of the reference bill's back image.
        public string TestImageBack
        {
            set { this.testImageBack = value; }
        }

        // Setter for the bill's display name.
        public string NumeleBancnotei
        {
            set { this.numeleBancnotei = value; }
        }

        /// <summary>
        /// Matches the captured image against the front and the back of the
        /// reference bill, keeps the larger match count in <c>max</c>, and
        /// appends "max|billName" to the shared <see cref="DaList"/>.
        /// Intended to run on its own thread, one instance per bill type.
        /// </summary>
        public void Run()
        {
            // using-blocks guarantee the streams and bitmaps are released even
            // if SURFDetect throws (the original leaked every FileStream and
            // Bitmap it opened). The stream must outlive the Bitmap built on
            // it, hence the nesting order.
            using (FileStream capturedStream = new FileStream(CapturedImagePath, FileMode.Open, FileAccess.Read))
            using (Bitmap captured = new Bitmap(capturedStream))
            {
                // Front side of the reference bill.
                using (FileStream frontStream = new FileStream(testImageFront, FileMode.Open, FileAccess.Read))
                using (Bitmap frontReference = new Bitmap(frontStream))
                {
                    max = Math.Max(max, SURFDetect(frontReference, captured));
                }

                // Back side of the reference bill; keep whichever side matched
                // more features.
                using (FileStream backStream = new FileStream(testImageBack, FileMode.Open, FileAccess.Read))
                using (Bitmap backReference = new Bitmap(backStream))
                {
                    max = Math.Max(max, SURFDetect(backReference, captured));
                }
            }

            // Report the best match count and the bill's name, separated by "|".
            // DaList is shared across detection threads, so the append is
            // serialized on lockL.
            string numberAndName = max + "|" + numeleBancnotei;
            lock (lockL)
            {
                DaList.Add(numberAndName);
            }
        }

        /// <summary>
        /// Counts the SURF features matched between a reference bill image and
        /// the captured image.
        /// </summary>
        /// <param name="imageTest">Reference (model) bill image.</param>
        /// <param name="imageTaken">Captured image to search in.</param>
        /// <returns>
        /// The number of matched features when a homography between the two
        /// images exists; otherwise 0.
        /// </returns>
        public static int SURFDetect(Bitmap imageTest, Bitmap imageTaken)
        {
            // SURF detector with hessian threshold 600 and basic (non-extended)
            // descriptors — the Fast-Hessian method, per the original notes the
            // fastest option (~120 ms).
            SURFDetector surfParam = new SURFDetector(600, false);

            // Both inputs are converted to grayscale for feature extraction.
            // The Image<,> wrappers own unmanaged memory, so dispose them
            // (the original leaked both).
            using (Image<Gray, Byte> modelImage = new Image<Gray, Byte>(imageTest))
            using (Image<Gray, Byte> observedImage = new Image<Gray, Byte>(imageTaken))
            {
                // Extract features from the reference image and build a
                // feature tracker over them.
                ImageFeature[] modelFeatures = surfParam.DetectFeatures(modelImage, null);
                using (Features2DTracker tracker = new Features2DTracker(modelFeatures))
                {
                    // Extract features from the captured image, match them
                    // against the model (k = 2, emax = 20), then filter the raw
                    // matches by uniqueness and by size/orientation agreement.
                    ImageFeature[] imageFeatures = surfParam.DetectFeatures(observedImage, null);
                    Features2DTracker.MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);
                    matchedFeatures = Features2DTracker.VoteForUniqueness(matchedFeatures, 0.8);
                    matchedFeatures = Features2DTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);

                    // A non-null homography means the surviving matches are
                    // geometrically consistent — only then is the count valid.
                    using (HomographyMatrix homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures))
                    {
                        return homography != null ? matchedFeatures.Length : 0;
                    }
                }
            }
        }

        /// <summary>
        /// Check if both the managed and unmanaged code are compiled for the
        /// same architecture. (The name keeps the original misspelling of
        /// "IsPlatformCompatible" to avoid breaking possible callers.)
        /// </summary>
        /// <returns>
        /// true if both the managed and unmanaged code are compiled for the
        /// same architecture; otherwise logs a diagnostic and returns false.
        /// </returns>
        static bool IsPlaformCompatable()
        {
            // IntPtr is 4 bytes on a 32-bit CLR and 8 on a 64-bit CLR.
            int clrBitness = Marshal.SizeOf(typeof(IntPtr)) * 8;
            if (clrBitness != CvInvoke.UnmanagedCodeBitness)
            {
                Debug.WriteLine(String.Format("Platform mismatched: CLR is {0} bit, C++ code is {1} bit."
                   + " Please consider recompiling the executable with the same platform target as C++ code.",
                   clrBitness, CvInvoke.UnmanagedCodeBitness));
                return false;
            }
            return true;
        }
    }
    }



