﻿using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Runtime.InteropServices;
using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Features2D;
using Emgu.CV.Structure;
using Emgu.CV.UI;
using System.IO;
using System.Threading;

namespace MAVI.BillRecognition.BillDetection.source
{
    public class Class1
    {
        /// <summary>
        /// Best SURF match score seen so far across <see cref="Run"/> invocations on
        /// this instance. Run() compares each new match count against this value and
        /// returns the running maximum.
        /// NOTE(review): this state persists between calls — confirm that comparing
        /// across banknotes (rather than per-banknote) is intended.
        /// </summary>
        int max = 0;

        // Path of the captured banknote image that every test image is matched against.
        // TODO(review): hard-coded path — consider making this configurable.
        const string TakenImagePath = @"C:/ptProiect/imagineLuata/milionu.jpg";

        /// <summary>
        /// Matches the captured banknote image against the front and back test images
        /// of <paramref name="bancnota"/> and returns the best match score recorded so
        /// far together with the banknote's name, formatted as "score|name".
        /// </summary>
        /// <param name="bancnota">Banknote descriptor providing the test image paths and the name.</param>
        /// <returns>A string of the form "maxScore|name".</returns>
        public string Run(Bancnota bancnota)
        {
            // All streams and bitmaps are wrapped in using blocks so the file and
            // GDI+ handles are released deterministically (the original leaked them).
            // A Bitmap built from a Stream requires that stream to stay open for the
            // bitmap's whole lifetime, hence the nesting order below.
            using (FileStream takenStream = new FileStream(TakenImagePath, FileMode.Open, FileAccess.Read))
            using (Bitmap imagineLuata = new Bitmap(takenStream))
            {
                // Front side of the test banknote.
                using (FileStream frontStream = new FileStream(bancnota.TestImageFront, FileMode.Open, FileAccess.Read))
                using (Bitmap imagineTest = new Bitmap(frontStream))
                {
                    int frontScore = SURFDetect(imagineTest, imagineLuata);
                    if (frontScore > max)
                    {
                        max = frontScore;
                    }
                }

                // Back side of the test banknote.
                using (FileStream backStream = new FileStream(bancnota.TestImageBack, FileMode.Open, FileAccess.Read))
                using (Bitmap imagineTestBack = new Bitmap(backStream))
                {
                    int backScore = SURFDetect(imagineTestBack, imagineLuata);
                    if (backScore > max)
                    {
                        max = backScore;
                    }
                }
            }

            return max + "|" + bancnota.Name;
        }

        /// <summary>
        /// Runs SURF feature detection on both images, matches features of the test
        /// (model) image against the taken (observed) image, filters the matches by
        /// uniqueness and by size/orientation consistency, and — when a homography
        /// can be established — returns the number of surviving matched features.
        /// </summary>
        /// <param name="imageTest">Model (reference) banknote image.</param>
        /// <param name="imageTaken">Observed (captured) image to search in.</param>
        /// <returns>The matched-feature count when a homography is found; otherwise 0.</returns>
        public static int SURFDetect(Bitmap imageTest, Bitmap imageTaken)
        {
            int length = 0;

            // Hessian threshold 600, non-extended (64-element) descriptors.
            SURFDetector surfParam = new SURFDetector(600, false);

            Stopwatch watch = Stopwatch.StartNew();

            // Emgu Image<,>, Features2DTracker and HomographyMatrix own unmanaged
            // memory; dispose them deterministically (the original leaked them).
            using (Image<Gray, Byte> modelImage = new Image<Gray, byte>(imageTest))
            using (Image<Gray, Byte> observedImage = new Image<Gray, byte>(imageTaken))
            {
                // Extract features from the model image and build a tracker over them.
                ImageFeature[] modelFeatures = surfParam.DetectFeatures(modelImage, null);
                using (Features2DTracker tracker = new Features2DTracker(modelFeatures))
                {
                    // Extract features from the observed image and match (k=2, 20 trees).
                    ImageFeature[] imageFeatures = surfParam.DetectFeatures(observedImage, null);
                    Features2DTracker.MatchedImageFeature[] matchedFeatures = tracker.MatchFeature(imageFeatures, 2, 20);

                    // Reject ambiguous matches (ratio test 0.8), then matches whose
                    // scale/rotation disagrees with the consensus (1.5 scale, 20 bins).
                    matchedFeatures = Features2DTracker.VoteForUniqueness(matchedFeatures, 0.8);
                    matchedFeatures = Features2DTracker.VoteForSizeAndOrientation(matchedFeatures, 1.5, 20);

                    using (HomographyMatrix homography = Features2DTracker.GetHomographyMatrixFromMatchedFeatures(matchedFeatures))
                    {
                        // A non-null homography means the model was located in the
                        // observed image; report how many matches supported it.
                        if (homography != null)
                        {
                            length = matchedFeatures.Length;
                        }
                    }
                }
            }

            watch.Stop();
            // "Time for SURF:" — diagnostic timing output (Romanian, kept as-is).
            Console.WriteLine("Timp pentru SURF:" + watch.ElapsedMilliseconds + "ms");
            return length;
        }

        /// <summary>
        /// Check if both the managed and unmanaged code are compiled for the same architecture.
        /// </summary>
        /// <returns>True if the CLR bitness matches the unmanaged (C++) code bitness; otherwise false.</returns>
        static bool IsPlaformCompatable()
        {
            // IntPtr is 4 bytes on a 32-bit CLR and 8 bytes on a 64-bit CLR.
            int clrBitness = Marshal.SizeOf(typeof(IntPtr)) * 8;
            if (clrBitness != CvInvoke.UnmanagedCodeBitness)
            {
                Debug.WriteLine(String.Format("Platform mismatched: CLR is {0} bit, C++ code is {1} bit."
                   + " Please consider recompiling the executable with the same platform target as C++ code.",
                   clrBitness, CvInvoke.UnmanagedCodeBitness));
                return false;
            }
            return true;
        }
    }
    }



