﻿using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Data;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Diagnostics;
using System.Windows.Forms;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.Util;
using Emgu.CV.VideoSurveillance;
using Emgu.CV.OCR;
using Emgu.CV.GPU;
using Emgu.CV.UI;
using Emgu.CV.CvEnum;


namespace OpenCV_STD
{
    class ProcessEngine
    {
        #region JUNKS !!!!!!!!

        // Scale a rectangle from the half-size detection image back up to
        // full-size image coordinates (doubles both position and size).
        private Rectangle enlarge(Rectangle rect)
        {
            int x = rect.X * 2;
            int y = rect.Y * 2;
            return new Rectangle(x, y, rect.Width * 2, rect.Height * 2);
        }

        // Map a rectangle from full-size coordinates down to the half-size
        // detection image (integer division halves position and size).
        private Rectangle shrink(Rectangle rect)
        {
            Point origin = new Point(rect.X / 2, rect.Y / 2);
            Size half = new Size(rect.Width / 2, rect.Height / 2);
            return new Rectangle(origin, half);
        }

        #endregion

        // True when the most recent FindFace() call detected at least one face.
        public bool foundFace { set; get; }
        // Faces found by the last FindFace() call, in the full-size image's
        // global coordinates; null until FindFace() has run once.
        Rectangle[] faceRects;
        // First entry of faceRects, cached whenever foundFace is set.
        Rectangle firstFace = new Rectangle();

        // NOTE(review): only assigned inside commented-out eye-detection code
        // visible in this file, so it may stay null -- confirm before relying on it.
        Rectangle[] eyeRects;

        // ROI of the source image at the time FindFace() was called; used to
        // translate half-size detections into global coordinates.
        Rectangle searchBound;

        // Cascade definition files, expected in the application's working directory.
        string faceFileName = "haarcascade_frontalface_default.xml";
        string eyeFileName = "haarcascade_eye.xml";

        // Haar Feature Classifier (used by the CPU detection path)
        HaarCascade face;
        HaarCascade eye;


        // Get Results
        // Returns the face rectangles found by the last FindFace() call, in the
        // full-size image's global coordinates (null before the first call).
        public Rectangle[] GetFaceRect()
        {
            return faceRects;
        }

        // Constructor: loads the Haar cascade files (must be present in the
        // working directory) and initializes the Kalman filter.
        public ProcessEngine()
        {
            foundFace = false;

            // Robustness fix: start with empty (not null) arrays so callers such
            // as TrackFace() and GetFaceRect() cannot hit a
            // NullReferenceException before the first FindFace() call.
            faceRects = new Rectangle[0];
            eyeRects = new Rectangle[0];

            face = new HaarCascade(faceFileName);
            eye = new HaarCascade(eyeFileName);
            KalmanInit();
        }



        // Use Classifier to search for face in the image
        // Use Classifier to search for face in the image.
        //
        // Detection runs on a half-size copy of imageLarge for speed; every
        // rectangle found there is scaled back up (enlarge) and offset by the
        // source image's ROI, so faceRects ends up in full-size, global
        // coordinates.  Side effects: faceRects is (re)populated and
        // foundFace/firstFace are updated at the end.
        // Returns the half-size image with the detections drawn on it.
        public Image<Bgr, Byte> FindFace( Image<Bgr, Byte> imageLarge)
        {
            searchBound = imageLarge.ROI;
            Image<Bgr, Byte> imageSmall = imageLarge.Resize(0.5, INTER.CV_INTER_NN);

            Stopwatch watch;

            if (GpuInvoke.HasCuda)
            {
                // GPU path. Locals renamed (gpuFace/gpuEye) so they no longer
                // shadow the HaarCascade fields `face`/`eye`.
                using (GpuCascadeClassifier gpuFace = new GpuCascadeClassifier(faceFileName))
                using (GpuCascadeClassifier gpuEye = new GpuCascadeClassifier(eyeFileName))
                {
                    watch = Stopwatch.StartNew();
                    using (GpuImage<Bgr, Byte> gpuImage = new GpuImage<Bgr, byte>(imageSmall))
                    using (GpuImage<Gray, Byte> gpuGray = gpuImage.Convert<Gray, Byte>())
                    {
                        Rectangle[] faceRegion = gpuFace.DetectMultiScale(gpuGray, 1.1, 10, Size.Empty);

                        // BUGFIX: this branch previously never stored its
                        // detections, so faceRects stayed null and the length
                        // check at the end of the method crashed on CUDA machines.
                        faceRects = new Rectangle[faceRegion.Length];
                        int pt = 0;

                        foreach (Rectangle f in faceRegion)
                        {
                            // save faces -- coordinates: global, large scale
                            // (same convention as the CPU path below)
                            faceRects[pt] = enlarge(f);
                            faceRects[pt].Offset(searchBound.Location);
                            pt++;

                            //draw the face detected in the 0th (gray) channel with blue color
                            imageSmall.Draw(f, new Bgr(Color.Blue), 2);
                            using (GpuImage<Gray, Byte> faceImg = gpuGray.GetSubRect(f))
                            {
                                //For some reason a clone is required.
                                //Might be a bug of GpuCascadeClassifier in opencv
                                using (GpuImage<Gray, Byte> clone = faceImg.Clone())
                                {
                                    Rectangle[] eyeRegion = gpuEye.DetectMultiScale(clone, 1.1, 10, Size.Empty);

                                    foreach (Rectangle e in eyeRegion)
                                    {
                                        Rectangle eyeRect = e;
                                        eyeRect.Offset(f.X, f.Y);
                                        imageSmall.Draw(eyeRect, new Bgr(Color.Red), 2);
                                    }
                                }
                            }
                        }
                    }
                    watch.Stop();
                }
            }
            else
            {
                // CPU path: reuse the HaarCascade fields loaded in the constructor.
                watch = Stopwatch.StartNew();
                using (Image<Gray, Byte> graySmall = imageSmall.Convert<Gray, Byte>()) //Convert it to Grayscale
                {
                    //normalizes brightness and increases contrast of the image
                    graySmall._EqualizeHist();

                    //Detect the faces from the gray scale image and store the
                    //locations as rectangles
                    MCvAvgComp[] facesDetected = face.Detect(
                       graySmall,
                       1.2,
                       2,
                       Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING | Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.FIND_BIGGEST_OBJECT,
                       new Size(60, 60));

                    // clear faces
                    faceRects = new Rectangle[facesDetected.Length];
                    int pt = 0;

                    foreach (MCvAvgComp f in facesDetected)
                    {
                        // save faces -- coordinates: global, large scale
                        faceRects[pt] = enlarge(f.rect);
                        faceRects[pt].Offset(searchBound.Location);
                        pt++;

                        //draw the face detected in the 0th (gray) channel with blue color
                        imageSmall.Draw(f.rect, new Bgr(Color.Blue), 2);

                        // NOTE(review): a large block of commented-out experimental
                        // eye-detection code (face-metric ROI heuristic plus per-eye
                        // Haar detection that populated eyeRects) was removed here;
                        // recover it from version control if it is ever revived.
                    }
                }
                watch.Stop();
            }

            // Defensive null guard in case a future branch forgets to assign.
            if (faceRects == null || faceRects.Length == 0)
            {
                foundFace = false;
            }
            else
            {
                foundFace = true;
                firstFace = faceRects[0];
            }

            return imageSmall;
        }


        #region Kalman Filter

        // 4-state (x, y, vx, vy) / 2-measurement (x, y) Kalman filter used to
        // smooth tracked point coordinates; configured in KalmanInit().
        public Kalman kal = new Kalman(4, 2, 0);

        // Configure `kal` as a constant-velocity tracker:
        // state = (x, y, vx, vy), measurement = (x, y).
        private void KalmanInit()
        {
            // Initial state: origin with zero velocity.
            Matrix<float> state = new Matrix<float>(new float[]
                {
                    0.0f, 0.0f, 0.0f, 0.0f
                });
            kal.CorrectedState = state;

            // Transition: position += velocity each step, velocity constant.
            kal.TransitionMatrix = new Matrix<float>(new float[,]
                {
                    {1f, 0, 1, 0},
                    {0, 1f, 0, 1},
                    {0, 0, 1, 0},
                    {0, 0, 0, 1}
                });

            // Only the (x, y) position is observed.  The original code also
            // called SetIdentity() on this matrix, which yields exactly the
            // same 2x4 values; the redundant call has been removed.
            kal.MeasurementMatrix = new Matrix<float>(new float[,]
                {
                    { 1, 0, 0, 0 },
                    { 0, 1, 0, 0 }
                });

            // Noise covariances: small process noise (1e-2), larger
            // measurement noise (1e-1), identity initial error covariance.
            kal.ProcessNoiseCovariance = new Matrix<float>(4, 4);
            kal.ProcessNoiseCovariance.SetIdentity(new MCvScalar(1.0e-2));
            kal.MeasurementNoiseCovariance = new Matrix<float>(2, 2);
            kal.MeasurementNoiseCovariance.SetIdentity(new MCvScalar(1.0e-1));
            kal.ErrorCovariancePost = new Matrix<float>(4, 4);
            kal.ErrorCovariancePost.SetIdentity();
        }


        // Run one predict/correct cycle of the Kalman filter on a measured
        // point.  Returns { predicted point, corrected (estimated) point }.
        public PointF[] filterPoints(PointF pt)
        {
            // Measurement vector embedded in a state-sized column: (x, y, 0, 0).
            Matrix<float> state = new Matrix<float>(4, 1);
            state[0, 0] = pt.X;
            state[1, 0] = pt.Y;
            state[2, 0] = 0f;
            state[3, 0] = 0f;

            Matrix<float> prediction = kal.Predict();
            PointF predictPoint = new PointF(prediction[0, 0], prediction[1, 0]);

            // Project the state into measurement space (2x1) and correct.
            // (An unused local `measurePoint` built from this product was removed.)
            Matrix<float> measurement = kal.MeasurementMatrix * state;
            Matrix<float> estimated = kal.Correct(measurement);
            PointF estimatedPoint = new PointF(estimated[0, 0], estimated[1, 0]);

            return new PointF[] { predictPoint, estimatedPoint };
        }


        #endregion

        // Run detection only when no face is currently known; otherwise return
        // the frame untouched.
        public Image<Bgr, Byte> TrackFace(Image<Bgr, Byte> image)
        {
            // BUGFIX: faceRects is null until FindFace() has run once, so the
            // original `faceRects.Length` check threw NullReferenceException on
            // the first frame.  Treat null the same as "no face found yet".
            if (faceRects == null || faceRects.Length == 0)
            {
                return FindFace(image);
            }

            // A face is already recorded; skip the expensive detection pass.
            return image;
        }

        #region Interface

        // Returns the first face found by the last FindFace() call (global,
        // full-size coordinates); an empty rectangle before any detection.
        public Rectangle GetFirstFace()
        {
            return firstFace;
        }

        // Returns the stored eye rectangles.  NOTE(review): in this file
        // eyeRects is only assigned inside commented-out detection code, so
        // this may return null -- confirm before using.
        public Rectangle[] GetEyeRects()
        {
            return eyeRects;
        }

        #endregion

    }
}
