﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.ComponentModel;
using Emgu.CV;
using Emgu.CV.Structure;
using System.Windows.Forms;
using System.Drawing;
using System.Threading;

namespace ThreeDI.IP
{
    /// <summary>
    /// Singleton that owns the webcam capture device and runs Haar-cascade face
    /// detection on each grabbed frame, estimating the head's distance from the
    /// camera from the apparent face width.
    /// </summary>
    public class DetectionManager
    {
        #region [Members]

        private const double _ImageWidth = 320;
        private const double _ImageHeight = 240;

        // Approximation of the camera's horizontal field of view: 53 degrees
        // spread across _ImageWidth pixels (calibrated for a 320x240 capture).
        private const double _HorizontalGradesPerPixel = 53.0 / _ImageWidth;
        //private const double _VerticalGradesPerPixel = 40.0 / _ImageHeight; //for now it is useless

        private const double _HeadWidth = 0.12; // assumed physical head width in meters (12 cm)

        // Degrees-to-radians conversion factor (pi / 180).
        // Renamed from "_PI": the value is not pi itself, which was misleading.
        private const double _DegreesToRadians = Math.PI / 180;

        private Capture _capture;

        HaarCascade _FaceTrainingSet;
        //HaarCascade _EyeTrainingSet;

        #endregion

        #region [Singeleton]

        // Gate object for thread-safe lazy initialization of the singleton.
        private static readonly object _InstanceLock = new object();

        // volatile so the double-checked locking pattern below is safe under
        // the CLR memory model.
        private static volatile DetectionManager _Manager;

        /// <summary>
        /// Gets the single <see cref="DetectionManager"/> instance, creating it
        /// (and opening the capture device) on first access. Thread-safe via
        /// double-checked locking; the original unsynchronized check could
        /// construct two instances — and open the camera twice — under
        /// concurrent first access.
        /// </summary>
        public static DetectionManager Instance
        {
            get
            {
                if (_Manager == null)
                {
                    lock (_InstanceLock)
                    {
                        if (_Manager == null)
                            _Manager = new DetectionManager();
                    }
                }
                return _Manager;
            }
        }

        #endregion

        #region [Constructor]

        /// <summary>
        /// Opens capture device 0 at 320x240 and loads the frontal-face Haar
        /// cascade from the Cascades folder next to the executable.
        /// </summary>
        private DetectionManager()
        {
            // Initialize the capture device (device index 0 = default webcam).
            _capture = new Capture(0);
            _capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_WIDTH, _ImageWidth);
            _capture.SetCaptureProperty(Emgu.CV.CvEnum.CAP_PROP.CV_CAP_PROP_FRAME_HEIGHT, _ImageHeight);

            // Read the HaarCascade training data.
            _FaceTrainingSet = new HaarCascade(@"Cascades\haarcascade_frontalface_alt_tree.xml");
            //_EyeTrainingSet = new HaarCascade(@"Cascades\haarcascade_mcs_lefteye.xml");
        }

        #endregion

        #region [ Tracking Process ]

        /// <summary>
        /// Grabs one frame from the capture device, detects the biggest face in
        /// it, and returns its bounding rectangle plus an estimated head
        /// distance. When no face is found the returned <see cref="TrackingInfo"/>
        /// is left at its default values.
        /// </summary>
        /// <returns>Tracking data for the current frame.</returns>
        public TrackingInfo ProcessFrame()
        {
            // NOTE(review): the frame returned by QueryFrame is assumed to be an
            // Emgu-managed buffer we must not dispose — confirm for the Emgu
            // version in use.
            Image<Bgr, Byte> frame = _capture.QueryFrame();

            var info = new TrackingInfo();

            // The grayscale copy is ours, so dispose it each frame; the
            // original leaked one native image buffer per call.
            using (Image<Gray, Byte> gray = frame.Convert<Gray, Byte>())
            {
                // Normalize brightness and increase contrast of the image.
                gray._EqualizeHist();

                // Detect faces in the grayscale image. The first dimension of the
                // result is the channel, the second is the index of the rectangle
                // within that channel. FIND_BIGGEST_OBJECT restricts the detector
                // to the single largest candidate, so at most one face is reported.
                MCvAvgComp[][] facesDetected = gray.DetectHaarCascade(_FaceTrainingSet,
                   1.1,
                   10,
                   Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.DO_CANNY_PRUNING | Emgu.CV.CvEnum.HAAR_DETECTION_TYPE.FIND_BIGGEST_OBJECT,
                   new Size(20, 20));

                if (facesDetected.Length > 0 && facesDetected[0].Length > 0)
                {
                    MCvAvgComp face = facesDetected[0][0];
                    info.FaceRectangle = face.rect;

                    // Distance estimation: the face width in pixels subtends a
                    // known angle (pixels * degrees-per-pixel, converted to
                    // radians); with an assumed physical head width, the distance
                    // follows from half-width over the tangent of the half-angle.
                    double angle = face.rect.Width * _HorizontalGradesPerPixel * _DegreesToRadians;
                    info.HeadDistance = (_HeadWidth / 2) / Math.Tan(angle / 2); // in meters

                    // NOTE(review): empirical re-scaling of the raw distance;
                    // 1 + 5 * d * 100 looks like a device/application-specific
                    // calibration — confirm against the consuming code.
                    info.HeadDistance = 1 + 5 * info.HeadDistance * 100;
                }
            }

            return info;
        }

        #endregion
    }
}
