﻿using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using FinalYear.Helpers;
using FinalYear.OpticalFlow.Properties;
using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Linq;
using System.Threading;

namespace FinalYear.OpticalFlow.Classes
{
    /// <summary>
    /// Navigator class that binds to the main form
    /// </summary>
    public class Navigator : IDisposable
    {
        #region Private Variables

        // Block size handed to GoodFeaturesToTrack; exposed via BlockSize.
        private int m_BlockSize;
        // Camera capture handle (device 0, opened by the constructor).
        private Capture m_Capture;
        // Scratch pyramid buffer for the current frame (allocated on the first frame).
        private Image<Gray, Byte> m_CurrentPyrBufferParam;
        // Backing field for KMean.
        private double m_KMean;
        // Maximum number of corners requested from GoodFeaturesToTrack.
        private int m_MaxFeatureCount;
        // Minimum distance between detected corners (GoodFeaturesToTrack).
        private double m_MinDistance;
        // Flow-vector magnitude threshold used when building the OpticalFlowStore.
        private int m_MinimumCollisionMagnitude;
        // Termination criteria for the pyramidal Lucas-Kanade tracker.
        private MCvTermCriteria m_OpticalFlowTerminationCriteria;
        // Scratch pyramid buffer for the previous frame.
        private Image<Gray, Byte> m_PreviousPyrBufferParam;
        // Quality level passed to GoodFeaturesToTrack.
        private double m_QualityLevel;
        // Termination criteria for FindCornerSubPix refinement.
        private MCvTermCriteria m_SubCornerTerminationCriteria;
        // Per-feature status from PyrLK: 1 = tracked, 0 = lost.
        private byte[] m_TrackingStatus;
        // Whether sonar readings gate the collision warnings.
        private bool useSonar;
        // Backing field for TranslateDirection ("LEFT"/"RIGHT" or empty).
        private string translateDirection = string.Empty;

        // Histogram of recent collision readings (created in the constructor).
        private CollisionHistogram currentCollisionHistogram { get; set; }

        // Line equations collected by DrawTrackedFeaturesMarkers.
        // NOTE(review): the list is created but never populated in this class — confirm intent.
        private List<LineEquation> equations { get; set; }

        // Height of the current flow image, refreshed on each ProcessFrame.
        private int frameHeight { get; set; }

        // Half the current flow image width (the vertical centre line).
        private int frameWidthCenter { get; set; }

        // Per-frame optical flow aggregation, rebuilt by CalculateFlowByRegion.
        private OpticalFlowStore opticalFlowStoreData { get; set; }

        // Sonar range sensor used to confirm obstacles when UseSonar is set.
        private SonarSensor sensor { get; set; }

        // Audio warning player for impending collisions.
        private CollisionWarning soundWarning { get; set; }

        // GPU optical flow implementation (currently unused; see constructor).
        private OpticalFlowGPU flow { get; set; }

        #endregion Private Variables

        #region Form Constructor

        /// <summary>
        /// Initializes a new instance of the <see cref="Navigator" /> class:
        /// opens camera 0, restores the feature-detection tuning values from
        /// the saved user settings, and configures the termination criteria
        /// for sub-pixel corner refinement and pyramidal Lucas-Kanade flow.
        /// </summary>
        public Navigator()
        {
            // GPU flow path is disabled; the CPU PyrLK implementation is used.
            // flow = new OpticalFlowGPU();
            soundWarning = new CollisionWarning();
            sensor = new SonarSensor();
            currentCollisionHistogram = new CollisionHistogram();
            m_Capture = new Capture(0);
            // Tuning values are persisted back to Settings in Dispose().
            m_MaxFeatureCount = Settings.Default.MaxFeatureCount;
            m_BlockSize = Settings.Default.BlockSize;
            m_QualityLevel = Settings.Default.QualityLevel;
            m_MinDistance = Settings.Default.MinDistance;
            m_MinimumCollisionMagnitude = Settings.Default.MinimumCollisionMagnitude;

            // Corner refinement: stop after 20 iterations or epsilon 0.1.
            m_SubCornerTerminationCriteria.max_iter = 20;
            m_SubCornerTerminationCriteria.epsilon = 0.1;
            m_SubCornerTerminationCriteria.type = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_EPS | Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;

            // Optical flow: stop after 20 iterations or epsilon 0.3.
            m_OpticalFlowTerminationCriteria.max_iter = 20;
            m_OpticalFlowTerminationCriteria.epsilon = 0.3;
            m_OpticalFlowTerminationCriteria.type = Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_EPS | Emgu.CV.CvEnum.TERMCRIT.CV_TERMCRIT_ITER;
        }

        #endregion Form Constructor

        #region Public Variables

        /// <summary>
        /// Occurs when one of the tuning parameters (block size, K mean, max
        /// feature count, min distance, quality level or the sonar flag)
        /// changes.
        /// </summary>
        public event EventHandler Changed;

        /// <summary>
        /// Gets or sets the block size used for feature detection.
        /// Raises <see cref="Changed"/> only when the value actually changes.
        /// </summary>
        /// <value>
        /// The size of the block.
        /// </value>
        public int BlockSize
        {
            get
            {
                return m_BlockSize;
            }
            set
            {
                if (value == m_BlockSize)
                {
                    return;
                }

                m_BlockSize = value;
                RaiseChangedEvent();
            }
        }

        /// <summary>
        /// Gets the optical flow data gathered for the current frame. Before
        /// any frame has been processed it falls back to an empty store built
        /// from the current <see cref="MinimumCollisionMagnitude"/>.
        /// </summary>
        /// <value>
        /// The collision data.
        /// </value>
        public OpticalFlowStore OpticalFlowStoreData
        {
            get
            {
                if (this.opticalFlowStoreData == null)
                {
                    // Cache the fallback so repeated reads observe the same
                    // instance; previously a fresh store was allocated on
                    // every access, so callers could never share state.
                    this.opticalFlowStoreData = new OpticalFlowStore(MinimumCollisionMagnitude);
                }
                return this.opticalFlowStoreData;
            }
        }

        /// <summary>
        /// Gets the current collision histogram. The backing property is
        /// created in the constructor; if it is ever null a fresh histogram
        /// is created once and cached (previously a new instance was
        /// allocated on every access, discarding any state callers wrote).
        /// </summary>
        /// <value>
        /// The current collision histogram.
        /// </value>
        public CollisionHistogram CurrentCollisionHistogram
        {
            get
            {
                if (this.currentCollisionHistogram == null)
                {
                    this.currentCollisionHistogram = new CollisionHistogram();
                }
                return this.currentCollisionHistogram;
            }
        }

        /// <summary>
        /// Gets the sub-pixel refined features detected on the current frame.
        /// </summary>
        /// <value>
        /// The current found features.
        /// </value>
        public PointF[] CurrentFoundFeatures { get; private set; }

        /// <summary>
        /// Gets the number of features detected on the current frame, or zero
        /// when no frame has been processed yet.
        /// </summary>
        /// <value>
        /// The current found features count.
        /// </value>
        public int CurrentFoundFeaturesCount
        {
            get
            {
                PointF[] features = this.CurrentFoundFeatures;
                return features == null ? 0 : features.Length;
            }
        }

        /// <summary>
        /// Gets the grayscale version of the current frame.
        /// </summary>
        /// <value>
        /// The current gray image.
        /// </value>
        public Image<Gray, Byte> CurrentGrayImage { get; private set; }

        /// <summary>
        /// Gets the current (rotated) colour frame from the camera.
        /// </summary>
        /// <value>
        /// The current image.
        /// </value>
        public Image<Bgr, Byte> CurrentImage { get; private set; }

        /// <summary>
        /// Gets a clone of the current frame with the guide lines and flow
        /// vectors drawn onto it.
        /// </summary>
        /// <value>
        /// The flow image.
        /// </value>
        public Image<Bgr, Byte> FlowImage { get; private set; }

        /// <summary>
        /// Gets or sets the K mean value.
        /// Raises <see cref="Changed"/> only when the value actually changes.
        /// </summary>
        /// <value>
        /// The K mean.
        /// </value>
        public double KMean
        {
            get
            {
                return m_KMean;
            }
            set
            {
                if (value == m_KMean)
                {
                    return;
                }

                m_KMean = value;
                RaiseChangedEvent();
            }
        }

        /// <summary>
        /// Gets or sets the maximum number of features to detect per frame.
        /// Raises <see cref="Changed"/> only when the value actually changes.
        /// </summary>
        /// <value>
        /// The max feature count.
        /// </value>
        public int MaxFeatureCount
        {
            get
            {
                return m_MaxFeatureCount;
            }
            set
            {
                if (value == m_MaxFeatureCount)
                {
                    return;
                }

                m_MaxFeatureCount = value;
                RaiseChangedEvent();
            }
        }

        /// <summary>
        /// Gets or sets the minimum distance allowed between two detected
        /// features (passed to GoodFeaturesToTrack). Raises
        /// <see cref="Changed"/> when the value changes.
        /// </summary>
        /// <value>
        /// The min distance.
        /// </value>
        public double MinDistance
        {
            get { return m_MinDistance; }
            set
            {
                if (value != m_MinDistance)
                {
                    m_MinDistance = value;
                    RaiseChangedEvent();
                }
            }
        }

        /// <summary>
        /// Gets or sets the minimum flow-vector magnitude treated as a
        /// potential collision (used when building the per-frame
        /// <see cref="OpticalFlowStore"/>).
        /// NOTE(review): unlike the other tuning setters this one does not
        /// raise <see cref="Changed"/> — the call is commented out below;
        /// confirm whether that is intentional.
        /// </summary>
        public int MinimumCollisionMagnitude
        {
            get
            {
                return m_MinimumCollisionMagnitude;
            }
            set
            {
                m_MinimumCollisionMagnitude = value;

                //RaiseChangedEvent();
            }
        }

        /// <summary>
        /// Gets the number of features the optical flow tracker failed to
        /// follow into the current frame (tracking status 0).
        /// </summary>
        /// <value>
        /// The not tracked features count.
        /// </value>
        public int NotTrackedFeaturesCount { get; private set; }

        /// <summary>
        /// Gets the features detected on the previous frame; these are the
        /// starting points handed to the Lucas-Kanade tracker.
        /// </summary>
        public PointF[] PreviousFoundFeatures { get; private set; }

        /// <summary>
        /// Gets the number of features detected on the previous frame, or
        /// zero when fewer than one frame has been processed.
        /// </summary>
        /// <value>
        /// The previous found features count.
        /// </value>
        public int PreviousFoundFeaturesCount
        {
            get
            {
                PointF[] features = this.PreviousFoundFeatures;
                return features == null ? 0 : features.Length;
            }
        }

        /// <summary>
        /// Gets the grayscale version of the previous frame.
        /// </summary>
        public Image<Gray, Byte> PreviousGrayImage { get; private set; }

        /// <summary>
        /// Gets or sets the corner-detection quality level (passed to
        /// GoodFeaturesToTrack). Raises <see cref="Changed"/> when the value
        /// changes.
        /// </summary>
        /// <value>
        /// The quality level.
        /// </value>
        public double QualityLevel
        {
            get { return m_QualityLevel; }
            set
            {
                if (value != m_QualityLevel)
                {
                    m_QualityLevel = value;
                    RaiseChangedEvent();
                }
            }
        }

        /// <summary>
        /// Gets the feature positions reported by the optical flow tracker
        /// for the current frame.
        /// </summary>
        public PointF[] TrackedFeatures { get; private set; }

        /// <summary>
        /// Gets the number of features reported by the tracker for the
        /// current frame, or zero before any flow has been computed.
        /// </summary>
        /// <value>
        /// The tracked features count.
        /// </value>
        public int TrackedFeaturesCount
        {
            get
            {
                PointF[] tracked = this.TrackedFeatures;
                return tracked == null ? 0 : tracked.Length;
            }
        }

        /// <summary>
        /// Gets or sets the direction ("LEFT"/"RIGHT", from
        /// <see cref="SCREENREGION"/>) the user should translate towards to
        /// avoid a collision; empty when no advice is available.
        /// </summary>
        public string TranslateDirection
        {
            get
            {
                return string.IsNullOrEmpty(this.translateDirection)
                    ? string.Empty
                    : this.translateDirection;
            }
            set
            {
                this.translateDirection = value;
            }
        }

        /// <summary>
        /// Gets or sets a value indicating whether the sonar sensor should
        /// gate collision warnings.
        /// NOTE(review): this setter raises <see cref="Changed"/> even when
        /// assigned the same value, unlike the other setters — confirm this
        /// is intentional.
        /// </summary>
        public bool UseSonar
        {
            get { return useSonar; }
            set
            {
                useSonar = value;
                RaiseChangedEvent();
            }
        }

        #endregion Public Variables

        /// <summary>
        /// Persists the current tuning parameters back to the user settings
        /// and releases the capture device and pyramid scratch buffers.
        /// NOTE(review): the settings are assigned here but Save() is never
        /// called in this class — confirm persistence happens elsewhere.
        /// </summary>
        public void Dispose()
        {
            Settings.Default.MaxFeatureCount = this.MaxFeatureCount;
            Settings.Default.BlockSize = this.BlockSize;
            Settings.Default.QualityLevel = this.QualityLevel;
            Settings.Default.MinDistance = this.MinDistance;
            Settings.Default.MinimumCollisionMagnitude = this.MinimumCollisionMagnitude;

            if (m_Capture != null)
            {
                m_Capture.Dispose();
                m_Capture = null;
            }

            // The pyramid buffers are only allocated once the first frame has
            // been processed; guard against disposing a Navigator that never
            // saw a frame (previously this threw a NullReferenceException).
            if (m_CurrentPyrBufferParam != null)
            {
                m_CurrentPyrBufferParam.Dispose();
                m_CurrentPyrBufferParam = null;
            }

            if (m_PreviousPyrBufferParam != null)
            {
                m_PreviousPyrBufferParam.Dispose();
                m_PreviousPyrBufferParam = null;
            }
        }

        /// <summary>
        /// Grabs and processes one camera frame: rotates it, converts it to
        /// grayscale, detects good features to track, refines them to
        /// sub-pixel accuracy and — once two frames are available — computes
        /// the optical flow and the resulting collision/balance data.
        /// </summary>
        public void ProcessFrame()
        {
            // Rotate 90 degrees — presumably the camera is mounted sideways;
            // TODO confirm against the hardware setup.
            this.CurrentImage = m_Capture.QueryFrame().Rotate(90, new Bgr());
            this.FlowImage = this.CurrentImage.Clone();

            // Keep the previous gray frame so flow can be computed between
            // consecutive frames.
            this.PreviousGrayImage = this.CurrentGrayImage;
            this.CurrentGrayImage = this.CurrentImage.Convert<Gray, Byte>();
            this.frameWidthCenter = FlowImage.Size.Width / 2;
            this.frameHeight = FlowImage.Size.Height;

            // Detect corner features (one array per channel; gray => index 0).
            PointF[][] foundFeaturesInChannels = this.CurrentGrayImage.GoodFeaturesToTrack(this.MaxFeatureCount, this.QualityLevel, this.MinDistance, this.BlockSize);

            //if (this.PreviousGrayImage != null)
            //{
            //    flow.GoodFeaturesToTrack(this.PreviousGrayImage, this.CurrentGrayImage, this.MaxFeatureCount, this.QualityLevel, this.MinDistance);

            //}

            // Refine the found feature locations to sub-pixel accuracy.
            this.CurrentGrayImage.FindCornerSubPix(foundFeaturesInChannels, new Size(Constants.WINDOWSIZE, Constants.WINDOWSIZE), new Size(-1, -1), m_SubCornerTerminationCriteria);
            this.PreviousFoundFeatures = this.CurrentFoundFeatures;
            this.CurrentFoundFeatures = foundFeaturesInChannels[0];
            DrawCenterAndGroundLines();

            //DrawFoundFeaturesMarkers();

            if (this.PreviousGrayImage == null)
            {
                // First frame: allocate the pyramid scratch buffers for PyrLK
                // (width + 8, height / 3 — presumably the layout
                // cvCalcOpticalFlowPyrLK expects; confirm against OpenCV docs).
                m_PreviousPyrBufferParam = new Image<Gray, byte>(this.CurrentImage.Width + 8, this.CurrentImage.Height / 3);
                m_CurrentPyrBufferParam = new Image<Gray, byte>(this.CurrentImage.Width + 8, this.CurrentImage.Height / 3);
            }
            else
            {
                ComputeOpticalFlow();

                //DrawTrackedFeaturesMarkers();
                DrawFlowVectors();
                CalculateFlowByRegion();
                CalculateBalanceStrategy();
            }
        }

        /// <summary>
        /// Aggregates the tracked flow vectors into screen regions (left,
        /// centre, right) and accumulates a collision-time estimate for the
        /// vectors that fall inside the centre band of the frame.
        /// </summary>
        private void CalculateFlowByRegion()
        {
            this.opticalFlowStoreData = new OpticalFlowStore(this.MinimumCollisionMagnitude);

            for (int i = 0; i < this.TrackedFeatures.Length; i++)
            {
                // if the features have been mapped from frames
                if (m_TrackingStatus[i] == 1)
                {
                    PointF originalPoint = this.PreviousFoundFeatures[i];
                    PointF matchedPoint = this.TrackedFeatures[i];

                    GroundCoordinate measurer = new GroundCoordinate(originalPoint, matchedPoint);

                    // Only consider vectors below the collision threshold.
                    // NOTE(review): the original comment said "twice" the
                    // minimum magnitude but the factor is 1.5 — confirm which
                    // is intended.
                    if (measurer.hypotenuse < m_MinimumCollisionMagnitude * 1.5)
                    {
                        // Centre band: x and y both within [2/7, 5/7] of the
                        // frame width/height — accumulate the TTC estimate.
                        if ((originalPoint.Y > frameHeight * 2 / 7) && (originalPoint.X > (frameWidthCenter * 2) * 2 / 7) && (originalPoint.Y < frameHeight * 5 / 7) && (originalPoint.X < (frameWidthCenter * 2) * 5 / 7))
                        {
                            opticalFlowStoreData.collisionCount++;
                            opticalFlowStoreData.collisionTime += (originalPoint.Y - matchedPoint.Y);

                            // Points that are situated closer to the camera generate larger optical flow vectors than
                            // points that are farther away.
                            this.opticalFlowStoreData.currentCenterDistances.Add(measurer);
                        }

                        // Left region: below the top 2/7 band and left of centre.
                        if ((originalPoint.Y > frameHeight * 2 / 7) && (originalPoint.X < frameWidthCenter))
                        {
                            measurer.currentRegion = SCREENREGION.LEFT;
                            opticalFlowStoreData.leftGroundCoordinates.Add(measurer);
                        }

                        // Right region: above the bottom 2/7 band and right of
                        // centre.
                        // NOTE(review): asymmetric with the left test
                        // (Y < 5/7 here versus Y > 2/7 there) — confirm
                        // whether this is intentional.
                        if ((originalPoint.Y < frameHeight * 5 / 7) && (originalPoint.X > frameWidthCenter))
                        {
                            measurer.currentRegion = SCREENREGION.RIGHT;
                            opticalFlowStoreData.rightGroundCoordinates.Add(measurer);
                        }
                    }
                }
            }

            opticalFlowStoreData.ComputeGroundCount();

            //m_collisionData.currentCenterDistances.HarvestFOE();

            // Fold the mean collision time over the centre-band features into
            // the running total.
            // NOTE(review): if no feature fell in the centre band,
            // collisionCount is 0 and this division may throw or yield
            // NaN/Infinity depending on the field types — confirm.
            opticalFlowStoreData.totalCollisionTime += opticalFlowStoreData.collisionTime / opticalFlowStoreData.collisionCount;
        }

        /// <summary>
        /// Calculates the balance strategy: decides which way the user should
        /// move to balance the optical flow between the left and right halves
        /// of the frame, and plays an audio warning when a collision looks
        /// imminent. The readings are meaningful whether the user, the
        /// obstacles, or both are in motion.
        /// </summary>
        private void CalculateBalanceStrategy()
        {
            OpticalFlowStore collideLocal = this.OpticalFlowStoreData;

            // The original if/else on useSonar invoked the same method in
            // both branches, so the conditional was redundant: the cumulative
            // collision is always computed.
            ComputeCumulativeCollision(collideLocal);

            if (currentCollisionHistogram != null)
            {
                // Warn only when the centre band reports a collision, enough
                // samples have accumulated, and the histogram's inter-warning
                // interval has elapsed.
                if (this.opticalFlowStoreData.currentCenterDistances.CheckCenterCollision() && (currentCollisionHistogram.CumulativeCollision.Count() > 4) && (currentCollisionHistogram.CheckTimeBetweenCollisions()))
                {
                    this.translateDirection = string.Empty;
                    if ((sensor.ObstacleInRange() == true) || (useSonar == false))
                    {
                        // Steer away from the side with the larger flow
                        // magnitude (larger flow suggests closer obstacles).
                        double leftFlow = this.OpticalFlowStoreData.left.flow;
                        double rightFlow = this.OpticalFlowStoreData.right.flow;

                        if (leftFlow > rightFlow)
                        {
                            // translate right
                            this.translateDirection = SCREENREGION.RIGHT.ToString();
                        }
                        else if (leftFlow < rightFlow)
                        {
                            // turn left
                            this.translateDirection = SCREENREGION.LEFT.ToString();
                        }

                        // NOTE(review): when leftFlow == rightFlow the
                        // direction stays empty and ParseEnum below may fail
                        // — confirm whether that case can occur in practice.

                        // A collision was flagged: remember when it happened,
                        // notify the user, then pause the thread briefly so
                        // no further warnings fire while the user changes
                        // direction.
                        this.currentCollisionHistogram.PreviousCollisionTime = this.currentCollisionHistogram.CurrentCollisionTime;
                        soundWarning.PlayWarning(EnumUtil.ParseEnum<SCREENREGION>(this.translateDirection));
                        currentCollisionHistogram.SonarCollision = false;

                        Thread.Sleep(100);
                    }
                    else
                    {
                        this.translateDirection = string.Empty;
                    }
                }
            }
            else
            {
                this.translateDirection = string.Empty;
            }
        }

        /// <summary>
        /// Folds the current frame's collision estimate into the histogram:
        /// when the total collision time is at or beyond either collision
        /// coefficient bound, the value is recorded (together with the sonar
        /// reading when sonar is enabled); otherwise the histogram is reset.
        /// (The original doc comment was malformed: "// &lt;summary&gt;" was
        /// missing its third slash.)
        /// </summary>
        /// <param name="flowStore">
        /// The optical flow data for the current frame. Renamed from
        /// "opticalFlowStoreData", which shadowed the private property of the
        /// same name.
        /// </param>
        private void ComputeCumulativeCollision(OpticalFlowStore flowStore)
        {
            FocusofExpansion.Calculate(PreviousFoundFeatures, TrackedFeatures);
            if ((flowStore.totalCollisionTime >= Constants.MAXIMUMCOLLISIONCOEFFICIENT) || (flowStore.totalCollisionTime <= Constants.MINIMUMCOLLISIONCOEFFICIENT))
            {
                currentCollisionHistogram.CumulativeCollision.Add(flowStore.totalCollisionTime);

                if (useSonar == true)
                {
                    currentCollisionHistogram.SonarCollision = true;
                    currentCollisionHistogram.SonarCollisionDistance = sensor.distance;
                }
                else
                {
                    currentCollisionHistogram.SonarCollision = false;
                    currentCollisionHistogram.SonarCollisionDistance = string.Empty;
                }

                currentCollisionHistogram.CurrentCollisionTime = DateTime.Now;
            }
            else
            {
                // Reading within the normal band: start the histogram afresh.
                currentCollisionHistogram.SonarCollision = false;
                currentCollisionHistogram.CumulativeCollision = new List<float>();
                currentCollisionHistogram.SonarCollisionDistance = string.Empty;
            }
        }

        /// <summary>
        /// Computes sparse optical flow between the previous and current gray
        /// frames using the pyramidal Lucas and Kanade algorithm, then counts
        /// how many features could not be tracked.
        /// </summary>
        private void ComputeOpticalFlow()
        {
            PointF[] trackedFeatures;
            float[] trackedErrors;

            LKFLOW_TYPE flags = LKFLOW_TYPE.DEFAULT;

            // Reuse the pyramid built for the previous frame: the buffer that
            // held the *current* frame's pyramid last iteration now describes
            // the *previous* frame. Swap the two buffers rather than aliasing
            // them — the original assignment made both fields reference the
            // same image, leaking the old previous buffer and setting up a
            // double-Dispose.
            if (this.TrackedFeatures != null)
            {
                Image<Gray, Byte> swap = m_PreviousPyrBufferParam;
                m_PreviousPyrBufferParam = m_CurrentPyrBufferParam;
                m_CurrentPyrBufferParam = swap;
                flags = LKFLOW_TYPE.CV_LKFLOW_PYR_A_READY;
            }

            Emgu.CV.OpticalFlow.PyrLK(
                this.PreviousGrayImage,
                this.CurrentGrayImage,
                m_PreviousPyrBufferParam,
                m_CurrentPyrBufferParam,
                this.PreviousFoundFeatures,
                new Size(Constants.WINDOWSIZE, Constants.WINDOWSIZE),
                5, // pyramid levels
                m_OpticalFlowTerminationCriteria,
                flags,
                out trackedFeatures,
                out m_TrackingStatus,
                out trackedErrors);

            this.TrackedFeatures = trackedFeatures;

            // Count the features whose status is 0 (lost between frames).
            int notTrackedFeatures = 0;
            for (int i = 0; i < m_TrackingStatus.Length; i++)
            {
                if (m_TrackingStatus[i] == 0)
                {
                    notTrackedFeatures++;
                }
            }
            this.NotTrackedFeaturesCount = notTrackedFeatures;
        }

        /// <summary>
        /// Overlays the vertical guide lines on the flow image: the screen
        /// centre plus the 2/7 and 5/7 boundaries of the full frame width.
        /// </summary>
        private void DrawCenterAndGroundLines()
        {
            int frameWidth = frameWidthCenter * 2;
            Bgr guideColour = new Bgr(Color.Yellow);

            LineSegment2DF centerLine = new LineSegment2DF(
                new PointF() { X = frameWidthCenter, Y = 0 },
                new PointF() { X = frameWidthCenter, Y = frameHeight });
            this.FlowImage.Draw(centerLine, guideColour, 1);

            LineSegment2DF rightBoundary = new LineSegment2DF(
                new PointF() { X = frameWidth * 5 / 7, Y = 0 },
                new PointF() { X = frameWidth * 5 / 7, Y = frameHeight });
            LineSegment2DF leftBoundary = new LineSegment2DF(
                new PointF() { X = frameWidth * 2 / 7, Y = 0 },
                new PointF() { X = frameWidth * 2 / 7, Y = frameHeight });
            this.FlowImage.Draw(rightBoundary, guideColour, 1);
            this.FlowImage.Draw(leftBoundary, guideColour, 1);
        }

        /// <summary>
        /// Draws each successfully tracked flow vector on the flow image: a
        /// red line from the previous position to the current one, with a
        /// small circle at the tracked end.
        /// </summary>
        private void DrawFlowVectors()
        {
            Bgr vectorColour = new Bgr(Color.Red);
            for (int index = 0; index < this.TrackedFeatures.Length; index++)
            {
                if (m_TrackingStatus[index] != 1)
                {
                    continue;
                }

                LineSegment2DF vector = new LineSegment2DF(this.PreviousFoundFeatures[index], this.TrackedFeatures[index]);
                Debug.WriteLine("Line Length" + vector.Length);
                this.FlowImage.Draw(vector, vectorColour, 1);
                this.FlowImage.Draw(new CircleF(this.TrackedFeatures[index], 2.0f), vectorColour, 1);
            }
        }

        /// <summary>
        /// Marks every feature found on the current frame with a lime circle
        /// on the current image.
        /// </summary>
        private void DrawFoundFeaturesMarkers()
        {
            Bgr markerColour = new Bgr(Color.Lime);
            for (int index = 0; index < this.CurrentFoundFeatures.Length; index++)
            {
                CircleF marker = new CircleF(this.CurrentFoundFeatures[index], 3.0f);
                this.CurrentImage.Draw(marker, markerColour, 2);
            }
        }

        /// <summary>
        /// Draws a red circle at each successfully tracked feature position
        /// on the current image. (The unused originalPoint/matchedPoint
        /// locals in the original were removed.)
        /// </summary>
        private void DrawTrackedFeaturesMarkers()
        {
            // NOTE(review): this list is reset here but never populated
            // anywhere in this class — confirm whether it is still needed.
            this.equations = new List<LineEquation>();
            for (int i = 0; i < this.TrackedFeatures.Length; i++)
            {
                if (m_TrackingStatus[i] == 1)
                {
                    CircleF circle = new CircleF(this.TrackedFeatures[i], 3.0f);
                    this.CurrentImage.Draw(circle, new Bgr(Color.Red), 2);
                }
            }
        }

        /// <summary>
        /// Raises the <see cref="Changed"/> event. The handler is copied to a
        /// local first so a subscriber unsubscribing concurrently cannot
        /// cause a null dereference between the check and the invocation.
        /// </summary>
        private void RaiseChangedEvent()
        {
            EventHandler changedHandler = this.Changed;
            if (changedHandler == null)
            {
                return;
            }

            changedHandler(this, EventArgs.Empty);
        }
    }
}