﻿using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Linq;
using System.Text;
using System.Timers;
using Emgu.CV;
using Emgu.CV.Structure;
using Emgu.CV.UI;
using FinalYear.Helpers;

namespace FinalYear.SixtyFour
{
    public class ProcessFlow
    {
        // Records how long each named processing stage takes (project helper).
        private static EfficiencyTimer timer;

        /// <summary>
        /// Tracks corner features from <paramref name="previousFrame"/> into
        /// <paramref name="currentFrame"/> with the pyramidal Lucas-Kanade
        /// optical-flow algorithm, filters out unreliable matches, accumulates
        /// per-region flow statistics and renders the flow vectors onto a BGR
        /// copy of the current frame.
        /// </summary>
        /// <param name="currentFrame">The current frame (grayscale).</param>
        /// <param name="previousFrame">The previous frame (grayscale).</param>
        /// <param name="currentColorFrame">
        /// NOTE(review): this argument is immediately replaced by a BGR
        /// conversion of <paramref name="currentFrame"/>; because it is passed
        /// by value the caller's reference is never updated — the parameter is
        /// effectively unused input kept for interface compatibility.
        /// </param>
        /// <param name="values">Optional tuning parameters (currently unused).</param>
        /// <returns>
        /// The previous and the current frame in ImageBox format
        /// (index 0: resized previous frame, index 1: annotated current frame).
        /// </returns>
        public static ImageBox[] ProcessLucasKanade(Image<Gray, Byte> currentFrame, Image<Gray, Byte> previousFrame, Image<Bgr, byte> currentColorFrame, Parameters values = null)
        {
            // FIX: the stopwatch was previously never started, so the elapsed
            // time written below always read 00:00:00.
            Stopwatch currentTime = Stopwatch.StartNew();
            ImageBox[] arrImages = new ImageBox[2];
            PointF[] nextFeature;
            byte[] status;
            float[] trackError;

            Image<Gray, Byte> previousGrayFrame = previousFrame;
            Image<Gray, Byte> currentGrayFrame = currentFrame;

            // Draw overlays on a colour conversion of the current grayscale
            // frame so annotations line up with the tracked pixels.
            currentColorFrame = currentGrayFrame.Convert<Bgr, byte>();

            System.Drawing.Size cvImageSize = previousGrayFrame.Size;
            timer = new EfficiencyTimer(Procedure.ProcessLucasKanade);

            // Seed the tracker with strong corners from the previous frame.
            var goodFeatures = previousGrayFrame.GoodFeaturesToTrack(Constants.NUMBEROFFEATURES, 0.1, .5, 5);

            // Track the seeded corners into the current frame using the
            // pyramidal Lucas and Kanade algorithm; status[i] != 0 marks a
            // feature that was found again in the current frame.
            OpticalFlow.PyrLK(previousGrayFrame, currentGrayFrame, goodFeatures[0], cvImageSize, 2, new MCvTermCriteria(20, 0.3d), out nextFeature, out status, out trackError);
            timer.TerminateTimer();

            currentTime.Stop();
            Debug.WriteLine(currentTime.Elapsed);

            MCvFont curr = new MCvFont(Emgu.CV.CvEnum.FONT.CV_FONT_HERSHEY_PLAIN, .8, .2);

            timer = new EfficiencyTimer(Procedure.RenderFeaturePoints);

            // Compact both arrays so only successfully tracked pairs remain at
            // the front; k ends up as the number of matched features.
            int k = 0;
            int trackedLimit = Math.Min(Constants.NUMBEROFFEATURES, status.Length);
            for (int i = 0; i < trackedLimit; i++)
            {
                if (status[i] == 0)
                {
                    continue;
                }

                goodFeatures[0][k] = goodFeatures[0][i];
                nextFeature[k] = nextFeature[i];
                k++;
            }

            int numberOfMatchedFeatures = k;
            timer.TerminateTimer();

            // Per-region flow accumulators (top band, lower-left, lower-right,
            // and the centre "collision" window).
            int topCount = 0, rightGroundCount = 0, leftGroundCount = 0, collisionCount = 0;
            double topCountX = 0, topCountY = 0, totalLeftGroundCountX = 0, totalLeftGroundCountY = 0, totalRightGroundCountX = 0, totalRightGroundCountY = 0, totalCollisionTime = 0;
            int countflow = 0;
            double sumflow = 0;
            int tempCount = 0;

            // First filter: drop matches whose displacement (euclidean
            // distance between frame n and n+1) is implausibly large —
            // optical flow assumes small motion between consecutive frames.
            for (int i = 0; i < numberOfMatchedFeatures; i++)
            {
                double x1 = goodFeatures[0][i].X;
                double x2 = nextFeature[i].X;
                double y1 = goodFeatures[0][i].Y;
                double y2 = nextFeature[i].Y;
                double hypotenuse = Math.Sqrt(Math.Pow((y1 - y2), 2.0) + Math.Pow((x1 - x2), 2.0));

                if (hypotenuse < 4)
                {
                    // Mark each feature that survives the displacement filter.
                    currentColorFrame.Draw(new CircleF() { Center = goodFeatures[0][i], Radius = 2 }, new Bgr(Color.Yellow), 2);
                    goodFeatures[0][tempCount] = goodFeatures[0][i];
                    nextFeature[tempCount] = nextFeature[i];
                    tempCount++;
                }
            }

            numberOfMatchedFeatures = tempCount;

            // Accumulate the total flow magnitude so outliers can be rejected
            // relative to the mean displacement.
            for (int i = 0; i < numberOfMatchedFeatures; i++)
            {
                double x1 = goodFeatures[0][i].X;
                double x2 = nextFeature[i].X;
                double y1 = goodFeatures[0][i].Y;
                double y2 = nextFeature[i].Y;
                double hypotenuse = Math.Sqrt(Math.Pow((y1 - y2), 2.0) + Math.Pow((x1 - x2), 2.0));

                countflow++;
                sumflow += hypotenuse;
            }

            // Guard the division; when countflow == 0 the loop below runs zero
            // times anyway, but this avoids manufacturing NaN.
            double meanflow = countflow > 0 ? sumflow / countflow : 0;
            tempCount = 0;

            // Second filter: keep only features whose displacement is within
            // 3x the mean flow magnitude.
            for (int i = 0; i < numberOfMatchedFeatures; i++)
            {
                double x1 = goodFeatures[0][i].X;
                double x2 = nextFeature[i].X;
                double y1 = goodFeatures[0][i].Y;
                double y2 = nextFeature[i].Y;
                double hypotenuse = Math.Sqrt(Math.Pow((y1 - y2), 2.0) + Math.Pow((x1 - x2), 2.0));

                if (hypotenuse / meanflow < 3.0)
                {
                    goodFeatures[0][tempCount] = goodFeatures[0][i];
                    nextFeature[tempCount] = nextFeature[i];
                    tempCount++;
                }
            }

            numberOfMatchedFeatures = tempCount;

            // Partition the frame into regions and accumulate the flow vector
            // components of the features falling into each region.
            int frameAdjustedHeight = currentFrame.Size.Height * 2 / 7;
            int frameAdjustedWidth = currentFrame.Size.Width / 2;
            for (int i = 0; i < numberOfMatchedFeatures; i++)
            {
                double x1 = goodFeatures[0][i].X;
                double x2 = nextFeature[i].X;
                double y1 = goodFeatures[0][i].Y;
                double y2 = nextFeature[i].Y;

                if (y1 < frameAdjustedHeight)
                {
                    topCount++;
                    topCountY += (y1 - y2);
                    topCountX += (x1 - x2);
                }

                if ((y1 > frameAdjustedHeight) && (x1 < frameAdjustedWidth))
                {
                    leftGroundCount++;
                    totalLeftGroundCountX += (x1 - x2);
                    totalLeftGroundCountY += (y1 - y2);
                }

                if ((y1 > frameAdjustedHeight) && (x1 > frameAdjustedWidth))
                {
                    rightGroundCount++;
                    totalRightGroundCountX += (x1 - x2);
                    totalRightGroundCountY += (y1 - y2);
                }

                // Features inside the centre window contribute to the
                // time-to-collision estimate.
                // TODO: The depth measure? or assume that the objects lie
                // within a specific range of the camera.
                if ((y1 > currentFrame.Size.Height * 2 / 7) && (x1 > currentFrame.Size.Width * 2 / 7) && (y1 < currentFrame.Size.Height * 5 / 7) && (x1 < currentFrame.Size.Width * 5 / 7))
                {
                    collisionCount++;
                    totalCollisionTime += (y1 - y2);
                }
            }

            // Render each surviving flow vector as an exaggerated line with a
            // small arrow head pointing along the direction of motion.
            for (int i = 0; i < numberOfMatchedFeatures; i++)
            {
                double x1 = goodFeatures[0][i].X;
                double x2 = nextFeature[i].X;
                double y1 = goodFeatures[0][i].Y;
                double y2 = nextFeature[i].Y;
                double angle = Math.Atan2(y1 - y2, x1 - x2);
                double hypotenuse = Math.Sqrt(Math.Pow((y1 - y2), 2.0) + Math.Pow((x1 - x2), 2.0));

                currentColorFrame.Draw(new CircleF() { Center = goodFeatures[0][i], Radius = 2 }, new Bgr(Color.RoyalBlue), 1);

                // Extend the vector to twice its length for visibility.
                x2 = (x1 - 2 * hypotenuse * Math.Cos(angle));
                y2 = (y1 - 2 * hypotenuse * Math.Sin(angle));

                // FIX: the segment end points previously reused y1, which made
                // every drawn vector (and both arrow-head strokes) horizontal
                // instead of following the flow direction.
                currentColorFrame.Draw(new LineSegment2DF(new Point() { X = (int)x1, Y = (int)y1 }, new Point() { X = (int)x2, Y = (int)y2 }), new Bgr(Color.Pink), 1);

                x1 = (int)(x2 + 4 * Math.Cos(angle + Math.PI / 4));
                y1 = (int)(y2 + 4 * Math.Sin(angle + Math.PI / 4));
                currentColorFrame.Draw(new LineSegment2DF(new Point() { X = (int)x1, Y = (int)y1 }, new Point() { X = (int)x2, Y = (int)y2 }), new Bgr(Color.Pink), 1);

                x1 = (int)(x2 + 4 * Math.Cos(angle - Math.PI / 4));
                y1 = (int)(y2 + 4 * Math.Sin(angle - Math.PI / 4));
                currentColorFrame.Draw(new LineSegment2DF(new Point() { X = (int)x1, Y = (int)y1 }, new Point() { X = (int)x2, Y = (int)y2 }), new Bgr(Color.Pink), 1);
            }

            currentColorFrame.Draw(string.Format("Collision Time {0}", totalCollisionTime), ref curr, new Point() { X = 15, Y = 20 }, new Bgr(Color.Red));
            currentColorFrame.Draw(string.Format("Left Ground X {0}", totalLeftGroundCountX), ref curr, new Point() { X = 15, Y = 30 }, new Bgr(Color.Red));
            currentColorFrame.Draw(string.Format("Right Ground X {0}", totalRightGroundCountX), ref curr, new Point() { X = 15, Y = 40 }, new Bgr(Color.Red));

            // Resize the previous frame so it fits the image box for viewing;
            // the annotated current frame is returned at its native size.
            arrImages[0] = new ImageBox();
            arrImages[1] = new ImageBox();
            arrImages[0].Image = previousFrame.Resize(181, 217, Emgu.CV.CvEnum.INTER.CV_INTER_CUBIC);
            arrImages[1].Image = currentColorFrame;

            return arrImages;
        }

        /// <summary>
        /// Logs the flow vector (u,v) of each matched feature pair, as a step
        /// towards estimating the focus of expansion / time to collision.
        /// Debug-output only; has no effect on the images.
        /// </summary>
        /// <param name="previousFeatures">Features from frame n (first row used).</param>
        /// <param name="currentFeatures">Matched features from frame n+1.</param>
        public static void FocusOfExpansion(PointF[][] previousFeatures, PointF[] currentFeatures)
        {
            if (previousFeatures != null && currentFeatures != null)
            {
                // FIX: bound the loop by the shorter of the two arrays; the
                // previous version indexed currentFeatures by the length of
                // previousFeatures[0] and could throw IndexOutOfRangeException.
                int count = Math.Min(previousFeatures[0].Length, currentFeatures.Length);
                for (int i = 0; i < count; i++)
                {
                    System.Diagnostics.Debug.WriteLine("in this line of code");
                    System.Diagnostics.Debug.WriteLine("Coordinates x {0} and x1 {1}, y {2} and y1 {3}, vector (u,v) ({4},{5})", previousFeatures[0][i].X, currentFeatures[i].X, previousFeatures[0][i].Y, currentFeatures[i].Y, previousFeatures[0][i].X - currentFeatures[i].X, previousFeatures[0][i].Y - currentFeatures[i].Y);
                }
            }
        }
    }
}