﻿using System;
using System.Collections.Generic;
using System.Diagnostics;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using AForge;
using AForge.Imaging;
using AForge.Imaging.Filters;
using AForge.Math.Geometry;
using Microsoft.Robotics.Services.DepthCamSensor;
using Microsoft.Robotics.Services.Sensors.Kinect.Proxy;


namespace NestedParticleFilter
{
    /// <summary>
    /// This class performs image processing with regards to the data gathered by the kinect sensor
    /// </summary>
    public class ImageProcessing
    {
        // Folder where debug snapshots of filtered/annotated frames are written.
        // NOTE(review): hard-coded user-specific path — breaks on other machines; confirm before deploying.
        private const string path = "c:\\Users\\Anousha\\Desktop\\Observations\\";
        // Minimum blob area for a landmark candidate to be considered.
        const int AreaThreshold = 500; // pixels
        // Minimum blob area for a robot candidate (robot appears smaller than landmarks).
        private const int RobotAreaThreshold = 200;
        // Maximum range at which an observation is trusted.
        private const int ObservationThreshold = 4000; // 4000 mm
        // Frame counter used to number the debug snapshot files.
        private static int iteration = 0;
        // Reference HSL colour of the real robot (hue in degrees, S/V in 0..1).
        private const int RobotH = 350;
        private const double RobotS = 0.85;
        private const double RobotV = 0.51;
        // Reference HSL colour of the robot as rendered in the simulator.
        private const int RobotHSimulation = 1;
        private const double RobotSSimulation = 0.1;
        private const double RobotVSimulation = 1;
        /// <summary>
        /// Analyzes the camera frame for the known colour-coded landmarks: the image is
        /// HSL-filtered for pink, blue and green markers, each detected blob's range and
        /// bearing are estimated from the depth image, and matching known landmarks are
        /// collected. Finally only the landmark(s) at the minimum observed range are kept.
        /// The input image is annotated with the detected quadrilaterals and returned.
        /// </summary>
        /// <param name="image">Colour frame from the camera; annotated in place.</param>
        /// <param name="depthState">Depth frame corresponding to <paramref name="image"/>.</param>
        /// <param name="knownLandmarks">Catalogue of landmarks that can be identified.</param>
        /// <param name="identifiedLandmarks">Receives the landmarks identified in this frame (never null).</param>
        /// <returns>The annotated input image.</returns>
        public static Bitmap AnalyzeEnvironmet (Bitmap image, DepthCamSensorState depthState, List<Landmark> knownLandmarks, out List<IdentifiedLandmark> identifiedLandmarks)
        {
            Debug.Assert(image != null, "image cannot be null");
            Debug.Assert(depthState != null, "Depth information cannot be null");
            Debug.Assert(knownLandmarks != null, "known landmarks cannot be null");
            iteration++;
            identifiedLandmarks = new List<IdentifiedLandmark>();
            short[] depthImage = depthState.DepthImage;

            // Colour segmentation: one filtered bitmap per marker colour.
            // The same filter object is reused with different HSL windows.
            HSLFiltering hslFiltering = new HSLFiltering();

            // Blue markers.
            hslFiltering.Hue = new IntRange(219, 249);
            hslFiltering.Saturation = new Range(0.31f, 0.81f);
            hslFiltering.Luminance = new Range(0f, 1f);
            Bitmap blueBitmap = hslFiltering.Apply(image);

            // Pink markers.
            hslFiltering.Hue = new IntRange(290, 320);
            hslFiltering.Saturation = new Range(0.1f, 0.7f);
            hslFiltering.Luminance = new Range(0f, 1f);
            Bitmap pinkBmp = hslFiltering.Apply(image);

            // Green markers. NOTE(review): luminance upper bound of 70f exceeds the
            // usual 0..1 HSL range and looks like a typo; preserved as-is — confirm.
            hslFiltering.Hue = new IntRange(58, 88);
            hslFiltering.Saturation = new Range(0.5f, 1.0f);
            hslFiltering.Luminance = new Range(0.15f, 70f);
            Bitmap greenBmp = hslFiltering.Apply(image);

            // Pen/Graphics are GDI+ resources; dispose them deterministically.
            using (Pen redPen = new Pen(Color.Red))
            using (Graphics graphics = Graphics.FromImage(image))
            {
                redPen.Width = 3;

                IntPoint? center;
                CustomisedColor observedColor;
                bool isQuadrilateral;
                List<IntPoint> edgePoints;

                #region Checking Pink Blobs
                center = GetBlobsAveragePoses(pinkBmp, out isQuadrilateral, out observedColor, out edgePoints, AreaThreshold);
                pinkBmp.Save(path + "filteredpink" + iteration + ".bmp", ImageFormat.Bmp);
                if (center != null)
                {
                    double pinkDepth = DepthAnalysis.GetDepth(depthImage, depthState.DepthImageSize.Width, center.Value.X, center.Value.Y);
                    double pinkAngle = DepthAnalysis.GetAngle(pinkBmp.Width, center.Value.X);
                    List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints);
                    graphics.DrawPolygon(redPen, ToPointsArray(corners));
                    using (Graphics g = Graphics.FromImage(pinkBmp))
                    {
                        g.DrawPolygon(redPen, ToPointsArray(corners));
                    }
                    pinkBmp.Save(path + "detectedpink" + iteration + ".bmp", ImageFormat.Bmp);
                    if (pinkDepth < ObservationThreshold && pinkDepth > 0)
                    {
                        identifiedLandmarks.AddRange(
                            Landmark.IdentifyLandmarkByColor(knownLandmarks, observedColor, pinkDepth, pinkAngle));
                    }
                }
                #endregion

                #region Checking for blue blobs
                center = GetBlobsAveragePoses(blueBitmap, out isQuadrilateral, out observedColor, out edgePoints, AreaThreshold);
                if (center != null)
                {
                    double blueDepth = DepthAnalysis.GetDepth(depthImage, depthState.DepthImageSize.Width, center.Value.X, center.Value.Y);
                    double blueAngle = DepthAnalysis.GetAngle(blueBitmap.Width, center.Value.X);
                    List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints);
                    graphics.DrawPolygon(redPen, ToPointsArray(corners));
                    using (Graphics g = Graphics.FromImage(blueBitmap))
                    {
                        g.DrawPolygon(redPen, ToPointsArray(corners));
                    }
                    if (blueDepth < ObservationThreshold && blueDepth > 0)
                    {
                        // BUG FIX: IdentifyLandmarkByColor used to be called twice here,
                        // with the first result thrown away. One call is enough.
                        identifiedLandmarks.AddRange(
                            Landmark.IdentifyLandmarkByColor(knownLandmarks, observedColor, blueDepth, blueAngle));
                    }
                }
                #endregion

                #region Checking green blobs
                center = GetBlobsAveragePoses(greenBmp, out isQuadrilateral, out observedColor, out edgePoints, AreaThreshold);
                if (center != null)
                {
                    double greenDepth = DepthAnalysis.GetDepth(depthImage, depthState.DepthImageSize.Width, center.Value.X, center.Value.Y);
                    double angle = DepthAnalysis.GetAngle(greenBmp.Width, center.Value.X);
                    List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints);
                    graphics.DrawPolygon(redPen, ToPointsArray(corners));
                    using (Graphics g = Graphics.FromImage(greenBmp))
                    {
                        g.DrawPolygon(redPen, ToPointsArray(corners));
                    }
                    if (greenDepth < ObservationThreshold && greenDepth > 0)
                    {
                        identifiedLandmarks.AddRange(
                            Landmark.IdentifyLandmarkByColor(knownLandmarks, observedColor, greenDepth, angle));
                    }
                }
                #endregion
            }

            // The filtered working copies are no longer needed.
            pinkBmp.Dispose();
            blueBitmap.Dispose();
            greenBmp.Dispose();

            // Keep only the closest observation(s): find the minimum range, then drop
            // every landmark observed further away than that.
            if (identifiedLandmarks.Count != 0)
            {
                double minDepth = (double)KinectReservedSampleValues.MaxValidDepth;
                foreach (IdentifiedLandmark t in identifiedLandmarks)
                {
                    if (t.Range < minDepth)
                    {
                        minDepth = t.Range;
                    }
                }
                double closest = minDepth;
                identifiedLandmarks.RemoveAll(l => l.Range > closest);
            }
            return image;
        }

        /// <summary>
        /// Scans the frame for the real robot: filters bright red out of the image,
        /// smooths it, and checks whether the largest-area red blobs within range match
        /// the robot's reference colour.
        /// </summary>
        /// <param name="image">Image of the environment; annotated with the detected quadrilateral.</param>
        /// <param name="depthState">Depth state corresponding to the image.</param>
        /// <param name="depth">Range (mm) to the detected blob centre; 0 when nothing is detected.</param>
        /// <param name="angle">Bearing to the detected blob centre; 0 when nothing is detected.</param>
        /// <param name="observedColor">Mean HSL colour of the detected blobs.</param>
        /// <returns>true when the robot is identified in the image.</returns>
        public static bool FilterRobot(Bitmap image, DepthCamSensorState depthState, out double depth, out double angle, out CustomisedColor observedColor)
        {
            depth = 0;
            angle = 0;
            Debug.Assert(image != null, "image cannot be null");
            Debug.Assert(depthState != null, "Depth information cannot be null");
            bool identified = false;

            iteration++;
            short[] depthImage = depthState.DepthImage;
            List<IntPoint> edgePoints;

            // Filter bright red (the robot's colour) out of the image, then smooth
            // with a median filter to remove speckle noise.
            HSLFiltering hslFiltering = new HSLFiltering();
            hslFiltering.Hue = new IntRange(340, 360);
            hslFiltering.Saturation = new Range(0.3f, 1f);
            hslFiltering.Luminance = new Range(0.0f, 1f);
            Median smoothFilter = new Median();
            Bitmap filtered = hslFiltering.Apply(image);
            Bitmap redBitmap = smoothFilter.Apply(filtered);
            // The unsmoothed intermediate used to leak; release it explicitly.
            filtered.Dispose();

            IntPoint? center;
            bool isQuadrilateral;

            #region Checking for Red blobs
            center = GetBlobsAveragePoses(redBitmap, out isQuadrilateral, out observedColor, out edgePoints, RobotAreaThreshold);
            using (Pen redPen = new Pen(Color.Red))
            using (Graphics graphics = Graphics.FromImage(image))
            using (Graphics g = Graphics.FromImage(redBitmap))
            {
                redPen.Width = 3;
                if (center != null)
                {
                    depth = DepthAnalysis.GetDepth(depthImage, depthState.DepthImageSize.Width, center.Value.X, center.Value.Y);
                    angle = DepthAnalysis.GetAngle(redBitmap.Width, center.Value.X);
                    Debug.WriteLine("\tRobot's Observed Color : " + observedColor);
                    List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints);
                    graphics.DrawPolygon(redPen, ToPointsArray(corners));
                    g.DrawPolygon(redPen, ToPointsArray(corners));
                    if (depth < ObservationThreshold && depth > 0)
                    {
                        identified = IdentifyRobotByColor(observedColor, false);
                    }
                }
            }
            redBitmap.Dispose();
            #endregion

            return identified;
        }

        /// <summary>
        /// Scans the frame for the simulated robot: filters white (the robot's colour in
        /// the simulator) out of the image, smooths it, and checks whether the detected
        /// blobs within range match the simulated robot's reference colour.
        /// </summary>
        /// <param name="image">bitmap image taken from webcam; annotated with the detected quadrilateral</param>
        /// <param name="depthState">depth state of the kinect</param>
        /// <param name="depth">depth of the identified object (in mm); 0 when nothing is detected</param>
        /// <param name="angle">bearing to the detected blob centre; 0 when nothing is detected</param>
        /// <param name="color">mean HSL colour of the detected blobs</param>
        /// <returns>true when the robot is identified in the image</returns>
        public static bool FilterRobotInSimulator(Bitmap image, DepthCamSensorState depthState, out double depth, out double angle, out CustomisedColor color)
        {
            depth = 0;
            angle = 0;
            Debug.Assert(image != null, "image cannot be null");
            Debug.Assert(depthState != null, "Depth information cannot be null");
            bool identified = false;

            iteration++;
            short[] depthImage = depthState.DepthImage;
            List<IntPoint> edgePoints;

            // Filter white out of the image (low saturation, any luminance), then
            // smooth with a median filter to remove speckle noise.
            HSLFiltering hslFiltering = new HSLFiltering();
            hslFiltering.Hue = new IntRange(0, 10);
            hslFiltering.Saturation = new Range(0.0f, 0.1f);
            hslFiltering.Luminance = new Range(0.0f, 1f);
            Median smoothFilter = new Median();
            Bitmap filtered = hslFiltering.Apply(image);
            Bitmap whiteBitmap = smoothFilter.Apply(filtered);
            // The unsmoothed intermediate used to leak; release it explicitly.
            filtered.Dispose();

            IntPoint? center;
            bool isQuadrilateral;

            #region Checking for White blobs
            // Write the blob colour straight into the out parameter; the extra
            // observedColor local in the original added nothing.
            center = GetBlobsAveragePoses(whiteBitmap, out isQuadrilateral, out color, out edgePoints, RobotAreaThreshold);
            using (Pen redPen = new Pen(Color.Red))
            using (Graphics graphics = Graphics.FromImage(image))
            using (Graphics g = Graphics.FromImage(whiteBitmap))
            {
                redPen.Width = 3;
                if (center != null)
                {
                    depth = DepthAnalysis.GetDepth(depthImage, depthState.DepthImageSize.Width, center.Value.X, center.Value.Y);
                    angle = DepthAnalysis.GetAngle(whiteBitmap.Width, center.Value.X);
                    List<IntPoint> corners = PointsCloud.FindQuadrilateralCorners(edgePoints);
                    graphics.DrawPolygon(redPen, ToPointsArray(corners));
                    g.DrawPolygon(redPen, ToPointsArray(corners));
                    if (depth < ObservationThreshold && depth > 0)
                    {
                        identified = IdentifyRobotByColor(color, true);
                    }
                }
            }
            whiteBitmap.Dispose();
            #endregion

            return identified;
        }
        /// <summary>
        /// Decides whether an observed mean colour matches the robot's reference colour,
        /// using a tolerance of ±15 on hue and ±0.5 on saturation and luminance. In
        /// simulation a different reference colour is compared against.
        /// </summary>
        /// <param name="observedColor">Mean HSL colour of the candidate blobs.</param>
        /// <param name="inSimulation">true to compare against the simulated robot's reference colour.</param>
        /// <returns>true when all three channels fall inside their tolerance windows.</returns>
        /// <exception cref="ArgumentNullException"><paramref name="observedColor" /> is <c>null</c>.</exception>
        private static bool IdentifyRobotByColor(CustomisedColor observedColor, bool inSimulation)
        {
            if (observedColor == null) throw new ArgumentNullException("observedColor");

            // Pick the reference colour for the current mode (real robot vs simulator).
            int referenceHue = inSimulation ? RobotHSimulation : RobotH;
            double referenceSaturation = inSimulation ? RobotSSimulation : RobotS;
            double referenceLuminance = inSimulation ? RobotVSimulation : RobotV;

            bool hueMatches = observedColor.Hue >= (referenceHue - 15)
                              && observedColor.Hue <= (referenceHue + 15);
            bool saturationMatches = observedColor.Saturation >= (referenceSaturation - 0.5)
                                     && observedColor.Saturation <= (referenceSaturation + 0.5);
            bool luminanceMatches = observedColor.Luminance >= (referenceLuminance - 0.5)
                                    && observedColor.Luminance <= (referenceLuminance + 0.5);

            return hueMatches && saturationMatches && luminanceMatches;
        }

        /// <summary>
        /// Converts a list of AForge.NET integer points into an array of equivalent
        /// <see cref="System.Drawing.Point"/> values.
        /// </summary>
        /// <param name="points">AForge points to convert.</param>
        /// <returns>A new array with one .NET point per input point, in the same order.</returns>
        private static System.Drawing.Point[] ToPointsArray(List<IntPoint> points)
        {
            // One-to-one projection; ConvertAll preserves the input order.
            return points
                .ConvertAll(p => new System.Drawing.Point(p.X, p.Y))
                .ToArray();
        }

       

        /// <summary>
        /// Gets the average centre of the blobs in the specified (already colour-filtered)
        /// image whose area exceeds <paramref name="threshold"/>, together with their mean
        /// HSL colour and whether any of them is quadrilateral.
        /// </summary>
        /// <param name="bitmap">Colour-filtered image of blobs.</param>
        /// <param name="isQuadrilateral">true if any qualifying blob is quadrilateral.</param>
        /// <param name="observedColor">Mean hue/saturation/brightness over the qualifying blobs.</param>
        /// <param name="edgePoints">Edge points of the LAST qualifying blob scanned (original
        /// behaviour, preserved); empty when no blob qualifies.</param>
        /// <param name="threshold">Minimum blob area, in pixels.</param>
        /// <returns>The average of the qualifying blobs' centres, or null when none qualifies.</returns>
        private static IntPoint? GetBlobsAveragePoses(Bitmap bitmap, out bool isQuadrilateral, out CustomisedColor observedColor, out List<IntPoint> edgePoints, double threshold)
        {
            edgePoints = new List<IntPoint>();
            isQuadrilateral = false;
            observedColor = new CustomisedColor();
            BlobCounter blobCounter = new BlobCounter();
            // One pass is enough: the bitmap does not change below, so the per-blob
            // re-ProcessImage call the original made inside the loop was pure waste.
            blobCounter.ProcessImage(bitmap);
            IntPoint center = new IntPoint(0, 0);
            // Sums of the blob mean colours; divided by count at the end.
            double hueSum = 0, saturationSum = 0, brightnessSum = 0;
            int count = 0;
            var shapeChecker = new SimpleShapeChecker();
            Blob[] blobs = blobCounter.GetObjects(bitmap, false);
            foreach (Blob blob in blobs)
            {
                if (blob.Area > threshold)
                {
                    hueSum += blob.ColorMean.GetHue();
                    saturationSum += blob.ColorMean.GetSaturation();
                    brightnessSum += blob.ColorMean.GetBrightness();
                    center.X += blob.CenterOfGravity.X; // width
                    center.Y += blob.CenterOfGravity.Y; // height
                    edgePoints = blobCounter.GetBlobsEdgePoints(blob);
                    List<IntPoint> corners;
                    // If the shape is quadrilateral then flag it.
                    if (edgePoints.Count != 0 && shapeChecker.IsQuadrilateral(edgePoints, out corners))
                        isQuadrilateral = true;
                    count++;
                }
            }

            if (count == 0)
                return null;
            observedColor.Hue = hueSum / count;
            observedColor.Saturation = saturationSum / count;
            observedColor.Luminance = brightnessSum / count;
            center.X /= count;
            center.Y /= count;
            return center;
        }

       
        /// <summary>
        /// Makes an 8-bit grey-scale bitmap (stored as 24bpp RGB, since WinForms does not
        /// support Format16bppGrayScale) out of raw Kinect depth samples.
        /// </summary>
        /// <param name="width">Image width in pixels.</param>
        /// <param name="height">Image height in pixels.</param>
        /// <param name="depthData">One depth sample per pixel, row-major; length must be width*height.</param>
        /// <returns>The grey-scale bitmap, or null when bitmap creation/locking fails.</returns>
        public static Bitmap MakeDepthBitmap(int width, int height, short[] depthData)
        {
            Debug.Assert(depthData!= null, "DepthData is null!!!");
            // Scale each depth sample to a byte, clamping everything at or beyond the
            // maximum valid depth to white.
            byte[] buff = new byte[width * height * 3];
            byte val;
            int j = 0;
            for (int i = 0; i < width * height; i++)
            {
                if (depthData[i] >= (short)KinectReservedSampleValues.MaxDepthDataValue)
                {
                    val = byte.MaxValue;
                }
                else
                {
                    val =
                        (byte)
                        (((double)depthData[i] / (double)KinectReservedSampleValues.MaxDepthDataValue) * byte.MaxValue);
                }
                // Set all R, G and B values the same, i.e. gray scale.
                buff[j++] = val;
                buff[j++] = val;
                buff[j++] = val;
            }

            Bitmap bmp = null;
            try
            {
                bmp = new Bitmap(width, height, PixelFormat.Format24bppRgb);

                BitmapData data = bmp.LockBits(
                    new Rectangle(0, 0, bmp.Width, bmp.Height),
                    ImageLockMode.WriteOnly,
                    PixelFormat.Format24bppRgb
                    );
                try
                {
                    // Copy row by row honouring the bitmap stride. The original single
                    // Marshal.Copy assumed stride == width * 3, which corrupts the image
                    // whenever width is not a multiple of four (rows are longword aligned).
                    int rowBytes = width * 3;
                    for (int row = 0; row < height; row++)
                    {
                        IntPtr rowPtr = new IntPtr(data.Scan0.ToInt64() + (long)row * data.Stride);
                        Marshal.Copy(buff, row * rowBytes, rowPtr, rowBytes);
                    }
                }
                finally
                {
                    // Always unlock, even if the copy throws.
                    bmp.UnlockBits(data);
                }
            }
            catch( Exception e)
            {
                Debug.WriteLine(e.ToString());
            }
            return bmp;
        }

        /// <summary>
        /// Writes every landmark in <paramref name="list"/> to the debug output, one per
        /// line, for diagnostic purposes.
        /// </summary>
        /// <param name="list">Sequence of identified landmarks to dump.</param>
        /// <exception cref="ArgumentNullException"><paramref name="list" /> is <c>null</c>.</exception>
        private static void PrintList (IEnumerable<IdentifiedLandmark> list)
        {
            if (list == null) throw new ArgumentNullException("list", "Argument shall not be null");

            foreach (IdentifiedLandmark landmark in list)
            {
                Debug.WriteLine(landmark.ToString());
            }
        }
    }
}
