﻿#define USE_HSV

// Revision 5

using System;
using System.Collections.Generic;
using System.Drawing;
using System.Text;
using System.Runtime.InteropServices;

using Emgu.CV;
using Emgu.CV.CvEnum;
using Emgu.CV.Structure;
using Emgu.Util;

using Microsoft.Xna.Framework;
using Color = Microsoft.Xna.Framework.Color;


namespace NOVA.Components.Move
{
    /// <summary>
    /// Processes PS Eye camera frames to track the glowing bulb of a PS Move
    /// controller. Each arriving frame is color-thresholded into a binary
    /// image; the bulb center is taken from the image moments and its radius
    /// estimated by scanning outward from the center, which in turn yields a
    /// distance estimate based on the camera optics.
    /// </summary>
    public class ImageProcessing : IDisposable
    {
        private PSEyeInterface m_Eye;

        // Working images, allocated once at the camera resolution (640x480).
        Image<Bgra, Byte> tempimg = new Image<Bgra, byte>(640, 480);
        Image<Bgr, Byte> img = new Image<Bgr, byte>(640, 480);
        Image<Hsv, Byte> hsvImage;

        // Last thresholded (binary) frame; public so callers can display it.
        public Image<Gray, Byte> gray;

        private Color m_trackingColor;   // bulb color to track (Black = none selected)
        private Vector2 m_Position;      // last valid bulb center, normalized to [0..1]
        private float m_fDistance;       // last estimated camera-to-bulb distance [cm]
        private float m_fDistanceLast;   // previous distance, kept for a (disabled) fallback
        private int[] m_RGBValues;       // hue-histogram samples from the last calibration pass
        public bool threading;           // true while the tracking loop is active
        public bool calibrated;          // true once a tracking color has been calibrated

        public Image<Gray, byte> result;

        /// <summary>Color of the bulb to track; Black disables tracking.</summary>
        public Color TrackingColor
        {
            get { return m_trackingColor; }
            set { m_trackingColor = value; }
        }

        /// <summary>Last valid bulb center, both components in [0..1].</summary>
        public Vector2 Position
        {
            get { return m_Position; }
        }

        /// <summary>Last estimated camera-to-bulb distance in cm.</summary>
        public float Distance
        {
            get { return m_fDistance; }
        }

        /// <summary>
        /// Hue-histogram samples of the last calibration pass
        /// (null until CalculateRGBHistogramValues has been called).
        /// </summary>
        public int[] HistogramValues
        {
            get { return m_RGBValues; }
        }

        /// <summary>The underlying camera interface.</summary>
        public PSEyeInterface Eye
        {
            get { return m_Eye; }
        }

        public ImageProcessing()
        {
            m_Eye = new PSEyeInterface(CLEyeCameraColorMode.CLEYE_COLOR_RAW, CLEyeCameraResolution.CLEYE_VGA, CLEyeFrameRate.CLEYE_FRAMERATE_60);

            m_trackingColor = Color.Black;
            m_Position = Vector2.Zero;
            m_fDistance = 0.0f;

            result = new Image<Gray, byte>(640, 480, new Gray(0));

            threading = false;
            calibrated = false;

            // Process frames as soon as the camera delivers them.
            if (m_Eye.CameraFound)
                m_Eye.ImageArrivedEvent += new PSEyeInterface.ImageArrivedEventHandler(myEye_ImageArrivedEvent);
        }

        /// <summary>
        /// Camera callback. Wraps the raw BGRA frame and copies its three
        /// color channels into the working BGR image; when tracking is
        /// running and a color has been calibrated, updates the bulb position.
        /// </summary>
        void myEye_ImageArrivedEvent(object sender, ImageArrivedEventArgs e)
        {
            try
            {
                // Wrap the camera's frame buffer (no copy), then drop the
                // alpha channel by copying B, G and R individually.
                tempimg = new Image<Bgra, byte>(e.Width, e.Height, e.Stride, e.DataPointer);
                img[0] = tempimg[0];
                img[1] = tempimg[1];
                img[2] = tempimg[2];

                // Only track once a color has been calibrated and tracking
                // has been switched on.
                if (threading && calibrated)
                {
                    GetBulbPosition();
                }
            }
            catch (CvException exc)
            {
                Console.Write(exc.Message);
            }
        }

        /// <summary>Returns the current camera image downscaled to 480x320.</summary>
        public Bitmap returnImage()
        {
            return img.Resize(480, 320, INTER.CV_INTER_CUBIC).ToBitmap();
        }

        /// <summary>Pushes the given parameter set to the camera hardware.</summary>
        public void setCameraParameters(int autogain, int autoexposure, int autowhitebalance,
            int gain, int exposure, int wbred, int wbgreen, int wbblue)
        {
            m_Eye.camParams.AutoExposure = autoexposure;
            m_Eye.camParams.AutoGain = autogain;
            m_Eye.camParams.AutoWhiteBalance = autowhitebalance;
            m_Eye.camParams.Exposure = exposure;
            m_Eye.camParams.Gain = gain;
            m_Eye.camParams.WhiteBalanceBlue = wbblue;
            m_Eye.camParams.WhiteBalanceGreen = wbgreen;
            m_Eye.camParams.WhiteBalanceRed = wbred;

            m_Eye.setCamParams();
        }

        /// <summary>
        /// Samples the hue histogram of the current frame at three fixed hue
        /// positions (yellow, cyan and pink) for color calibration.
        /// </summary>
        /// <returns>The three histogram samples (also kept in HistogramValues).</returns>
        public int[] CalculateRGBHistogramValues()
        {
            // BUGFIX: the HSV buffer was created as 480x320 although the
            // source fed into ConvertFrom below is the 640x480 'img';
            // allocate it with the source size so the conversion sizes match
            // (the HSV path in returnThresholdedImage does the same).
            hsvImage = new Image<Hsv, byte>(img.Size);
            hsvImage.ConvertFrom<Bgr, Byte>(img);

            DenseHistogram hist = new DenseHistogram(180, new RangeF(0, 180));

            // Histogram over the hue channel (channel 0) only.
            CvInvoke.cvCalcHist(new System.IntPtr[] { hsvImage.Split()[0].Ptr }, hist, false, IntPtr.Zero);

            // Sample the hue bins of interest. OpenCV hue is in [0, 180).
#if COMPLEMENT
            int[] values = new int[] { (int)hist[0], (int)hist[120], (int)hist[60] };
#else
            int[] values = new int[] { (int)hist[30], (int)hist[90], (int)hist[150] };
#endif
            m_RGBValues = values;
            return values;
        }

        /// <summary>
        /// Converts the current frame into a binary image in which (ideally)
        /// only the tracked bulb is white. The filter works on the HSV
        /// representation (or on plain RGB channels when USE_HSV is not
        /// defined) and finishes with an erode/dilate pass to suppress noise.
        /// </summary>
        public Image<Gray, byte> returnThresholdedImage()
        {
            int iState = -1;
            bool bUseSaturation = false;
            bool bUseValue = true;

#if !USE_HSV
            int iThreshold = 254;   // TODO: tune

            Image<Gray, byte> R = new Image<Gray, byte>(480, 320);
            Image<Gray, byte> G = new Image<Gray, byte>(480, 320);
            Image<Gray, byte> B = new Image<Gray, byte>(480, 320);
            // Intentionally shadows the 'result' field: the RGB code path
            // works on a downscaled 480x320 image.
            Image<Gray, byte> result = new Image<Gray, byte>(480, 320);

            // Split the downscaled camera image into its B, G and R channels.
            Image<Gray, byte>[] channels = img.Resize(480, 320, INTER.CV_INTER_CUBIC).Split();

            // Map the tracking color onto a filter state.
            if (m_trackingColor.B > 0 && m_trackingColor.G == 0 && m_trackingColor.R > 0)
                iState = 0; // pink
            if (m_trackingColor.B == 0 && m_trackingColor.G > 0 && m_trackingColor.R > 0)
                iState = 1; // yellow
            if (m_trackingColor.B > 0 && m_trackingColor.G > 0 && m_trackingColor.R == 0)
                iState = 2; // cyan

            // Binarize the two channels that make up the tracked color and
            // intersect them.
            switch (iState)
            {
                case 0: // pink: red and blue channel
                    B = channels[0].ThresholdBinary(new Gray(iThreshold), new Gray(255));
                    R = channels[2].ThresholdBinary(new Gray(iThreshold), new Gray(255));
                    result = R.And(B);
                    break;
                case 1: // yellow: red and green channel
                    G = channels[1].ThresholdBinary(new Gray(iThreshold), new Gray(255));
                    R = channels[2].ThresholdBinary(new Gray(iThreshold), new Gray(255));
                    result = R.And(G);
                    break;
                case 2: // cyan: green and blue channel
                    B = channels[0].ThresholdBinary(new Gray(iThreshold), new Gray(255));
                    G = channels[1].ThresholdBinary(new Gray(iThreshold), new Gray(255));
                    result = B.And(G);
                    break;
                default: // no tracking color selected: return a black image
                    return result;
            }
#else
            // Convert the camera image to HSV. 'img' is already 640x480, so
            // the previous same-size Resize before the conversion was a no-op
            // copy and has been dropped.
            hsvImage = new Image<Hsv, byte>(new Size(640, 480));
            hsvImage.ConvertFrom<Bgr, Byte>(img);

            // Split into H-, S- and V-channel.
            Image<Gray, byte>[] channels = hsvImage.Split();

            Image<Gray, byte> resultH = new Image<Gray, byte>(640, 480);
            Image<Gray, byte> resultS = new Image<Gray, byte>(640, 480);
            Image<Gray, byte> resultV = new Image<Gray, byte>(640, 480);

            // Map the tracking color onto a filter state.
            if (m_trackingColor.B == 0 && m_trackingColor.G == 0 && m_trackingColor.R > 0)
                iState = 0; // red
            if (m_trackingColor.B == 0 && m_trackingColor.G > 0 && m_trackingColor.R == 0)
                iState = 1; // green
            if (m_trackingColor.B > 0 && m_trackingColor.G == 0 && m_trackingColor.R == 0)
                iState = 2; // blue

            // Filter the hue channel for the color band of the tracked
            // color, the saturation channel for strongly saturated spots and
            // the value channel for bright spots.
            switch (iState)
            {
                case 0: // red
                    resultH = channels[0].InRange(new Gray(0), new Gray(30));
                    resultS = channels[1].InRange(new Gray(70), new Gray(160));
                    resultV = channels[2].ThresholdBinary(new Gray(230), new Gray(255));
                    break;
                case 1: // green
                    resultH = channels[0].InRange(new Gray(40), new Gray(90));
                    resultS = channels[1].InRange(new Gray(70), new Gray(160));
                    resultV = channels[2].ThresholdBinary(new Gray(150), new Gray(255));
                    break;
                case 2: // blue
                    resultH = channels[0].InRange(new Gray(90), new Gray(120));
                    resultS = channels[1].InRange(new Gray(70), new Gray(255));
                    resultV = channels[2].ThresholdBinary(new Gray(240), new Gray(255));
                    break;
            }

            // Combine the per-channel masks; only spots passing all enabled
            // filters survive.
            result = resultH;

            if (bUseSaturation)
                result = result.And(resultS);

            if (bUseValue)
                result = result.And(resultV);
#endif

            // Reduce background noise with a morphological open (erode then
            // dilate), but only when the mask is big enough to survive it.
            if (result.CountNonzero()[0] > 30)
                result = result.Erode(3).Dilate(3);

            return result;
        }

        /// <summary>
        /// Counts the white pixels of the 'gray' binary image starting at
        /// (iX, iY) and stepping in direction (dx, dy), until the first
        /// black pixel or the image border is reached.
        /// </summary>
        private int ScanWhiteRun(int iX, int iY, int dx, int dy)
        {
            int run = 0;
            while (gray[new System.Drawing.Point(iX + run * dx, iY + run * dy)].Intensity > 0)
            {
                run++;

                // Stop before stepping outside the image limits.
                int x = iX + run * dx;
                int y = iY + run * dy;
                if (x <= 0 || y <= 0 || x >= gray.ROI.Width || y >= gray.ROI.Height)
                    break;
            }
            return run;
        }

        /// <summary>
        /// Thresholds the current frame (updating 'gray'), locates the bulb
        /// center via image moments and estimates its radius from the white
        /// run lengths in all four directions. The detection is accepted only
        /// when the area of the fitted circle matches the total number of
        /// white pixels within +/-25% - otherwise bright background spots
        /// have disturbed the gravity center and the data is invalid.
        /// (Extracted: this logic used to be duplicated verbatim in
        /// GetBulbPosition and GetBulbRadius.)
        /// </summary>
        /// <param name="centerX">Bulb center x in pixels (meaningful when true is returned).</param>
        /// <param name="centerY">Bulb center y in pixels (meaningful when true is returned).</param>
        /// <param name="radius">Estimated bulb radius in pixels.</param>
        /// <returns>True when a plausible bulb was found.</returns>
        private bool TryLocateBulb(out int centerX, out int centerY, out double radius)
        {
            centerX = 0;
            centerY = 0;
            radius = 0.0;

            // Binary image in which (ideally) only the bulb is white.
            gray = returnThresholdedImage();

            // The gravity center of the binary image is the candidate center.
            MCvMoments moments = gray.GetMoments(true);
            int iX = (int)moments.GravityCenter.x;
            int iY = (int)moments.GravityCenter.y;

            if (iX <= 0 || iY <= 0)
                return false;

            // White run lengths from the center in all four directions; they
            // ground the radius estimate.
            int real_radiusR = ScanWhiteRun(iX, iY, 1, 0);
            int real_radiusL = ScanWhiteRun(iX, iY, -1, 0);
            int real_radiusT = ScanWhiteRun(iX, iY, 0, -1);
            int real_radiusB = ScanWhiteRun(iX, iY, 0, 1);

            // Combine the horizontal and vertical diameters via Pythagoras;
            // the empirical divisor 3 roughly undoes the 2*sqrt(2) factor of
            // combining two diameters into one radius.
            int temp = ((real_radiusR + real_radiusL) * (real_radiusR + real_radiusL) + (real_radiusB + real_radiusT) * (real_radiusB + real_radiusT));
            double newradius = ((float)Math.Sqrt(temp)) / 3.0;

            // Circle hypothesis at the found center.
            CircleF circ = new CircleF(new PointF(iX, iY), (float)newradius);

            // Histogram of the binary image to count the white pixels.
            DenseHistogram hist = new DenseHistogram(256, new RangeF(0, 256));
            CvInvoke.cvCalcHist(new System.IntPtr[] { gray.Ptr }, hist, false, IntPtr.Zero);

            int whiteHist = (int)hist[255];
            double dMaxHist = (double)(whiteHist + whiteHist * 0.25f);
            double dMinHist = (double)(whiteHist - whiteHist * 0.25f);

            centerX = iX;
            centerY = iY;
            radius = newradius;

            // Accept only when the circle area matches the white pixel count
            // within +/-25%. The whiteHist > 0 guard additionally rejects the
            // degenerate all-black case, which would otherwise pass with a
            // zero radius and produce an infinite distance downstream.
            return whiteHist > 0 && circ.Area <= dMaxHist && circ.Area >= dMinHist;
        }

        /// <summary>
        /// Tracks the bulb in the current frame and, on success, updates
        /// Position and Distance.
        /// </summary>
        /// <returns>
        /// (x, y, distance) with x and y normalized to [0..1] over the
        /// 640x480 frame, or Vector3.Zero when no plausible bulb was found.
        /// </returns>
        public Vector3 GetBulbPosition()
        {
            Vector3 position = Vector3.Zero;

            int iX, iY;
            double radius;

            if (TryLocateBulb(out iX, out iY, out radius))
            {
                // Estimate the distance from the apparent bulb size using a
                // simple lens model of the camera.
                float fAlpha = 75.0f;       // opening angle [degrees]
                float fFocalLength = 25.0f; // focal length [cm]
                float fDiagonal = (float)Math.Sqrt(640 * 640 + 480 * 480);
                float fG = fDiagonal / (2.0f * (float)radius);
                float fB = 2.0f * fFocalLength * (float)Math.Tan((double)MathHelper.ToRadians(fAlpha) / 2.0);
                float fA = fB / fG;

                m_fDistance = (fA + 1.0f) * fFocalLength / fA;

                // Normalize the pixel position to [0..1].
                position = new Vector3((float)(iX / 640.0f), (float)(iY / 480.0f), m_fDistance);
                m_Position = new Vector2(position.X, position.Y);

                m_fDistanceLast = m_fDistance;
            }

            return position;
        }

        /// <summary>
        /// Tracks the bulb in the current frame and returns its estimated
        /// radius in pixels, or -1.0 when no plausible bulb was found.
        /// Also refreshes the 'gray' binary image as a side effect.
        /// </summary>
        public double GetBulbRadius()
        {
            int iX, iY;
            double radius;

            if (TryLocateBulb(out iX, out iY, out radius))
                return radius;

            return -1.0;
        }

        /// <summary>Detaches from the frame event and releases the camera.</summary>
        public void Dispose()
        {
            if (m_Eye != null)
            {
                // Unsubscribing is a no-op if we never attached (no camera).
                m_Eye.ImageArrivedEvent -= myEye_ImageArrivedEvent;
                m_Eye.Dispose();
            }
        }
    }
}
