﻿#region Usings
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Diagnostics;
using System.Drawing;
using System.Drawing.Imaging;
using System.Runtime.InteropServices;
using System.Windows;
using System.Windows.Interop;
using System.Windows.Media;
using System.Windows.Media.Imaging;
using System.Windows.Threading;
using LaserTag.Infrastructure.Events;
using LaserTag.Infrastructure.ViewModels;
using LaserTag.Module.AForgeModule.Services;
using LaserTag.Module.AForgeModule.VideoProcessors;
using LaserTag.Module.AForgeModule.Views;
using Microsoft.Practices.Composite.Events;
using Microsoft.Practices.Unity;
using Point=System.Windows.Point;
using Size=System.Windows.Size;

#endregion

namespace LaserTag.Module.AForgeModule.ViewModels
{
    /// <summary>
    /// View model for the AForge video processor setup view.  Receives post-processed
    /// video frames via the event aggregator, copies them into a <see cref="WriteableBitmap"/>
    /// for on-screen preview, and exposes the HSL filter ranges the user can tune.
    /// Filter values are loaded from and persisted to <see cref="IHslConfiguration"/>
    /// (saved on system shutdown).
    /// </summary>
    public class AForgeVideoProcessorSetupViewModel : ViewModelBase,
                                                      IAForgeVideoProcessorSetupViewModel
    {
        #region Local Fields

        /// <summary>
        /// Only every Nth frame is pushed to the UI preview, to keep the
        /// frame-copy cost down.
        /// </summary>
        private const int FrameDisplayInterval = 10;

        /// <summary>
        /// Backing field for <see cref="ViewActivated"/>
        /// </summary>
        private bool m_viewActivated;

        /// <summary>
        /// Reference to the UI dispatcher.  This is set in the ctor so that frame
        /// callbacks arriving on worker threads can marshal bitmap work back onto
        /// the UI thread.
        /// </summary>
        private readonly Dispatcher m_uiDispatcher;

        /// <summary>
        /// Our helper class that processes the video
        /// </summary>
        private readonly AForgeVideoProcessor m_videoProcessor;

        /// <summary>
        /// The event mediator that is passed by dependency injection
        /// </summary>
        private readonly IEventAggregator m_eventAggregator;

        /// <summary>
        /// Persisted HSL filter configuration, passed by dependency injection
        /// </summary>
        private readonly IHslConfiguration m_hslConfiguration;

        /// <summary>
        /// This is the bitmap we use to write our post processed video to.  It is applied
        /// to an Image's Source property
        /// </summary>
        private WriteableBitmap m_postProcessedBitmap;

        /// <summary>
        /// Backing field for the hue maximum
        /// </summary>
        private int m_hueMaximum = 359;

        /// <summary>
        /// Backing field for the hue minimum
        /// </summary>
        private int m_hueMinimum;

        /// <summary>
        /// Backing field for the luminance maximum
        /// </summary>
        private double m_luminanceMaximum = 1;

        /// <summary>
        /// Backing field for the luminance minimum
        /// </summary>
        private double m_luminanceMinimum;

        /// <summary>
        /// Backing field for the saturation maximum
        /// </summary>
        private double m_saturationMaximum = 1;

        /// <summary>
        /// Backing field for the saturation minimum
        /// </summary>
        private double m_saturationMinimum;

        /// <summary>
        /// The location of our tracked point
        /// </summary>
        private Point m_trackedObject;

        /// <summary>
        /// The pixel area of our target (the video frame)
        /// </summary>
        private Size m_targetAreaSize;

        /// <summary>
        /// The native pointer to the WriteableBitmap's back buffer.  Cached so the
        /// frame callback does not need a Dispatcher.Invoke per frame.
        /// </summary>
        private IntPtr m_writeableBitmapBackBuffer;

        /// <summary>
        /// The WriteableBitmap's pixel width (cached, see back buffer note)
        /// </summary>
        private int m_writeableBitmapWidth;

        /// <summary>
        /// The WriteableBitmap's pixel height (cached, see back buffer note)
        /// </summary>
        private int m_writeableBitmapHeight;

        /// <summary>
        /// Counts incoming frames so all but every <see cref="FrameDisplayInterval"/>th
        /// frame can be skipped.
        /// </summary>
        private int m_frameCounter;

        /// <summary>
        /// Backing field for <see cref="TargetSearchArea"/>
        /// </summary>
        private Rect m_targetSearchArea;

        #endregion

        #region P/Invoke
        /// <summary>
        /// P/Invoke for fast unmanaged memory copying.
        /// </summary>
        [DllImport("Kernel32.dll", EntryPoint = "RtlMoveMemory")]
        private static extern void CopyMemory(IntPtr destination, IntPtr source, [MarshalAs(UnmanagedType.U4)] int length);
        #endregion

        /// <summary>
        /// Initializes a new instance of the view model, wires up the event
        /// aggregator subscriptions, attaches itself to the view and loads the
        /// persisted HSL filter configuration.
        /// </summary>
        /// <param name="view">The setup view this view model backs</param>
        /// <param name="container">DI container used to resolve the video processor</param>
        /// <param name="eventAggregator">Event mediator for frame/detection/shutdown events</param>
        /// <param name="hslConfiguration">Persisted HSL filter settings</param>
        public AForgeVideoProcessorSetupViewModel(IAForgeVideoProcessorSetupView view, 
                                                  IUnityContainer container, 
                                                  IEventAggregator eventAggregator, 
                                                  IHslConfiguration hslConfiguration)
        {
            m_hslConfiguration = hslConfiguration;

            /* Get the current UI dispatcher */
            m_uiDispatcher = Dispatcher.CurrentDispatcher;

            /* Store reference to the event aggregator so we can publish events */
            m_eventAggregator = eventAggregator;

            /* Create a new instance of our video processor using dependency injection */
            m_videoProcessor = container.Resolve<AForgeVideoProcessor>();

            /* Set our video processor default values */
            SetVideoProcessorValues();

            /* Subscribe to the event that tells us that new objects have been detected */
            m_eventAggregator.GetEvent<NewObjectsDetectedEvent>().Subscribe(OnNewObjectsDetected /* Callback */, 
                                                                            false /* Use weak refs */);

            /* We subscribe to the NewPostProcessedVideoFrameEvent.  This sends us a new, post-processed video frame.
             * The last parameter, 'false', says we wish to use weak references.  This should avoid any possible memory
             * leaks that you get in standard .NET events/delegates when you forget to unhook or dereference. */
            m_eventAggregator.GetEvent<NewPostProcessedVideoFrameEvent>().Subscribe(NewPostProcessedVideoFrameEvent,
                                                                                    false);

            /* Persist the user's filter settings when the system shuts down */
            m_eventAggregator.GetEvent<SystemShutdownEvent>().Subscribe(OnSystemShutdown,
                                                                                   false);

            /* Set our view model, so our XAML binding will work */
            view.SetViewModel(this);
            
            View = view;

            LoadProcessorConfiguration();
        }

        /// <summary>
        /// Shutdown callback; saves the current HSL filter settings.
        /// </summary>
        /// <param name="e">Unused event payload</param>
        private void OnSystemShutdown(bool e)
        {
            SaveProcessorConfiguration();
        }

        #region Public Properties

        /// <summary>
        /// Gets or sets the (ratio based) area of the frame in which to search
        /// for the tracked object.  Forwarded to the video processor.
        /// </summary>
        public Rect TargetSearchArea
        {
            get { return m_targetSearchArea; }
            set
            {
                m_targetSearchArea = value;
                m_videoProcessor.TrackSearchAreaRatio = m_targetSearchArea;
                InvokePropertyChanged("TargetSearchArea");
            }
        }

        /// <summary>
        /// The bitmap, after it has been processed by the vision routines
        /// </summary>
        public WriteableBitmap PostProcessedBitmap
        {
            get
            {
                return m_postProcessedBitmap;
            }
            set
            {
                m_postProcessedBitmap = value;
                InvokePropertyChanged("PostProcessedBitmap");
            }
        }

        /// <summary>
        /// Gets or sets the minimum hue to filter.
        /// Minimum is 0.  Maximum is 359.
        /// </summary>
        public int HueMinimum
        {
            get { return m_hueMinimum; }
            set
            {
                m_hueMinimum = value;
                SetVideoProcessorValues();
                InvokePropertyChanged("HueMinimum");
            }
        }

        /// <summary>
        /// Gets or sets the maximum hue to filter.
        /// Minimum is 0.  Maximum is 359.
        /// </summary>
        public int HueMaximum
        {
            get { return m_hueMaximum; }
            set
            {
                m_hueMaximum = value;
                SetVideoProcessorValues();
                InvokePropertyChanged("HueMaximum");
            }
        }

        /// <summary>
        /// Gets or sets the minimum saturation to filter.
        /// Minimum is 0.0.  Maximum is 1.0.
        /// </summary>
        public double SaturationMinimum
        {
            get { return m_saturationMinimum; }
            set
            {
                m_saturationMinimum = value;
                SetVideoProcessorValues();
                InvokePropertyChanged("SaturationMinimum");
            }
        }

        /// <summary>
        /// Gets or sets the maximum saturation to filter.
        /// Minimum is 0.0.  Maximum is 1.0.
        /// </summary>
        public double SaturationMaximum
        {
            get { return m_saturationMaximum; }
            set
            {
                m_saturationMaximum = value;
                SetVideoProcessorValues();
                InvokePropertyChanged("SaturationMaximum");
            }
        }

        /// <summary>
        /// Gets or sets the minimum luminance value to filter.
        /// Minimum is 0.0.  Maximum is 1.0.
        /// </summary>
        public double LuminanceMinimum
        {
            get { return m_luminanceMinimum; }
            set
            {
                m_luminanceMinimum = value;
                SetVideoProcessorValues();
                InvokePropertyChanged("LuminanceMinimum");
            }
        }

        /// <summary>
        /// Gets or sets maximum luminance value to filter.
        /// Minimum is 0.0.  Maximum is 1.0.
        /// </summary>
        public double LuminanceMaximum
        {
            get { return m_luminanceMaximum; }
            set
            {
                m_luminanceMaximum = value;
                SetVideoProcessorValues();
                InvokePropertyChanged("LuminanceMaximum");
            }
        }

        /// <summary>
        /// Gets or sets the center coordinates of the tracked object,
        /// in its native units (pixels).
        /// </summary>
        public Point TrackedObjectCenter
        {
            get { return m_trackedObject; }
            set
            {
                m_trackedObject = value;
                InvokePropertyChanged("TrackedObjectCenter");
            }
        }

        /// <summary>
        /// Gets or sets the target area size (video frame size).
        /// </summary>
        public Size TargetAreaSize
        {
            get { return m_targetAreaSize; }
            set
            {
                m_targetAreaSize = value;
                InvokePropertyChanged("TargetAreaSize");
            }
        }
        #endregion

        #region Private Methods
        /// <summary>
        /// Callback from the event mediator, notifying of a new post-processed frame.
        /// This could be called from a thread that is *not* the UI/Dispatcher thread,
        /// so all WriteableBitmap access is marshalled through the dispatcher.
        /// </summary>
        /// <param name="image">The post-processed video frame</param>
        private void NewPostProcessedVideoFrameEvent(Bitmap image)
        {
            /* Throttle: only push every FrameDisplayInterval-th frame to the UI */
            m_frameCounter++;

            if (m_frameCounter % FrameDisplayInterval != 0)
                return;

            if (m_postProcessedBitmap == null)
            {
                /* We have to touch the m_postProcessedBitmap, so we have
                 * to queue this method on the dispatcher to run on the UI thread
                 * or else we will get the dreaded cross threaded exception */
                m_uiDispatcher.Invoke((Action)delegate
                {
                    /* Re-check inside the dispatcher: another queued frame may
                     * already have created the WriteableBitmap */
                    if (m_postProcessedBitmap == null)
                    {
                        PostProcessedBitmap = new WriteableBitmap(image.Width,
                                                                  image.Height,
                                                                  96, /* dpi x */
                                                                  96, /* dpi y */
                                                                  PixelFormats.Bgr24,
                                                                  null /* no palette */);
                    }

                    /* Cache our settings so we don't have to keep doing a Dispatcher.Invoke */
                    m_writeableBitmapBackBuffer = PostProcessedBitmap.BackBuffer;
                    m_writeableBitmapWidth = PostProcessedBitmap.PixelWidth;
                    m_writeableBitmapHeight = PostProcessedBitmap.PixelHeight;
                });
            }

            /* We lock our GDI bitmap, so we can get the native pointer to the pixels.
             * NOTE(review): this assumes every incoming frame keeps the size of the
             * first frame, and that both buffers have a stride of exactly width * 3
             * bytes (no row padding) — TODO confirm for widths that are not a
             * multiple of four. */
            BitmapData bmpData = image.LockBits(new Rectangle(0, 0, m_writeableBitmapWidth, m_writeableBitmapHeight),
                                                ImageLockMode.ReadOnly,
                                                System.Drawing.Imaging.PixelFormat.Format24bppRgb);

            /* Copy the GDI bitmap pixels to our WPF WriteableBitmap */
            CopyMemory(m_writeableBitmapBackBuffer,  /* The WriteableBitmap's backbuffer native pointer */
                       bmpData.Scan0, /* The GDI bitmap's pointer to pixel data */
                       m_writeableBitmapWidth * m_writeableBitmapHeight * 3 /* The total amount of bytes (3 bytes per Bgr24 pixel) */);

            /* We are done with the GDI bitmap */
            image.UnlockBits(bmpData);

            m_uiDispatcher.BeginInvoke((Action)delegate
            {
                /* Lock our WriteableBitmap so we can update it */
                PostProcessedBitmap.Lock();
                /* Tell WPF that the entire WriteableBitmap is dirty and to redraw it */
                PostProcessedBitmap.AddDirtyRect(new Int32Rect(0, 0, m_writeableBitmapWidth, m_writeableBitmapHeight));
                /* We are done with the WriteableBitmap */
                PostProcessedBitmap.Unlock();
            });
        }

        /// <summary>
        /// Callback from the event mediator with the objects detected in the
        /// current frame.  Publishes the first (biggest) object's center and the
        /// detection area size through the bindable properties.
        /// </summary>
        /// <param name="objects">Detected objects, presumably ordered biggest first — TODO confirm against publisher</param>
        private void OnNewObjectsDetected(List<DetectedObject> objects)
        {
            if (objects.Count == 0)
                return;

            /* Use only the first (biggest) object found */
            TrackedObjectCenter = objects[0].GetCenter();

            TargetAreaSize = new Size(objects[0].DetectionAreaSize.Width, objects[0].DetectionAreaSize.Height);
        }

        /// <summary>
        /// Pulls the persisted HSL filter values into the bindable properties
        /// (each setter also forwards the value to the video processor).
        /// </summary>
        private void LoadProcessorConfiguration()
        {
            HueMinimum = m_hslConfiguration.HueMinimum;
            HueMaximum = m_hslConfiguration.HueMaximum;

            SaturationMinimum = m_hslConfiguration.SaturationMinimum;
            SaturationMaximum = m_hslConfiguration.SaturationMaximum;

            LuminanceMinimum = m_hslConfiguration.LuminanceMinimum;
            LuminanceMaximum = m_hslConfiguration.LuminanceMaximum;
        }

        /// <summary>
        /// Writes the current HSL filter values back to the configuration
        /// store and saves it.
        /// </summary>
        private void SaveProcessorConfiguration()
        {
            m_hslConfiguration.HueMinimum = HueMinimum;
            m_hslConfiguration.HueMaximum = HueMaximum;

            m_hslConfiguration.SaturationMinimum = SaturationMinimum;
            m_hslConfiguration.SaturationMaximum = SaturationMaximum;

            m_hslConfiguration.LuminanceMinimum = LuminanceMinimum;
            m_hslConfiguration.LuminanceMaximum = LuminanceMaximum;

            m_hslConfiguration.Save();
        }

        /// <summary>
        /// Pushes the current HSL filter ranges (and the fixed minimum blob
        /// dimensions) down to the video processor.
        /// </summary>
        private void SetVideoProcessorValues()
        {
            m_videoProcessor.HueMinimum = HueMinimum;
            m_videoProcessor.HueMaximum = HueMaximum;
            m_videoProcessor.SaturationMinimum = SaturationMinimum;
            m_videoProcessor.SaturationMaximum = SaturationMaximum;
            m_videoProcessor.LuminanceMinimum = LuminanceMinimum;
            m_videoProcessor.LuminanceMaximum = LuminanceMaximum;

            /* Ignore blobs smaller than 3x3 pixels as noise */
            m_videoProcessor.BlobMinimumHeight = 3;
            m_videoProcessor.BlobMinimumWidth = 3;
        }
        #endregion

        #region IAForgeVideoProcessorSetupViewModel Members

        /// <summary>
        /// The view this view model is attached to
        /// </summary>
        public object View { get; private set; }

        /// <summary>
        /// Gets or sets whether the view is currently activated
        /// </summary>
        public bool ViewActivated
        {
            get
            {
                return m_viewActivated;
            }
            set
            {
                m_viewActivated = value;
                InvokePropertyChanged("ViewActivated");
            }
        }

        #endregion
    }
}