// --------------------------------------------------------------------------------------------------------------------
// <copyright file="KinectAgentMessageProcessor.cs" company="Microsoft Corporation">
// The MIT License (MIT)
// 
// Copyright (c) 2014, Microsoft Corporation
// 
// Permission is hereby granted, free of charge, to any person obtaining a copy
//  of this software and associated documentation files (the "Software"), to deal
//  in the Software without restriction, including without limitation the rights
//  to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
//  copies of the Software, and to permit persons to whom the Software is
//  furnished to do so, subject to the following conditions:
// 
// The above copyright notice and this permission notice shall be included in
//  all copies or substantial portions of the Software.
// 
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
//  IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
//  FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
//  AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
//  LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
//  OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
//  THE SOFTWARE.
// </copyright>
// --------------------------------------------------------------------------------------------------------------------
namespace Microsoft.Robotics.Vision.Runtime.Cameras
{
    using System;
    using System.IO; 
    using System.Runtime.Serialization;
    using Microsoft.Robotics.Numerics;
    using Microsoft.Robotics.Runtime;
    using Microsoft.Robotics.Vision.Cameras;

    /// <summary>
    /// Transforms a "raw" depth Kinect image into a standard version that downstream components consume.
    /// Current processing steps are:
    /// 1) Clamp depth readings to lie in the range [0 - MaximumDepthImageRange]
    /// 2) Assign pixel values less than 0 or having NoReadingValue the value NoReadingValue (note that
    ///      the Kinect driver creates KinectAgentRawMessage with noReadingValue)
    /// 3) Flip images from right-to-left (Kinect format) to left-to-right (historical Mars format)
    /// 4) Apply a linear rescaling of the depth, d' = m*d + c. We found this type of correction improves
    ///     absolute depth readings
    /// </summary>
    [DataContract]
    public class KinectAgentMessageProcessor : ConsumerProducerAgent<DepthCameraAgentMessage<KinectDepthCameraParameters>, DepthCameraAgentMessage<KinectDepthCameraParameters>>
    {
        /// <summary>
        /// Floor filtering calibration file name (looked up inside <see cref="calibrationDataFolder"/>)
        /// </summary>
        private const string FloorFilteringDataFilename = "FloorCalibration.xml";

        /// <summary>
        /// Kinect depth calibration data file name (looked up inside <see cref="calibrationDataFolder"/>)
        /// </summary>
        private const string KinectCalibrationDataFileName = "DepthCalibration.xml";

        /// <summary>
        /// Calibration data folder where all calibration files live, loaded at startup.
        /// Declared readonly but still populated by DataContract deserialization via the DataMember name.
        /// </summary>
        [DataMember(Name = "CalibrationDataFolder")]
        private readonly string calibrationDataFolder = default(string);        

        /// <summary>
        /// Maximum horizon displacement. Determines how much tilt up will floor filter tolerate before stopping to 
        /// look for floor. For a perfectly level, never tilting camera - the value would be 0.5 (middle of the frame). 
        /// In realistic floor-roving robot scenarios - set this value to 0.7 or more
        /// </summary>
        [DataMember(Name = "MaxAllowedHorizonDisplacement")]
        private readonly double? maxAllowedHorizonDisplacement;

        /// <summary>
        /// Minimum horizon displacement. Determines how much tilt up will floor filter tolerate before stopping to 
        /// look for floor. For a perfectly level, never tilting camera - the value would be 0.5 (middle of the frame). 
        /// For realistic floor-roving robot scenarios - set this value to -0.1 or less
        /// </summary>
        [DataMember(Name = "MinAllowedHorizonDisplacement")]
        private readonly double? minAllowedHorizonDisplacement; 

        /// <summary>
        /// Maximum roughness determines how much of a rough surface will floor filter tolerate before considering floor too noisy to learn
        /// Note. This value is a variance of front 10% of the pixels, thus, tilting up will result in increased observed "roughness"
        /// The value of 200-400 is a good ballpark for Ev4
        /// </summary>
        [DataMember(Name = "MaxSurfaceRoughness")]
        private readonly double? maxSurfaceRoughness; 

        /// <summary>
        /// Maximum vertical shift up in Kinect height that determines how much of variance in calculating B coefficient will floor filter tolerate before 
        /// considering floor too far from calibration point. The value of 10-15 is a good ballpark for Ev4
        /// </summary>
        [DataMember(Name = "MaxVerticalTravelPercentFromCalibrated")]
        private readonly double? maxVerticalTravelPercentFromCalibrated;

        /// <summary>
        /// Maximum vertical shift down in Kinect height that determines how much of variance in calculating B coefficient will floor filter tolerate before 
        /// considering floor too far from calibration point. The value of 5 is a good ballpark for Ev4
        /// </summary>
        [DataMember(Name = "MinVerticalTravelPercentFromCalibrated")]
        private readonly double? minVerticalTravelPercentFromCalibrated;

        /// <summary>
        /// Determines the slope of floor cutoff plane. The higher - the steeper the plane. Depending on how steep the cutoff plane is
        /// we will be able to detect more low level objects at farther distances, albeit at increased risk of false negatives (floor not
        /// marked as such) especially when robot is on challenging terrain such as uneven surfaces, etc.
        /// As a guideline: 0 - results no slope, just clearance 0.5 - 1 - is a moderate slope up (0.5 used by default). 2 and more should 
        /// be considered aggressive
        /// </summary>
        [DataMember(Name = "FloorCutoffPlaneSlopeAggressiveness")]
        private readonly double? floorCutoffPlaneSlopeAggressiveness;

        /// <summary>
        /// Clearance has two ingredients: Constant shift (this) and linear component as a function of depth in mm at the front of the robot.
        /// Those two parameters are used together to allow a wide range of tilt and roll angles (since different clearances are optimal at different tilts)
        /// Note. This value can be easily learned at calibration time provided camera can be programmatically tilted. However, at the moment it is not, and so
        /// this value has been discovered empirically and should be transferable across robots.
        /// </summary>
        [DataMember(Name = "FloorCutoffClearanceConstantShift")]
        private readonly double? floorCutoffClearanceConstantShift;

        /// <summary>
        /// Clearance has two ingredients: Constant shift and linear component as a function of depth in mm at the front of the robot (this).
        /// Those two parameters are used together to allow a wide range of tilt and roll angles (since different clearances are optimal at different angles)
        /// Note. This value can be easily learned at calibration time provided camera can be programmatically tilted. However, at the moment it is not, and so
        /// this value has been discovered empirically and should be transferable across robots.
        /// </summary>
        // NOTE(review): this DataMember name is camelCase while every other one in this class is PascalCase.
        // It is left as-is because the name is part of the serialization contract and changing it would
        // silently break existing configuration files — confirm whether the inconsistency is intentional.
        [DataMember(Name = "floorCutoffClearanceLinearFunctionOfDistance")]
        private readonly double? floorCutoffClearanceLinearFunctionOfDistance;

        /// <summary>
        /// Ceiling cutoff height in MM from the ground. The ceiling cutoff plane will be parallel to the learned floor plane regardless
        /// of tilt/roll of the robot.
        /// </summary>
        [DataMember(Name = "CeilingCutoffInMM")]
        private readonly double? ceilingCutoffInMM;

        /// <summary>
        /// Disable depth mirroring. Null means "not configured"; defaulted to true in <see cref="Initialize"/>.
        /// </summary>
        [DataMember(Name = "DisableDepthMirror")]
        private bool? disableDepthMirror;

        /// <summary>
        /// Disable floor filtering that removes pixels below the floor plane.
        /// By default filtering is enabled. Null means "not configured"; defaulted to false in <see cref="Initialize"/>.
        /// </summary>
        [DataMember(Name = "DisableFloorFiltering")]
        private bool? disableFloorFiltering;

        /// <summary>
        /// Disable all processing (raw messages are republished untouched).
        /// Null means "not configured"; defaulted to false in <see cref="Initialize"/>.
        /// </summary>
        [DataMember(Name = "DisableAll")]
        private bool? disableAll;

        /// <summary>
        /// Correction helper utility; injected via the internal constructor or created in <see cref="Initialize"/>
        /// </summary>
        private IKinectCalibrationDataLoader calibrationDataLoader;

        /// <summary>
        /// Floor filter injected from outside or created new
        /// </summary>
        private IKinectFloorFilter floorFilter;
        
        /// <summary>
        /// Utility class for transforming messages; constructed in <see cref="Initialize"/>
        /// </summary>
        private KinectMessageProcessorUtilities kinectMessageProcessorUtilities;

        /// <summary>
        /// Initializes a new instance of the <see cref="KinectAgentMessageProcessor"/> class. 
        /// </summary>
        /// <param name="name">Processor name</param>
        /// <param name="producer">Message producer - normally KinectAgent. Dereferenced immediately for its Name; must not be null.</param>
        /// <param name="calibrationDataFolder">Calibration data folder where all calibration files are</param>        
        /// <param name="disableDepthMirror">Flag to disable flipping the depth image</param>
        /// <param name="maxAllowedHorizonDisplacement">Defines how much tilt up we allow (beyond this - we wont extract floor)</param>
        /// <param name="minAllowedHorizonDisplacement">Defines how much tilt down we allow (beyond this - we wont extract floor)</param>
        /// <param name="maxSurfaceRoughness">Max allowed floor roughness</param>
        /// <param name="maxVerticalTravelPercentFromCalibrated">Max allowed vertical shift up in percent as compared to calibration data</param>
        /// <param name="minVerticalTravelPercentFromCalibrated">Max allowed vertical shift down in percent as compared to calibration data</param>
        /// <param name="floorCutoffPlaneSlopeAggressiveness">Floor plane slope aggressiveness factor</param>
        /// <param name="floorCutoffClearanceConstantShift">Floor plane clearance constant shift factor</param>
        /// <param name="floorCutoffClearanceLinearFunctionOfDistance">Floor plane clearance linear factor of depth at the bottom of the frame</param>
        /// <param name="ceilingCutoffInMM">Ceiling cutoff in MM from the ground</param>
        /// <param name="disableFloorFiltering">Disable floor filtering of the image</param>
        /// <param name="disableAll">Disable all process and pass through the raw kinect data</param>
        public KinectAgentMessageProcessor(
            string name, 
            IProducer<DepthCameraAgentMessage<KinectDepthCameraParameters>> producer,
            string calibrationDataFolder,
            bool disableDepthMirror,
            double maxAllowedHorizonDisplacement,
            double minAllowedHorizonDisplacement,
            double maxSurfaceRoughness,
            double maxVerticalTravelPercentFromCalibrated,
            double minVerticalTravelPercentFromCalibrated,
            double floorCutoffPlaneSlopeAggressiveness,
            double floorCutoffClearanceConstantShift,
            double floorCutoffClearanceLinearFunctionOfDistance,
            double ceilingCutoffInMM,
            bool disableFloorFiltering = false,
            bool disableAll = false)
            : base(name, producer.Name)
        {
            this.calibrationDataFolder = calibrationDataFolder;
            this.disableDepthMirror = disableDepthMirror;
            this.maxAllowedHorizonDisplacement = maxAllowedHorizonDisplacement;
            this.minAllowedHorizonDisplacement = minAllowedHorizonDisplacement;
            this.maxSurfaceRoughness = maxSurfaceRoughness;
            this.maxVerticalTravelPercentFromCalibrated = maxVerticalTravelPercentFromCalibrated;
            this.minVerticalTravelPercentFromCalibrated = minVerticalTravelPercentFromCalibrated;
            this.floorCutoffPlaneSlopeAggressiveness = floorCutoffPlaneSlopeAggressiveness;
            this.floorCutoffClearanceConstantShift = floorCutoffClearanceConstantShift;
            this.floorCutoffClearanceLinearFunctionOfDistance = floorCutoffClearanceLinearFunctionOfDistance;
            this.ceilingCutoffInMM = ceilingCutoffInMM;
            this.disableFloorFiltering = disableFloorFiltering;
            this.disableAll = disableAll;
        }

        /// <summary>
        /// Initializes a new instance of the <see cref="KinectAgentMessageProcessor"/> class.
        /// Intended for testing: allows injecting the calibration loader and floor filter.
        /// The floor-filtering tuning parameters are left null here and are defaulted (or the
        /// fallback filter is used) in <see cref="Initialize"/>.
        /// </summary>
        /// <param name="correctionhelper">Correction helper utility</param>
        /// <param name="floorFilter">Floor filter utility</param>
        /// <param name="name">Processor name</param>
        /// <param name="calibrationDataFolder">Calibration data folder where all calibration files live</param>
        /// <param name="disableDepthMirror">Flag to disable flipping the depth image</param>
        /// <param name="producer">Message producer - normally KinectAgent. Dereferenced immediately for its Name; must not be null.</param>
        internal KinectAgentMessageProcessor(
                        IKinectCalibrationDataLoader correctionhelper,
                        IKinectFloorFilter floorFilter,
                        string name,
                        string calibrationDataFolder,
                        bool disableDepthMirror,
                        IProducer<DepthCameraAgentMessage<KinectDepthCameraParameters>> producer)
                        : base(name, producer.Name)
        {
            this.calibrationDataLoader = correctionhelper;
            this.floorFilter = floorFilter;
            this.calibrationDataFolder = calibrationDataFolder;
            this.disableDepthMirror = disableDepthMirror;
        }

        /// <summary>
        /// Called after construction, to allow the agent to finish initialization.
        /// Defaults any unconfigured nullable flags, then builds the floor filter and the
        /// message transformation utilities. Must run before <see cref="Receive"/> is called,
        /// since Receive relies on the flags and utilities created here.
        /// </summary>
        /// <param name="locator">A locator that can be used to find other agents.</param>
        public override void Initialize(AgentLocator locator)
        {
            base.Initialize(locator);

            // Null means the setting was never supplied (e.g. via the internal constructor
            // or an incomplete deserialized config) - fall back to defaults.
            if (null == this.disableAll)
            {
                this.disableAll = false;
            }

            if (null == this.calibrationDataLoader)
            {
                // no dependency injected - use default helper
                this.calibrationDataLoader = new KinectCalibrationDataLoader();
            }

            // Jan 2013 - By default turn off mirror imaging the depth 
            if (null == this.disableDepthMirror)
            {
                this.disableDepthMirror = true;
            }

            if (false == this.disableFloorFiltering.HasValue)
            {
                this.disableFloorFiltering = false;
            }

            if (default(string) == this.calibrationDataFolder)
            {
                Console.WriteLine("Calibration folder not specified. Consider doing that.");
            }
            
            // Order matters: the utilities constructed below capture the floor filter,
            // so the filter (or its fallback) must exist first.
            this.InitializeFloorFiltering();

            this.kinectMessageProcessorUtilities = new KinectMessageProcessorUtilities(this.disableDepthMirror.Value, this.floorFilter, this.disableFloorFiltering.Value);

            this.InitializeKinectCalibration();            
        }

        /// <summary>
        /// Receive, process and republish a message.
        /// When all processing is disabled the original pixel buffer is passed through
        /// unmodified (no copy is made, so the published frame shares the incoming array).
        /// </summary>
        /// <param name="message">Message to process</param>
        public override void Receive(DepthCameraAgentMessage<KinectDepthCameraParameters> message)
        {
            short[] pixelData;

            if (this.disableAll == true)
            {
                pixelData = message.ImageFrame.ImageData;
            }
            else
            {
                pixelData = this.kinectMessageProcessorUtilities.TransformDepthImage(
                                                                                        new System.Drawing.Size(message.ImageFrame.Width, message.ImageFrame.Height),
                                                                                        message.ImageFrame.ImageData,
                                                                                        message.Parameters.UnknownDepth,
                                                                                        message.Parameters.MaxDepth,
                                                                                        message.Parameters.TooFarDepth);
            }

            // Republish with the original timestamp and parameters, only the depth frame is replaced.
            ImageFrameDepth newDepthFrame = new ImageFrameDepth(message.ImageFrame.Width, message.ImageFrame.Height, pixelData);
            DepthCameraAgentMessage<KinectDepthCameraParameters> processedImage = new DepthCameraAgentMessage<KinectDepthCameraParameters>(
                message.OriginatingTime,
                newDepthFrame,
                message.Parameters);

            this.Publisher.Post(processedImage);
        }

        /// <summary>
        /// Loads Kinect calibration file if specified and initializes processor with it.
        /// A missing or unreadable file is non-fatal: a warning is printed and depth
        /// correction is simply skipped.
        /// </summary>
        private void InitializeKinectCalibration()
        {
            double[][] correctionData = null;

            if (default(string) != this.calibrationDataFolder)
            {
                string calibrationFile = Path.Combine(this.calibrationDataFolder, KinectCalibrationDataFileName);

                correctionData = this.calibrationDataLoader.ImportTable(calibrationFile);

                if (null != correctionData)
                {
                    this.kinectMessageProcessorUtilities.SetBucketCalibrationData(correctionData);

                    Console.WriteLine("Loaded Kinect calibration file: {0} ({1:d})", calibrationFile, File.GetLastWriteTime(calibrationFile));
                }
                else
                {
                    Console.WriteLine("Warning! Could not load: {0}. Depth camera will not be corrected.", calibrationFile);
                }
            }
        }

        /// <summary>
        /// Loads floor calibration file if specified and initializes floor filter.
        /// A calibrated filter is built only when no filter was injected AND a calibration
        /// folder AND every tuning parameter were supplied AND the calibration file loads;
        /// otherwise a default wide-margin filter is used.
        /// </summary>
        private void InitializeFloorFiltering()
        {
            if (null == this.floorFilter)
            {
                // All tuning parameters are required together - a partially configured
                // filter would behave unpredictably, so any missing value means fallback.
                if (default(string) != this.calibrationDataFolder &&
                    this.maxAllowedHorizonDisplacement.HasValue &&
                    this.minAllowedHorizonDisplacement.HasValue &&
                    this.maxSurfaceRoughness.HasValue &&
                    this.maxVerticalTravelPercentFromCalibrated.HasValue &&
                    this.minVerticalTravelPercentFromCalibrated.HasValue &&
                    this.floorCutoffPlaneSlopeAggressiveness.HasValue &&
                    this.floorCutoffClearanceConstantShift.HasValue &&
                    this.floorCutoffClearanceLinearFunctionOfDistance.HasValue &&
                    this.ceilingCutoffInMM.HasValue)
                {
                    string file = Path.Combine(this.calibrationDataFolder, FloorFilteringDataFilename);

                    double[][] floorData = this.calibrationDataLoader.ImportTable(file);

                    if (null != floorData)
                    {
                        IFlatSurfaceLearnDataPruner pruner = new Ev4FloorDataPruner(
                            this.maxAllowedHorizonDisplacement.Value,
                            this.minAllowedHorizonDisplacement.Value,
                            this.maxSurfaceRoughness.Value,
                            this.maxVerticalTravelPercentFromCalibrated.Value,
                            this.minVerticalTravelPercentFromCalibrated.Value);

                        this.floorFilter = new Ev4FloorFilter(
                            pruner,
                            this.floorCutoffClearanceConstantShift.Value,
                            this.floorCutoffClearanceLinearFunctionOfDistance.Value,
                            this.floorCutoffPlaneSlopeAggressiveness.Value,
                            this.ceilingCutoffInMM.Value);

                        this.floorFilter.SetCalibrationData(floorData);

                        Console.WriteLine("Loaded Floor calibration file: {0} ({1:d})", file, File.GetLastWriteTime(file));
                    }
                    else
                    {
                        Console.WriteLine("Warning! Cannot open calibration file: {0}. Floor extraction will be very aggressive.", file);
                    }
                }
            }

            if (null == this.floorFilter)
            {
                // If there was any trouble loading a floor calibration file, or file was not specified - 
                // we still need to apply floor filtering. We use a default floor filter that will apply 
                // a wide floor and ceiling cutoff margin
                this.floorFilter = new Ev4FloorFilter(null, 0, 0, 0, 2000);

                Console.WriteLine("No floor filtering file or filtering parameters specified, consider doing that");
            }            
        }
    }
}
