﻿using log4net;
using OpenCL.Net;
using SharpML.Api.Abstraction.Plugin;
using SharpML.Api.Extensions;
using SharpML.Api.Implementation;
using SharpML.Api.Types;
using SharpML.Api.Enumerations;
using System;
using System.Collections.Generic;
using System.ComponentModel;
using System.Configuration;
using System.Linq;
using System.Text;
using SharpML.Engines.DeepBelief.Controls;

namespace SharpML.Engines.DeepBelief.Implementation
{
    /// <summary>
    /// Deep Belief Network machine-learning engine: a stack of layers connected by
    /// <see cref="InterLayerWeights"/>, trained greedily one weight layer at a time
    /// (RBM-style) via <see cref="RbmProgram"/>.
    /// </summary>
    [Description("Deep Belief Network")]
    public class DeepBeliefNetwork : MLEngine
    {
        /// <summary>
        /// Simple logging pattern
        /// </summary>
        private static readonly ILog Log = LogManager.GetLogger( System.Reflection.MethodBase.GetCurrentMethod().DeclaringType );

        /// <summary>Node count per layer, bottom (input) layer first.</summary>
        public List<int> LayerSizes { get; private set; }

        /// <summary>Weight matrices between consecutive layers; LayerWeights[i] connects layer i (lower) to layer i+1 (upper).</summary>
        public List<InterLayerWeights> LayerWeights { get; private set; }

        /// <summary>The inter-layer weights currently being trained.</summary>
        public InterLayerWeights TrainingWeights { get; private set; }

        /// <summary>Input patterns propagated up to the layer just below <see cref="TrainingWeights"/>.</summary>
        public List<Layer> TrainingInputLayers { get; private set; }

        /// <summary>Raw input patterns presented at the bottom layer.</summary>
        public List<Layer> InputLayers;

        private RbmProgram Program;


        /// <summary>
        /// Appends a new layer of <paramref name="nodeCount"/> nodes on top of the network,
        /// wiring it to the current top layer with fresh weights.
        /// </summary>
        /// <param name="nodeCount">Number of nodes in the new top layer.</param>
        public void AddNewLayer(int nodeCount)
        {
            InterLayerWeights last = LayerWeights.Last();
            LayerSizes.Add(nodeCount);

            // The previous top layer's size becomes the lower side of the new weight matrix.
            InterLayerWeights weights = new InterLayerWeights(last.UpperLayerSize, nodeCount);
            LayerWeights.Add(weights);
        }

        /// <summary>
        /// Builds one <see cref="InterLayerWeights"/> per adjacent pair in <see cref="LayerSizes"/>,
        /// replacing any existing weights.
        /// </summary>
        private void GenerateLayerWeights()
        {
            LayerWeights = new List<InterLayerWeights>();

            for( int layerIndex = 0; layerIndex < LayerSizes.Count - 1; layerIndex++ )
            {
                InterLayerWeights weights = new InterLayerWeights(LayerSizes[layerIndex], LayerSizes[layerIndex + 1]);
                LayerWeights.Add(weights);
            }
        }


        /// <summary>
        /// Propagates <paramref name="upper"/> downward from the layer currently being
        /// trained all the way to the input layer.
        /// </summary>
        /// <param name="upper">Activations of the layer above <see cref="TrainingWeights"/>.</param>
        /// <returns>The reconstructed input-layer activations.</returns>
        public Layer ActivateDownToInput(Layer upper)
        {
            return ActivateDownToInput( upper, LayerWeights.IndexOf( TrainingWeights ) );
        }

        /// <summary>
        /// Runs a full up-then-down pass and returns the reconstruction of
        /// <paramref name="layer"/>, optionally injecting noise ("dreaming") on the way up.
        /// </summary>
        /// <param name="layer">Input-layer activations; not mutated.</param>
        /// <param name="isDream">When true, random noise is inserted into each upward activation.</param>
        /// <param name="layerLimit">Highest weight-layer index the upward pass may reach.</param>
        /// <returns>The reconstructed input layer.</returns>
        public Layer Reconstruct( Layer layer, bool isDream, int layerLimit = Int32.MaxValue )
        {
            // BUGFIX: the clone was created but the ORIGINAL layer was passed on, so the
            // caller's layer was mutated in place (ReconstructionHelper rounds Values).
            // Pass the clone so the input is preserved.
            Layer tempLayer = layer.Clone();
            return ReconstructionHelper( tempLayer, 0, isDream, layerLimit );
        }

        /// <summary>
        /// Recursively activates downward from <paramref name="layerIndex"/> to weight layer 0.
        /// </summary>
        private Layer ActivateDownToInput(Layer upper, int layerIndex)
        {
            InterLayerWeights interLayerWeights = LayerWeights[layerIndex];
            Layer output = new Layer(interLayerWeights.LowerLayerSize);
            interLayerWeights.ActivateUpperToLower(output, upper);

            if (layerIndex == 0)
            {
                return output;
            }
            else
            {
                return ActivateDownToInput(output, layerIndex - 1);
            }
        }

        /// <summary>
        /// Recursive up/down pass: binarizes the layer, activates upward, recurses until
        /// the top (or <paramref name="layerLimit"/>), then activates back down.
        /// Note: mutates <paramref name="layer"/>.Values (rounds to 0/1) — callers must
        /// pass a clone if the original must survive.
        /// </summary>
        private Layer ReconstructionHelper( Layer layer, int layerIndex, bool isDream, int layerLimit )
        {
            InterLayerWeights interLayerWeights = LayerWeights[layerIndex];
            // Binarize the incoming activations before propagating upward.
            layer.Values = layer.Values.Select(x => (float)Math.Round(x)).ToArray();
            Layer output = new Layer(interLayerWeights.UpperLayerSize);
            interLayerWeights.ActivateLowerToUpper( layer, output );

            if( isDream )
                InsertNoise( output );

            if( layerIndex < LayerWeights.Count - 1 && layerIndex < layerLimit )
                output = ReconstructionHelper( output, layerIndex + 1, isDream, layerLimit );

            Layer reconstruction = new Layer( interLayerWeights.LowerLayerSize );
            interLayerWeights.ActivateUpperToLower( reconstruction, output );

            return reconstruction;
        }

        /// <summary>
        /// Inserts a randomized amount of noise into <paramref name="output"/>, with the
        /// noise fraction and "blast" flag drawn from the configured <c>Variables</c> ranges.
        /// </summary>
        private static void InsertNoise( Layer output )
        {
            InsertNoise( 
                output, 
                Randomized.Default.Next( Variables.DreamNoiseNumeratorLow, Variables.DreamNoiseNumeratorHigh ), 
                Randomized.Default.Next( Variables.DreamNoiseDenominatorLow, Variables.DreamNoiseDenominatorHigh), 
                Randomized.Default.NextDouble() < Variables.DreamNoiseBlastProbability
            );
        }

        /// <summary>
        /// Randomly overwrites roughly numerator/denominator of the layer's nodes with
        /// 1.0 or a "down" value (-1.0 when <paramref name="outOfBounds"/>, else 0.0).
        /// </summary>
        private static void InsertNoise( Layer output, int numerator, int denominator, bool outOfBounds )
        {
            // Using out of bounds to blast out of local minima
            var downbit = outOfBounds ? -1.0f : 0.0f;

            // Hoist the loop-invariant flip count out of the loop condition.
            int flipCount = output.Values.Length * numerator / denominator;

            // Add some portion of noise
            for( int unused = 0; unused < flipCount; unused++ )
            {
                int index = Randomized.Default.Next( output.Values.Length );
                output.Values[index] = Randomized.Default.NextDouble() > 0.5d ? 1.0f : downbit;
            }
        }

        /// <summary>
        /// Feeds <see cref="InputLayers"/> forward through every weight layer below
        /// <see cref="TrainingWeights"/>, producing the training inputs for the layer
        /// currently being trained.
        /// </summary>
        private void PrepareTrainingInputLayers()
        {
            List<Layer> currentInputLayers = InputLayers.ToList();

            for (int i = 0; i < LayerWeights.IndexOf(TrainingWeights); i++)
            {
                InterLayerWeights weights = LayerWeights[i];
                List<Layer> generatedOutputLayers = new List<Layer>();
                foreach (Layer input in currentInputLayers)
                {
                    Layer output = new Layer(weights.UpperLayerSize);
                    weights.ActivateLowerToUpper(input, output/*, ActivationMethod.Binary*/);
                    generatedOutputLayers.Add(output);
                }

                currentInputLayers = generatedOutputLayers;
            }

            TrainingInputLayers = currentInputLayers;
        }


        #region - IMLEngine Implementation -

        /// <summary>
        /// Builds the layer topology from <paramref name="shape"/> (a format string whose
        /// {0}/{1} placeholders receive the adaptor's input/output sizes), compiles the
        /// RBM program and prepares training inputs. No-ops (with a log message) until
        /// the adaptor has training data.
        /// </summary>
        public override void Initialize( ComputationEngine engine, IPatternDataAdaptor dataAdaptor, string shape )
        {
            // BUGFIX: original tested TrainingData.Any() (i.e. data IS present) and bailed
            // out — exactly backwards. Wait only when there is no adaptor or NO data.
            if( dataAdaptor == null || !dataAdaptor.PatternData.TrainingData.Any() )
            {
                Log.Info( "Deep Belief Network is waiting for data." );
                return;
            }

            LayerSizes = String.Format( shape, dataAdaptor.InputSize, dataAdaptor.OutputSize )
                .Split( new char[] { ',' } )
                .Select( value => Int32.Parse( value ) )
                .ToList();
            GenerateLayerWeights();

            // TODO: Hookup engine.
            // NOTE(review): TrainingWeights and InputLayers are never assigned before this
            // point (the assignments were commented out) — confirm intended initialization.
            Program = new RbmProgram( TrainingWeights, InputLayers );

            // BUGFIX: use the supplied engine parameter (previously the possibly-unset
            // Engine property was read and the parameter ignored).
            Program.Compile( engine );

            PrepareTrainingInputLayers();

            Log.Info( "Deep Belief Network is ready." );

            base.Initialize( engine, dataAdaptor, shape );
        }

        /// <summary>Not yet implemented.</summary>
        public override void Run()
        {
            throw new NotImplementedException();
        }

        /// <summary>Not yet implemented.</summary>
        public override void Step()
        {
            throw new NotImplementedException();
        }

        /// <summary>Not yet implemented.</summary>
        public override void Pause()
        {
            throw new NotImplementedException();
        }

        /// <summary>Not yet implemented.</summary>
        public override void Resume()
        {
            throw new NotImplementedException();
        }

        /// <summary>Not yet implemented.</summary>
        public override void Quit()
        {
            throw new NotImplementedException();
        }

        /// <summary>Configuration GUI for this engine plugin.</summary>
        public override Api.Interfaces.IPluginConfiguration Gui
        {
            get { return new Configuration(); }
        }

        /// <summary>Round-trips the plugin's persisted settings as a serializable dictionary.</summary>
        public override XmlSerializableDictionary<string,object> Settings
        {
            get
            {
                return DeepBelief.Properties.Settings.Default.Serialize();
            }
            set
            {
                DeepBelief.Properties.Settings.Default.Deserialize( value );
            }
        }

        #endregion

    }
}
