﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using SharpRBM.Core.LearningModules;
using SharpRBM.Core.Enumerations;

namespace SharpRBM.Core
{
    /// <summary>
    /// Trains a single weight layer of a <see cref="DeepBeliefNetwork"/> via a pluggable
    /// <see cref="ILearningModule"/>, and provides activation/reconstruction utilities
    /// over the network's layer stack.
    /// </summary>
    public class DeepBeliefNetworkTrainer
    {
        /// <summary>
        /// Creates a trainer that uses the default (OpenCL) learning module.
        /// </summary>
        public DeepBeliefNetworkTrainer(DeepBeliefNetwork deepBeliefNetwork, InterLayerWeights trainingWeights, List<Layer> inputLayers) :
            this(deepBeliefNetwork, trainingWeights, inputLayers, null)
        {
        }

        /// <summary>
        /// Creates a trainer for <paramref name="trainingWeights"/>, which must be one of the
        /// weight layers of <paramref name="deepBeliefNetwork"/>.
        /// </summary>
        /// <param name="deepBeliefNetwork">The network whose weight layer is being trained.</param>
        /// <param name="trainingWeights">The inter-layer weights to train; must belong to the network's <c>LayerWeights</c>.</param>
        /// <param name="inputLayers">Raw input layers; propagated upward to the training layer's inputs.</param>
        /// <param name="learningModule">The learning strategy, or null to use <see cref="OpenCLLearningModule"/>.</param>
        /// <exception cref="ArgumentNullException">A required argument is null.</exception>
        /// <exception cref="InvalidOperationException"><paramref name="trainingWeights"/> is not part of the network.</exception>
        public DeepBeliefNetworkTrainer(DeepBeliefNetwork deepBeliefNetwork, InterLayerWeights trainingWeights, List<Layer> inputLayers, ILearningModule learningModule)
        {
            if (deepBeliefNetwork == null)
            {
                throw new ArgumentNullException("deepBeliefNetwork");
            }
            if (trainingWeights == null)
            {
                throw new ArgumentNullException("trainingWeights");
            }
            if (inputLayers == null)
            {
                throw new ArgumentNullException("inputLayers");
            }

            DeepBeliefNetwork = deepBeliefNetwork;
            TrainingWeights = trainingWeights;
            _inputLayers = inputLayers;

            // Cache the layer index once; it also validates membership.
            _trainingWeightsIndex = DeepBeliefNetwork.LayerWeights.IndexOf(TrainingWeights);
            if (_trainingWeightsIndex == -1)
            {
                throw new InvalidOperationException("trainingWeights not part of deepBeliefNetwork!");
            }

            PrepareTrainingInputLayers();

            // Default to the OpenCL implementation when no module is supplied.
            LearingModule = learningModule ?? new OpenCLLearningModule();
            LearingModule.Prepare(TrainingWeights, TrainingInputLayers);
        }

        public DeepBeliefNetwork DeepBeliefNetwork { get; private set; }
        public InterLayerWeights TrainingWeights { get; private set; }

        // NOTE(review): misspelled name ("Traing") kept for backward compatibility with
        // existing callers. Returns the index cached at construction instead of
        // re-scanning LayerWeights on every access.
        public int TraingWeightsIndex { get { return _trainingWeightsIndex; } }

        public List<Layer> TrainingInputLayers { get; private set; }

        // NOTE(review): misspelled name ("Learing") kept for backward compatibility.
        public ILearningModule LearingModule { get; set; }

        private readonly List<Layer> _inputLayers;
        private readonly int _trainingWeightsIndex;

        /// <summary>
        /// Runs one training pass via the configured learning module.
        /// </summary>
        public TrainingError Train()
        {
            return LearingModule.Train();
        }

        /// <summary>
        /// Propagates <paramref name="upper"/> downward through every weight layer from the
        /// training layer to layer 0, returning the resulting input-sized layer.
        /// </summary>
        public Layer ActivateDownToInput(Layer upper)
        {
            return ActivateDownToInput(upper, _trainingWeightsIndex);
        }

        /// <summary>
        /// Reconstructs <paramref name="layer"/> by activating upward through the network
        /// (optionally injecting noise when <paramref name="isDream"/> is set) and back down.
        /// The input is cloned first so the caller's layer is never mutated.
        /// </summary>
        public Layer Reconstruct( Layer layer, bool isDream, int layerLimit = Int32.MaxValue )
        {
            // ReconstructionHelper rounds layer.Values in place, so work on a copy.
            // (Previously the clone was created but the ORIGINAL layer was passed,
            // clobbering the caller's data — the clone was dead code.)
            Layer tempLayer = layer.Clone();
            return ReconstructionHelper( tempLayer, 0, isDream, layerLimit );
        }

        // Recursively activates downward from layerIndex to layer 0.
        private Layer ActivateDownToInput(Layer upper, int layerIndex)
        {
            InterLayerWeights interLayerWeights = DeepBeliefNetwork.LayerWeights[layerIndex];
            Layer output = new Layer(interLayerWeights.LowerLayerSize);
            interLayerWeights.ActivateUpperToLower(output, upper);

            if (layerIndex == 0)
            {
                return output;
            }

            return ActivateDownToInput(output, layerIndex - 1);
        }

        // Recursive up-then-down pass used by Reconstruct. Rounds the incoming layer's
        // values (mutating 'layer' in place) before activating upward, recurses until
        // the top of the network or the caller's layerLimit, then activates back down.
        private Layer ReconstructionHelper( Layer layer, int layerIndex, bool isDream, int layerLimit )
        {
            InterLayerWeights interLayerWeights = DeepBeliefNetwork.LayerWeights[layerIndex];
            layer.Values = layer.Values.Select(x => (float)Math.Round(x)).ToArray();
            Layer output = new Layer(interLayerWeights.UpperLayerSize);
            interLayerWeights.ActivateLowerToUpper( layer, output );

            if( isDream )
                InsertNoise( output );

            if( layerIndex < DeepBeliefNetwork.LayerWeights.Count - 1 && layerIndex < layerLimit )
                output = ReconstructionHelper( output, layerIndex + 1, isDream, layerLimit );

            Layer reconstruction = new Layer( interLayerWeights.LowerLayerSize );
            interLayerWeights.ActivateUpperToLower( reconstruction, output );

            return reconstruction;
        }

        // Injects noise using the globally configured dream-noise parameters.
        private static void InsertNoise( Layer output )
        {
            InsertNoise(
                output,
                RBMCommon.Random.Next( Variables.DreamNoiseNumeratorLow, Variables.DreamNoiseNumeratorHigh ),
                RBMCommon.Random.Next( Variables.DreamNoiseDenominatorLow, Variables.DreamNoiseDenominatorHigh ),
                RBMCommon.Random.NextDouble() < Variables.DreamNoiseBlastProbability
            );
        }

        // Randomizes roughly (numerator/denominator) of the layer's units. When
        // outOfBounds is true the "down" bit is -1 instead of 0, deliberately pushing
        // values out of range to blast out of local minima.
        private static void InsertNoise( Layer output, int numerator, int denominator, bool outOfBounds )
        {
            var downbit = outOfBounds ? -1.0f : 0.0f;

            Helper.Loop( output.Values.Length * numerator / denominator, () => {
                int index = Helper.Random.Next( output.Values.Length );
                output.Values[index] = Helper.Random.NextDouble() > 0.5d ? 1.0f : downbit;
            } );
        }

        // Feeds the raw input layers forward through every weight layer below the one
        // being trained, producing the immediate inputs the learning module will see.
        private void PrepareTrainingInputLayers()
        {
            List<Layer> currentInputLayers = _inputLayers.ToList();

            for (int i = 0; i < _trainingWeightsIndex; i++)
            {
                InterLayerWeights weights = DeepBeliefNetwork.LayerWeights[i];
                List<Layer> generatedOutputLayers = new List<Layer>();
                foreach (Layer input in currentInputLayers)
                {
                    Layer output = new Layer(weights.UpperLayerSize);
                    weights.ActivateLowerToUpper(input, output);
                    generatedOutputLayers.Add(output);
                }

                currentInputLayers = generatedOutputLayers;
            }

            TrainingInputLayers = currentInputLayers;
        }
    }
}
