﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using SharpRBM.Core.Enumerations;

namespace SharpRBM.Core.LearningModules
{
    /// <summary>
    /// Trains one pair of RBM layers using a single step of contrastive divergence
    /// (CD-1): drive the hidden layer from the data (positive phase), reconstruct the
    /// visible layer and re-activate the hidden layer from the reconstruction
    /// (negative phase), then nudge each weight by the difference in correlations.
    /// Call <see cref="Prepare"/> before <see cref="Train"/>.
    /// </summary>
    public class CleanLearningModule : ILearningModule
    {
        private List<Layer> _inputs;
        private InterLayerWeights _weights;

        /// <summary>Propagates activations from the lower (visible) layer up to the upper (hidden) layer.</summary>
        /// <param name="lower">Source layer whose values drive the activation.</param>
        /// <param name="upper">Destination layer whose values are overwritten.</param>
        /// <param name="method">How activations are computed (e.g. binary sampling).</param>
        public void ActivateLowerToUpper(Layer lower, Layer upper, ActivationMethod method)
        {
            _weights.ActivateLowerToUpper(lower, upper, method);
        }

        /// <summary>Propagates activations from the upper (hidden) layer back down to the lower (visible) layer.</summary>
        /// <param name="lower">Destination layer whose values are overwritten.</param>
        /// <param name="upper">Source layer whose values drive the activation.</param>
        public void ActivateUpperToLower(Layer lower, Layer upper)
        {
            _weights.ActivateUpperToLower(lower, upper);
        }

        /// <summary>
        /// Runs one CD-1 training pass over every prepared input and applies the
        /// accumulated weight update in a single batch at the end.
        /// </summary>
        /// <returns>
        /// The summed feature-detector error (total absolute gradient) and summed
        /// reconstruction error (total absolute visible-unit mismatch) for this pass.
        /// </returns>
        public TrainingError Train()
        {
            Layer model = new Layer(_weights.UpperLayerSize);
            Layer reconstructedModel = new Layer(_weights.UpperLayerSize);
            Layer reconstruction = new Layer(_weights.LowerLayerSize);
            // BUG FIX: removed unused local "binaryInput" — it was allocated on every
            // call and never read.
            float featureDetectorError = 0;
            float reconstructionError = 0;

            // CD-1 gradient accumulated over the whole batch:
            // positive-phase correlation minus negative-phase correlation, per weight.
            float[,] error = new float[_weights.UpperLayerSize, _weights.LowerLayerSize];
            foreach (Layer input in _inputs)
            {
                // Positive phase: drive hidden units from the data, sampling binary states.
                _weights.ActivateLowerToUpper(input, model, ActivationMethod.Binary);
                // Negative phase: reconstruct the visible layer from the hidden states,
                // then re-activate the hidden units from that reconstruction.
                _weights.ActivateUpperToLower(reconstruction, model);
                _weights.ActivateLowerToUpper(reconstruction, reconstructedModel);

                Helper.Loop(input.Size, i => reconstructionError += (float)Math.Abs(input.Values[i] - reconstruction.Values[i]));

                Helper.Loop(_weights.UpperLayerSize, upper =>
                {
                    Helper.Loop(_weights.LowerLayerSize, lower =>
                    {
                        error[upper, lower] +=
                            // What the model should believe in (data-driven correlation)
                            input.Values[lower] * model.Values[upper]
                            // What the model actually believes in (reconstruction-driven correlation)
                            - reconstruction.Values[lower] * reconstructedModel.Values[upper];
                    });
                });
            }

            // Learning rate scaled by the batch size so the applied update reflects an
            // average over the inputs rather than a raw sum.
            float epsilon = _weights.GetAdjustedAndScaledTrainingRate(_inputs.Count);

            _weights.SetEachWeight(
                (lower, upper, weight) =>
                {
                    // Renamed from "DPdiff" to follow camelCase local naming.
                    float gradient = error[upper, lower];
                    featureDetectorError += Math.Abs(gradient);
                    return weight + gradient * epsilon;
                });

            // Fixed typo: local was previously misspelled "traningError".
            TrainingError trainingError = new TrainingError(featureDetectorError, reconstructionError);
            _weights.RegisterLastTrainingError(trainingError);
            return trainingError;
        }

        /// <summary>
        /// Supplies the weight matrix and the training batch used by <see cref="Train"/>.
        /// </summary>
        /// <param name="weights">Weights between the layer pair being trained.</param>
        /// <param name="inputs">Batch of visible-layer training examples.</param>
        /// <exception cref="ArgumentNullException">
        /// Thrown when <paramref name="weights"/> or <paramref name="inputs"/> is null;
        /// failing fast here avoids an opaque NullReferenceException later in Train().
        /// </exception>
        public void Prepare(InterLayerWeights weights, List<Layer> inputs)
        {
            if (weights == null)
            {
                throw new ArgumentNullException("weights");
            }
            if (inputs == null)
            {
                throw new ArgumentNullException("inputs");
            }

            _inputs = inputs;
            _weights = weights;
        }

        public void Dispose()
        {
            // Release both prepared references (previously only _inputs was cleared,
            // leaving _weights pinned after disposal).
            _inputs = null;
            _weights = null;
        }
    }
}
