﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using SharpML.Api.Enumerations;
using SharpML.Api.Implementation;

namespace SharpML.Api.Types
{
    /// <summary>
    /// Holds the weight matrix connecting two adjacent layers of a network and
    /// provides lower↔upper activation, random initialization, and
    /// learning-rate bookkeeping. The matrix is stored as a flat array indexed
    /// by <c>(lower * UpperLayerSize) + upper</c>.
    /// </summary>
    [Serializable]
    public class InterLayerWeights
    {
        // Last recorded feature-detector error; -1 is a sentinel for "no error seen yet".
        private float _lastError = -1;

        /// <summary>Magnitude bound for initial random weights (uniform in [-0.1, +0.1]).</summary>
        public const float InitialWeightMagnitude = 0.1F;

        /// <summary>Starting value for both the base and the adjusted learning rate.</summary>
        public const float InitialLearningRate = 0.3F;

        /// <summary>Suggested number of samples per training batch.</summary>
        public const int TrainingBatchSize = 100;

        // Parameterless constructor kept internal for serialization support.
        internal InterLayerWeights()
        {
        }

        /// <summary>Creates a weight matrix sized to connect the two given layers.</summary>
        public InterLayerWeights(Layer lowerLayer, Layer upperLayer) : this(lowerLayer.Size, upperLayer.Size)
        {
        }

        /// <summary>
        /// Creates a weight matrix for the given layer sizes, allocates storage,
        /// and initializes it with small random weights.
        /// </summary>
        public InterLayerWeights(int lowerLayerSize, int upperLayerSize)
        {
            AdjustedLearningRate = InitialLearningRate;
            BaseLearningRate = InitialLearningRate;
            AutoAdjustLearningRate = true;
            LowerLayerSize = lowerLayerSize;
            UpperLayerSize = upperLayerSize;

            FillWeights();
            CreateRandomWeights();
        }

        public int LowerLayerSize { get; private set; }
        public int UpperLayerSize { get; private set; }
        public float BaseLearningRate { get; private set; }
        public float AdjustedLearningRate { get; set; }
        public bool AutoAdjustLearningRate { get; set; }
        public int PerformedTrainingSteps { get; private set; }
        public TrainingError LastTrainingError { get; private set; }

        // For each node in the lower layer, there's a list of weights for the upper layer.
        ///public float[/*lower*/,/*upper*/] Weights { get; set; }
        // Flattened 1-D form of the 2-D matrix commented out above; the index is
        // (lower * UpperLayerSize) + upper. NOTE(review): the original comment was
        // truncated ("For compatibility with ") — presumably flattened for
        // serialization compatibility; confirm before relying on this.
        public float[/*(lower * UpperLayerSize) + upper*/] Weights { get; set; }

        /// <summary>
        /// Invokes <paramref name="action"/>(lower, upper, weight) for every weight,
        /// iterating the lower-layer index outermost (row-major order).
        /// </summary>
        public void ForEachWeight(Action<int, int, float> action)
        {
            for (int lower = 0; lower < LowerLayerSize; lower++)
            {
                for (int upper = 0; upper < UpperLayerSize; upper++)
                {
                    action(lower, upper, Weights[(lower * UpperLayerSize) + upper]);
                }
            }
        }

        /// <summary>Replaces every weight with <paramref name="func"/>(lower, upper, currentWeight).</summary>
        public void SetEachWeight(Func<int, int, float, float> func)
        {
            ForEachWeight((lower, upper, weight) => Weights[(lower * UpperLayerSize) + upper] = func(lower, upper, weight));
        }

        /// <summary>Replaces every weight with <paramref name="func"/>(currentWeight).</summary>
        public void SetEachWeight(Func<float, float> func)
        {
            // The three-argument overload performs the store; returning the new
            // value is sufficient (the old code redundantly assigned twice).
            SetEachWeight((lower, upper, initialWeight) => func(initialWeight));
        }

        /// <summary>Replaces every weight with the value produced by <paramref name="func"/>().</summary>
        public void SetEachWeight(Func<float> func)
        {
            SetEachWeight((lower, upper, initialWeight) => func());
        }

        /// <summary>Propagates the lower layer up using linear activation sampling.</summary>
        public void ActivateLowerToUpper(Layer lowerLayer, Layer upperLayer)
        {
            ActivateLowerToUpper(lowerLayer, upperLayer, ActivationMethods.Linear);
        }

        /// <summary>
        /// Propagates activation from the lower layer to the upper layer and
        /// squashes each upper value through the logistic sigmoid.
        /// With <see cref="ActivationMethods.Binary"/>, each lower value is treated
        /// as a probability and stochastically sampled to 0/1 before weighting.
        /// </summary>
        public void ActivateLowerToUpper(Layer lowerLayer, Layer upperLayer, ActivationMethods activationMethod)
        {
            upperLayer.ClearValues();
            if (activationMethod == ActivationMethods.Binary)
            {
                ForEachWeight((lower, upper, weight) => upperLayer.Values[upper] += Randomized.Default.NextDouble() <= lowerLayer.Values[lower] ? weight : 0);
            }
            else
            {
                ForEachWeight((lower, upper, weight) => upperLayer.Values[upper] += lowerLayer.Values[lower] * weight);
            }

            // Logistic sigmoid: 1 / (1 + e^-x).
            for (int upper = 0; upper < upperLayer.Size; upper++)
            {
                upperLayer.Values[upper] = 1.0F / (1.0F + (float)Math.Exp(-upperLayer.Values[upper]));
            }
            upperLayer.SetBias();
        }

        /// <summary>
        /// Propagates activation from the upper layer back down to the lower layer
        /// (reconstruction pass) and squashes each lower value through the sigmoid.
        /// </summary>
        public void ActivateUpperToLower(Layer lowerLayer, Layer upperLayer)
        {
            lowerLayer.ClearValues();
            ForEachWeight((lower, upper, weight) => lowerLayer.Values[lower] += upperLayer.Values[upper] * weight);

            // Logistic sigmoid: 1 / (1 + e^-x).
            for (int lower = 0; lower < lowerLayer.Size; lower++)
            {
                lowerLayer.Values[lower] = 1.0F / (1.0F + (float)Math.Exp(-lowerLayer.Values[lower]));
            }
            lowerLayer.SetBias();
        }

        /// <summary>
        /// Randomizes every weight uniformly in [-InitialWeightMagnitude, +InitialWeightMagnitude]
        /// and resets the training history.
        /// </summary>
        public void CreateRandomWeights()
        {
            SetEachWeight(() => (float)(Randomized.Default.NextDouble() * 2.0 - 1.0) * InitialWeightMagnitude);
            LastTrainingError = null;
            PerformedTrainingSteps = 0;
        }

        /// <summary>Sets every weight to zero (does not reset the training history).</summary>
        public void SetWeightsToZero()
        {
            SetEachWeight(() => 0);
        }

        /// <summary>Allocates the flat weight array for the current layer sizes.</summary>
        public void FillWeights()
        {
            Weights = new float[LowerLayerSize * UpperLayerSize];
        }

        /// <summary>
        /// Fills the weight matrix from <paramref name="enumerable"/> in row-major
        /// order (lower-layer index outermost).
        /// </summary>
        /// <exception cref="InvalidOperationException">
        /// The sequence contains fewer elements than there are weights.
        /// </exception>
        public void LoadWeightsFromEnumerable(IEnumerable<float> enumerable)
        {
            // BUG FIX: the previous version read enumerator.Current BEFORE the first
            // MoveNext(), so the first weight received an undefined value, every
            // subsequent weight was shifted by one, and the sequence's last value
            // was dropped. It also leaked the enumerator. Correct protocol:
            // MoveNext() first, verify it succeeded, then read Current.
            using (IEnumerator<float> enumerator = enumerable.GetEnumerator())
            {
                SetEachWeight(
                    (lower, upper, weight) =>
                    {
                        if (!enumerator.MoveNext())
                        {
                            throw new InvalidOperationException("Not enough values in the enumerable to fill all weights.");
                        }
                        return enumerator.Current;
                    });
            }
        }

        /// <summary>
        /// Returns the learning rate scaled down by the test count and the log of
        /// the weight count. NOTE(review): this also increments
        /// <see cref="PerformedTrainingSteps"/> as a side effect, and the divisor is
        /// zero when testCount is 0 or Weights.Length is 1 (float division yields
        /// Infinity rather than throwing) — confirm callers never pass those.
        /// </summary>
        public float GetAdjustedAndScaledTrainingRate(int testCount)
        {
            PerformedTrainingSteps++;
            float divisor = (testCount * (float)Math.Log(Weights.Length) / 11F);
            return AdjustedLearningRate / divisor;
        }

        /// <summary>
        /// Records the latest training error, auto-adjusting the learning rate
        /// first, and returns the same error instance for chaining.
        /// </summary>
        public TrainingError RegisterLastTrainingError(TrainingError trainingError)
        {
            AdjustLearningRate(trainingError);
            LastTrainingError = trainingError;
            return trainingError;
        }

        // Nudges AdjustedLearningRate by ±1% when the error moved more than ±10%
        // since the last step, clamped to [0.5, 2] × BaseLearningRate.
        private void AdjustLearningRate(TrainingError trainingError)
        {
            float newError = trainingError.FeatureDetectorError;
            if (AutoAdjustLearningRate && _lastError > 0)
            {
                // Percentage change relative to the previous error.
                float errorChange = 100f * (newError - _lastError) / _lastError;

                // Error increased more than 10%, time to slow down!
                if (errorChange > 10)
                {
                    AdjustedLearningRate -= AdjustedLearningRate * 0.01F;
                }

                // Error decreased more than 10%, time to speed up!
                if (errorChange < -10)
                {
                    AdjustedLearningRate += AdjustedLearningRate * 0.01F;
                }

                // Clamp to [0.5, 2] × the base rate.
                AdjustedLearningRate = Math.Max(BaseLearningRate * 0.5F, AdjustedLearningRate);
                AdjustedLearningRate = Math.Min(BaseLearningRate * 2F, AdjustedLearningRate);
            }

            // (The old code assigned _lastError both inside and after the if;
            // a single assignment here is equivalent.)
            _lastError = newError;
        }

        // Adds errors.Weights (scaled by weightAdjustor) element-wise to this matrix.
        private void AdjustWeightsFromErrors(float weightAdjustor, InterLayerWeights errors)
        {
            SetEachWeight(
                (lower, upper, initialWeight) =>
                    initialWeight + errors.Weights[(lower * UpperLayerSize) + upper] * weightAdjustor);
        }

        // NOTE(review): errors is indexed by upper only, so the same per-upper-node
        // error is applied to every lower row — presumably a per-output-node error
        // vector; confirm against callers (none visible in this file).
        private void AdjustWeightsFromErrors(float weightAdjustor, List<float> errors)
        {
            SetEachWeight(
                (lower, upper, initialWeight) =>
                    initialWeight + errors[upper] * weightAdjustor);
        }

        // NOTE(review): the upper index is unused and BOTH layers are indexed by
        // lower, so every weight in a lower row accumulates the same delta and the
        // result is added UpperLayerSize times per lower node. This looks
        // suspicious — verify the intended formula before relying on this method
        // (it is private and has no callers visible in this file). Behavior
        // preserved as-is.
        private void AccumulateErrors(Layer wanted, Layer actual)
        {
            SetEachWeight(
                (lower, upper, errorSum) =>
                    errorSum + (wanted.Values[lower] - actual.Values[lower]));
        }
    }
}
