﻿using System;
using System.Collections.Generic;
using innovations.util.exts.mathdotnet;
using innovations.util.exts.msft;
using MathNet.Numerics.LinearAlgebra.Double;
using MathNet.Numerics.LinearAlgebra.Generic;
using innovations.util;
using innovations.util.collections;
using innovations.ml.core.collections;


namespace innovations.ml.core.models
{
    /// <summary>
    /// Multi-layer feed-forward neural network with sigmoid activations, trained by
    /// back-propagation.  Cost J is regularized cross-entropy accumulated one training
    /// example at a time; per-layer gradient accumulators live in
    /// <see cref="GradientLinkedList"/> (a project singleton).
    /// NOTE(review): the regularization term in <see cref="ComputeCost(bool)"/> hard-codes
    /// theta[0] and theta[1], so the cost is only correct for exactly 3 layers
    /// (input, one hidden, output) -- confirm before using a deeper Layers value.
    /// </summary>
    public class NeuralNetworkModel : Model, IModel
    {
        /// <summary>
        /// Builds the model, one-hot encodes Y, and allocates one zero-valued gradient
        /// accumulator matrix per theta matrix (one theta per layer transition).
        /// </summary>
        /// <param name="x">Training examples, one row per example.</param>
        /// <param name="y">Label vector; each entry is used as a 0-based class index (see <see cref="SetBinaryY"/>).</param>
        /// <param name="includesOnesColumn">Forwarded to the base Model; presumably indicates x already carries a bias column -- confirm in base class.</param>
        /// <param name="numberOfLabels">Number of output classes.</param>
        /// <param name="lambda">Regularization strength.</param>
        /// <param name="layers">Total layer count, input and output layers included.</param>
        /// <param name="multiLevelMultiClassTheta">Weight matrices in layer order, one per layer transition.</param>
        public NeuralNetworkModel(Matrix<double> x, Vector<double> y, bool includesOnesColumn, int numberOfLabels, double lambda, int layers, List<Matrix<double>> multiLevelMultiClassTheta)
            : base(x, y, includesOnesColumn, numberOfLabels, lambda)
        {
            Layers = layers;
            SetBinaryY();
            MultiLevelMultiClassTheta = multiLevelMultiClassTheta;
            // TODO: Re-think the GradientLinkedList design.  Having a singleton seems cumbersome.
            // NOTE(review): GetInstance is called three times below (with Layers, Layers,
            // and Layers - 1).  The first call + Clear() flushes any stale singleton state
            // from a previous model; the second call's result is immediately overwritten,
            // so it appears to be dead code.  Whether GetInstance re-sizes the singleton
            // when the argument changes cannot be confirmed from here -- verify before
            // simplifying.
            GradientLinkedList = GradientLinkedList<Matrix<double>>.GetInstance(Layers);
            GradientLinkedList.Clear();
            GradientLinkedList = GradientLinkedList<Matrix<double>>.GetInstance(Layers);

            // TODO: It is assumed that Matrices in MultiLevelMultiClassTheta are in order of layer used.  Code should be built to force this to happen instead of leaving it to the client code to do right.
            GradientLinkedList = GradientLinkedList<Matrix<double>>.GetInstance(Layers - 1);
            int counter = 0;
                        // Seed one zero matrix per theta, same shape, keyed by its layer index.
                        // CreateMatrix presumably yields a zero-filled matrix of the given size.
                        foreach (Matrix<double> item in MultiLevelMultiClassTheta)
                            GradientLinkedList.AddLast(item.CreateMatrix(item.RowCount, item.ColumnCount), counter++.ToString());

        }
        
        /// <summary>
        /// Accumulates the regularized cross-entropy cost J over every training example
        /// (feed-forward per row), and optionally the regularized gradients via
        /// back-propagation.
        /// NOTE(review): assumes J was reset to 0 by the caller before invocation
        /// (see ComputeNumericalGradient, which does exactly that).
        /// </summary>
        /// <param name="includeGradient">When true, also runs back-propagation per example and finalizes the gradient with its regularization term.</param>
        public virtual void ComputeCost(bool includeGradient = false)
        {
            for (int a = 0; a < X.RowCount; a++)
            {
                ExecuteFeedForwardStep(a);
                if (includeGradient)
                    ComputeGradient(a);
                else
                    // Back-prop pops the singleton activation stack itself; without it
                    // we must clear the stack manually before the next example.
                    ActivationValues.Clear();
            }
            // Regularization: lambda/(2m) * sum of squared weights, bias column
            // (column 0) excluded via the SubMatrix starting at column 1.
            // NOTE(review): only theta[0] and theta[1] are summed -- valid solely for
            // a 3-layer network; a deeper network's extra thetas would be ignored.
            double regularization = (this.Lambda / (2.0 * Convert.ToDouble(X.RowCount)))
                * ((MultiLevelMultiClassTheta[0].SubMatrix(0, MultiLevelMultiClassTheta[0].RowCount, 1, MultiLevelMultiClassTheta[0].ColumnCount - 1).PointwisePower(2).Sum())
                + (MultiLevelMultiClassTheta[1].SubMatrix(0, MultiLevelMultiClassTheta[1].RowCount, 1, MultiLevelMultiClassTheta[1].ColumnCount - 1).PointwisePower(2).Sum()));
            J += regularization;

            if (includeGradient)
            {
                for (int a = Layers - 2; a >= 0; a--)
                {
                    // Zero the bias column so bias weights are not regularized.
                    // NOTE(review): this mutates MultiLevelMultiClassTheta[a] in place --
                    // the model's thetas are destroyed by a gradient pass.  Confirm the
                    // caller always replaces them afterwards.
                    MultiLevelMultiClassTheta[a].ReplaceColumn(new DenseVector(MultiLevelMultiClassTheta[a].RowCount, 0), 0);
                    var gradientRegularization = (Lambda / Convert.ToDouble(X.RowCount)).PointwiseMultiplication(MultiLevelMultiClassTheta[a]);
                    //(1 / Convert.ToDouble(X.RowCount)) * GradientLinkedList[1].Value + gradientRegularization
                    // RemoveLast + AddFirst walks the accumulator list back-to-front,
                    // scaling each accumulated gradient by 1/m and adding regularization,
                    // leaving the list in forward layer order when the loop finishes.
                    GradientLinkedList.AddFirst((1 / Convert.ToDouble(X.RowCount)) * GradientLinkedList.RemoveLast().Value + gradientRegularization, a.ToString());
                }
            }
        }

        /// <summary>
        /// NOTE(review): this overload only stores the supplied theta; it never runs the
        /// cost computation itself.  That looks incomplete -- callers likely expect it to
        /// also call <see cref="ComputeCost(bool)"/>.  Confirm intent before relying on it.
        /// </summary>
        /// <param name="theta">Replacement weight matrices, one per layer transition.</param>
        /// <param name="includeGradient">Currently unused by this overload.</param>
        public virtual void ComputeCost(List<Matrix<double>> theta, bool includeGradient = false)
        {
        MultiLevelMultiClassTheta = theta;    
        }

        /// <summary>
        /// Runs one forward pass for training example <paramref name="a"/>, leaving the
        /// per-layer activation vectors on the singleton <see cref="ActivationValues"/>
        /// stack (hypothesis on top) and adding this example's cross-entropy term to J.
        /// </summary>
        /// <param name="a">Row index of the training example in X.</param>
        private void ExecuteFeedForwardStep(int a)
        {
            // Input Layer
            ActivationValues = ActivationValuesStack<Vector<double>>.GetInstance(Layers);
            ActivationValues.Push(X.Row(a));  // Activation values for input layer is the training set            
            ZValues = MultiLevelMultiClassTheta[0] * ActivationValues.Peek(); // Same as "Theta Transpose * X"
            // Hidden Layer
            for (int b = 1; b < Layers - 1; b++)
            {
                ActivationValues.Push(Sigmoid.Compute(ZValues)); // Takes the Z-values from the previous layer, calculates sigmoid, and the results are the used to activate layer
                ActivationValues.Push(ActivationValues.Pop().Insert(0, 1.0)); // Bias unit is added to layer
                ZValues = MultiLevelMultiClassTheta[b] * ActivationValues.Peek(); // Similar as "Theta Transpose * X" but uses activation units instead of X values
            }
            // Output Layer
            ActivationValues.Push(Sigmoid.Compute(ZValues)); // Takes the Z-values from the previous layer, calculates sigmoid, and the results are the used to activate layer
            Vector<double> hypothesis = ActivationValues.Peek();  // In the output layer, the activation layer is called the hypothesis

            // Calculates difference between Y-Values that were predicted to be zero and the actual values output by the hypothesis
            // i.e. the -y .* log(h) half of the cross-entropy cost (vectorized over classes).
            Vector<double> error1 = -BinaryValueY.Transpose().Column(a).PointwiseMultiply((MoreMath.Log(hypothesis)));            

            // r = 1 so that r.PointwiseSubtraction(v) reads as (1 - v); presumably a
            // project extension on int -- confirm it broadcasts the scalar.
            int r = 1;
            // Calculates difference between Y-Values that were predicted to be one and the actual values output by the hypothesis
            // i.e. the (1 - y) .* log(1 - h) half of the cross-entropy cost.
            Vector<double> error2 = r.PointwiseSubtraction(BinaryValueY).Transpose().Column(a).PointwiseMultiply(MoreMath.Log(r.PointwiseSubtraction(hypothesis)));            
            J = J + ((1 / (Convert.ToDouble(X.RowCount))) * (error1 - error2).Sum()); // computes the cost
        }

        /// <summary>
        /// Runs one back-propagation pass for training example <paramref name="a"/>,
        /// consuming the activations pushed by <see cref="ExecuteFeedForwardStep"/> and
        /// adding this example's delta terms into the per-layer accumulators in
        /// <see cref="GradientLinkedList"/>.
        /// NOTE(review): the Debug.WriteLine calls below execute per example in DEBUG
        /// builds -- expensive inside the training loop; consider removing.
        /// </summary>
        /// <param name="a">Row index of the training example in X / BinaryValueY.</param>
        private void ExecuteBackPropagationStep(int a)
        {
            Queue<Matrix<double>> deltaQueue = new Queue<Matrix<double>>(Layers - 1);
            // Output Layer
            deltaQueue.Enqueue(ActivationValues.Pop().Subtract(BinaryValueY.Row(a)).ToColumnMatrix()); // the difference between the value of the hypothesis and the actual output Y (vectorized)
            //System.Diagnostics.Debug.WriteLine(deltaQueue.Peek().ToTable());
            // r = 1 lets r.PointwiseSubtraction(v) read as (1 - v), the sigmoid
            // derivative factor a .* (1 - a).
            int r = 1;
            int counter = 1;
            // Hidden Layer
            for (int b = Layers - 2; b >= 1; b--)
            {
                //  the difference between the activation value that should have been output by this layer to eventually get the hypothesis to equal Y and the actual activation value that was output
                // delta_hidden = (theta' * delta_next) .* a .* (1 - a), with the bias
                // row stripped via RemoveColumn(0) after the Transpose.
                // NOTE(review): counter indexes thetas from 1 upward while b walks layers
                // downward -- only consistent for a single hidden layer; confirm for deeper nets.
                deltaQueue.Enqueue((MultiLevelMultiClassTheta[counter].Transpose().Multiply(deltaQueue.Peek()).PointwiseMultiply(ActivationValues.Peek().PointwiseMultiply(r.PointwiseSubtraction(ActivationValues.Peek())).ToColumnMatrix())).Transpose().RemoveColumn(0));
                //System.Diagnostics.Debug.WriteLine((MultiLevelMultiClassTheta[counter].Transpose().Multiply(deltaQueue.Peek()).PointwiseMultiply(ActivationValues.Peek().PointwiseMultiply(r.PointwiseSubtraction(ActivationValues.Peek())).ToColumnMatrix())).Transpose().RemoveColumn(0));
                //deltaQueue.Peek().Multiply(ActivationValues.Peek().ToRowMatrix())
                //System.Diagnostics.Debug.WriteLine(GradientLinkedList.LastNode.Value.Add(deltaQueue.Peek().Multiply(ActivationValues.Peek().ToRowMatrix())).ToTable());
                // Accumulate delta * a' into the last accumulator (RemoveLast + AddLast
                // re-inserts the updated matrix at the same list position).
                GradientLinkedList.AddLast(GradientLinkedList.RemoveLast().Value.PointwiseAddition((deltaQueue.Dequeue().Multiply(ActivationValues.Pop().ToRowMatrix()))), b.ToString());
                System.Diagnostics.Debug.WriteLine(GradientLinkedList.LastNode.Value.ToTable());
                counter++;
            }
            // Input Layer
            // Same accumulation for the first theta, using the input-layer activations.
            GradientLinkedList.AddFirst(GradientLinkedList.RemoveFirst().Value.PointwiseAddition(deltaQueue.Peek().ToVector().ToColumnMatrix().Multiply(ActivationValues.Pop().ToRowMatrix())), "0");
            System.Diagnostics.Debug.WriteLine(GradientLinkedList.FirstNode.Value.ToTable());
            deltaQueue.Clear();
        }

        /// <summary>
        /// Gradient hook for one training example; delegates to back-propagation.
        /// Virtual so subclasses can substitute another gradient scheme.
        /// </summary>
        /// <param name="b">Row index of the training example.</param>
        protected virtual void ComputeGradient(int b)
        {
            ExecuteBackPropagationStep(b);
        }

        /// <summary>
        /// Callback for ALGLIB solvers (e.g. L-BFGS): stores the flattened theta,
        /// recomputes cost and gradient, and copies the results back into the
        /// solver-owned <paramref name="j"/> and <paramref name="gradient"/> slots.
        /// NOTE(review): the previous doc claimed all parameters are ignored, but
        /// theta is read and j / gradient are written here.  Also note that Theta is
        /// assigned yet never unpacked into MultiLevelMultiClassTheta before
        /// ComputeCost runs -- the cost appears to use the old matrices; confirm.
        /// </summary>
        /// <param name="theta">Flattened parameter vector supplied by the solver; copied into Theta.</param>
        /// <param name="j">Receives the computed cost J.</param>
        /// <param name="gradient">Receives the computed gradient, element by element.</param>
        /// <param name="obj">Opaque solver state object; unused.</param>
        public virtual void ComputeCostAndGradient(double[] theta, ref double j, double[] gradient, object obj)
        {
            Theta = new DenseVector(theta);
            ComputeCost(true);
            j = J;
            // dm.Gradient = gradient doesn't work with L_BFGS solver in ALGLIB.
            // Therefore, we have to iterate through each gradient element 
            // and assign to dm.Gradient.
            for (int i = 0; i < Gradient.Count; i++)
                gradient[i] = Gradient[i];
        }

        /// <summary>
        /// One-hot encodes Y into BinaryValueY: row i gets a 1 in the column indexed by
        /// Y[i].  Assumes labels are 0-based integers in [0, NumberOfLabels) -- an
        /// out-of-range label would throw on the indexer.
        /// </summary>
        public void SetBinaryY()
        {
            BinaryValueY = new DenseMatrix(X.RowCount, NumberOfLabels, 0.0);
            for (int i = 0; i < BinaryValueY.RowCount; i++)
                BinaryValueY[i, Convert.ToInt32(Y[i])] = 1;
        }

        /// <summary>
        /// Central-difference numerical gradient, (J(theta+e) - J(theta-e)) / (2e) with
        /// e = 1e-4, for checking back-propagation.  Restores the original thetas after
        /// each perturbation.  O(total weight count) full cost evaluations -- debug use only.
        /// </summary>
        /// <returns>One numerical-gradient matrix per theta matrix, in layer order.</returns>
        public List<Matrix<double>> ComputeNumericalGradient() 
        {
            // Shallow copy is enough here because perturbed entries are replaced via
            // PointwiseSubtraction/Addition (which yield new matrices), not mutated.
            List<Matrix<double>> originalMultiLevelMultiClassTheta = new List<Matrix<double>>(MultiLevelMultiClassTheta);
            double epsilon = .0001;
            List<Matrix<double>> numericalGradient = new List<Matrix<double>>(); ;
            List<Matrix<double>> perturbation = new List<Matrix<double>>();

            for (int i = 0; i < MultiLevelMultiClassTheta.Count; i++)
            {
                Matrix<double> matrix = MultiLevelMultiClassTheta[i];
                numericalGradient.Add(new DenseMatrix(matrix.RowCount, matrix.ColumnCount, 0.0));
                perturbation.Add(new DenseMatrix(matrix.RowCount, matrix.ColumnCount, 0.0));

                for (int j = 0; j < matrix.ColumnCount; j++)
                {
                    for (int k = 0; k < matrix.RowCount; k++)
                    {
                        // ComputeCost accumulates into J, so it must be reset per evaluation.
                        J = 0;
                        perturbation[i][k, j] = epsilon;
                        MultiLevelMultiClassTheta[i] = MultiLevelMultiClassTheta[i].PointwiseSubtraction(perturbation[i]);
                        this.ComputeCost();
                        var loss1 = J;
                        MultiLevelMultiClassTheta = new List<Matrix<double>>(originalMultiLevelMultiClassTheta);
                        MultiLevelMultiClassTheta[i] = MultiLevelMultiClassTheta[i].PointwiseAddition(perturbation[i]);
                        J = 0;
                        this.ComputeCost();
                        var loss2 = J;
                        numericalGradient[i][k, j] = (loss2 - loss1) / (2 * epsilon);
                        perturbation[i][k, j] = 0;
                        MultiLevelMultiClassTheta = new List<Matrix<double>>(originalMultiLevelMultiClassTheta);
                    }
                }
            }
            return numericalGradient;
        }

        /// <summary>
        /// Deterministic weight initialization for debugging: fills a
        /// fanOut x (1 + fanIn) matrix column-by-column with sin(counter)/10, so runs
        /// are reproducible when comparing against numerical gradients.
        /// </summary>
        /// <param name="fanOut">Number of units in the next layer (rows).</param>
        /// <param name="fanIn">Number of units feeding in; one bias column is added.</param>
        /// <returns>The initialized weight matrix.</returns>
        public static Matrix<double> DebugInitializeWeights(int fanOut, int fanIn)
        {
            Matrix<double> matrix = new DenseMatrix(fanOut, 1 + fanIn);
            int counter = 1;
            for (int j = 0; j < matrix.ColumnCount; j++)
                for (int i = 0; i < matrix.RowCount; i++)
                    matrix[i, j] = Math.Sin(counter++) / 10;
            return matrix;
        }


        // Total layer count, input and output layers included.
        public int Layers { get; set; }
        // One-hot encoding of Y: rows = examples, columns = classes.
        public Matrix<double> BinaryValueY { get; set; }
        // Singleton stack of per-layer activation vectors for the current example.
        public ActivationValuesStack<Vector<double>> ActivationValues { get; set; }
        // z = theta * a for the most recently processed layer.
        public Vector<double> ZValues { get; set; }
        // Singleton list of per-layer gradient accumulator matrices, keyed by layer index.
        public GradientLinkedList<Matrix<double>> GradientLinkedList { get; set; }        
    }
}