﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.Xml.Serialization;

namespace Beetle.FandorinsBugBrain
{
    public class Neuron
    {
        /// <summary>Creates a neuron with the default input count of 34.</summary>
        public Neuron()
            : this(34)
        {
        }

        // Current input vector. Layer.Compute aliases this to the incoming
        // signal array, so after a forward pass inputs[i] holds the previous
        // layer's activation (or the raw network input for the first layer).
        public double[] inputs;

        // One weight per input; randomized in the (int count) constructor.
        public double[] Weights;

        /// <summary>
        /// Activation function: bipolar sigmoid 2/(1+e^(-alfa*x)) - 1, range (-1, 1).
        /// Caches the result in <see cref="LastState"/> for backpropagation.
        /// </summary>
        public double Compute(double x)
        {
            LastState = (2 / (1 + Math.Exp(-Constants.alfa * x))) - 1; // bipolar sigmoid
            return LastState;
        }

        /// <summary>First derivative of the bipolar sigmoid, used for the backprop delta.</summary>
        public double ComputeFirstDerivative(double x)
        {
            return (2 * Constants.alfa * Math.Exp(-Constants.alfa * x)) / Math.Pow((1 + Math.Exp(-Constants.alfa * x)), 2);
        }

        /// <summary>Weighted sum of the inputs (the neuron's NET value); cached in <see cref="LastNET"/>.</summary>
        public double Summator()
        {
            double sum = 0;
            for (int i = 0; i < inputs.Length; i++)
                sum += inputs[i] * Weights[i];
            LastNET = sum;
            return sum;
        }

        // Weighted input sum from the most recent Summator() call.
        public double LastNET { get; set; }

        // Activation output from the most recent Compute() call.
        public double LastState { get; set; }

        // dE/dz error signal written during backpropagation (see Net.Train).
        public double dEdz { get; set; }

        /// <summary>Zeroes the input vector in place.</summary>
        public void Reset()
        {
            Array.Clear(inputs, 0, inputs.Length);
        }

        /// <summary>
        /// Creates a neuron with <paramref name="count"/> inputs. Weights start as small
        /// random values in (-defNeuroWeight/1000, defNeuroWeight/1000); inputs start at zero.
        /// </summary>
        public Neuron(int count)
        {
            inputs = new double[count];
            Weights = new double[count];
            for (int i = 0; i < count; i++)
            {
                inputs[i] = 0;
                Weights[i] = Constants.rand.Next(-Constants.defNeuroWeight, Constants.defNeuroWeight) / 1000d;
            }
        }

        /// <summary>
        /// Copy constructor. BUG FIX: the original shared the source's Weights array
        /// (mutating the copy silently mutated the source) and left inputs null, so
        /// Summator()/Reset() on a copied neuron threw NullReferenceException. The
        /// copy now owns an independent weight array and a matching input buffer.
        /// </summary>
        public Neuron(Neuron neuron)
        {
            this.Weights = (double[])neuron.Weights.Clone();
            this.inputs = new double[this.Weights.Length];
        }
    }

    public class Layer
    {
        /// <summary>Default layer: 34 neurons, each taking 34 inputs.</summary>
        public Layer()
            : this(34, 34)
        {
        }

        // The neurons making up this layer.
        public Neuron[] Neurons;

        // Output vector produced by the most recent Compute() call.
        public double[] LastOut;

        /// <summary>Number of neurons in this layer.</summary>
        public int NeuronsLength
        {
            get { return Neurons.Length; }
        }

        /// <summary>Number of inputs each neuron accepts (read from the first neuron).</summary>
        public int InputCount
        {
            get { return Neurons[0].inputs.Length; }
        }

        /// <summary>
        /// Feeds the given input vector to every neuron and returns the layer's output.
        /// Each neuron's inputs field is aliased to the shared input array; the result
        /// is also cached in <see cref="LastOut"/>.
        /// </summary>
        public double[] Compute(double[] inputs)
        {
            double[] result = new double[Neurons.Length];
            for (int n = 0; n < result.Length; n++)
            {
                Neuron neuron = Neurons[n];
                neuron.inputs = inputs;
                result[n] = neuron.Compute(neuron.Summator());
            }
            LastOut = result;
            return result;
        }

        /// <summary>Creates a fully initialized layer of randomly weighted neurons.</summary>
        public Layer(int neuronsCount, int inputCount)
        {
            Neurons = new Neuron[neuronsCount];
            for (int n = 0; n < neuronsCount; n++)
                Neurons[n] = new Neuron(inputCount);
        }

        /// <summary>Allocates the neuron array only; the caller must fill in the neurons.</summary>
        public Layer(int neuronsCount)
        {
            Neurons = new Neuron[neuronsCount];
        }
    }

    public partial class Net
    {
        // Layers in feed-forward order; Layers[0] receives the raw input vector.
        public Layer[] Layers;

        /// <summary>Size of the input vector the net expects.</summary>
        public int InputCount
        {
            get
            {
                return Layers[0].InputCount;
            }
        }

        /// <summary>Size of the output vector the net produces.</summary>
        public int OutputCount
        {
            get
            {
                return Layers[Layers.Length - 1].NeuronsLength;
            }
        }

        /// <summary>Feeds the inputs through every layer in order; returns the last layer's output.</summary>
        public double[] ComputeOutput(double[] inputs)
        {
            double[] signal = inputs;
            for (int i = 0; i < Layers.Length; i++)
            {
                signal = Layers[i].Compute(signal);
            }
            return signal;
        }

        /// <summary>Default topology: 34 inputs -> 34 -> 34 -> 18 outputs.</summary>
        public Net()
        {
            Layers = new Layer[3];

            Layers[0] = new Layer(34, 34);
            Layers[1] = new Layer(34, 34);
            Layers[2] = new Layer(18, 34);
        }

        /// <summary>Allocates the layer array only; the caller must fill in the layers.</summary>
        public Net(int layersCount)
        {
            Layers = new Layer[layersCount];
        }

        /// <summary>
        /// Loads a previously saved net from c:\logs\[fileName] (counterpart of <see cref="Save"/>).
        /// NOTE(review): the log directory is hard-coded; consider making it configurable.
        /// </summary>
        public Net(string fileName)
        {
            XmlSerializer reader = new XmlSerializer(typeof(Net));
            // using guarantees the handle is released even if deserialization throws
            // (the original leaked the stream on failure).
            using (var file = new System.IO.StreamReader(@"c:\logs\" + fileName))
            {
                var temp = (Net)reader.Deserialize(file);
                this.Layers = temp.Layers;
            }
        }

        /// <summary>Half squared Euclidean distance: 0.5 * sum((v1[i] - v2[i])^2).</summary>
        double ErrorFunction(double[] v1, double[] v2)
        {
            double d = 0;
            for (int i = 0; i < v1.Length; i++)
            {
                d += Math.Pow(v1[i] - v2[i], 2);
            }
            return 0.5 * d;
        }

        /// <summary>Builds a net from per-layer neuron counts and input counts (arrays must have equal length).</summary>
        public Net(int[] layersNeuronCount, int[] layersInputs)
        {
            Layers = new Layer[layersInputs.Length];

            for (int i = 0; i < Layers.Length; i++)
            {
                Layers[i] = new Layer(layersNeuronCount[i], layersInputs[i]);
            }
        }

        // dE/dOut of the half-Euclid error at one index: (actual - expected).
        // Name kept as-is - including the "Derivaitve" typo - because this class is
        // partial and other parts of it (in other files) may reference the method.
        double CalculatePartialDerivaitve(double[] v1, double[] v2, int index)
        {
            return v2[index] - v1[index];
        }

        /// <summary>
        /// Runs one backpropagation step for a single sample.
        /// Delegates to the batch overload: the two code paths were duplicated copies
        /// that had drifted apart, and a single-sample update is exactly a batch of one.
        /// </summary>
        public void Train(DataItem data, double rate = Constants.LearningRate)
        {
            Train(new List<DataItem> { data }, rate);
        }

        /// <summary>
        /// Backpropagation over a mini-batch: accumulates the weight gradient for every
        /// sample, then applies the summed update once at the end.
        /// BUG FIXES vs. the original:
        ///  - the hidden-layer pass iterated over the NEXT layer's neuron count while
        ///    indexing THIS layer's neurons, so with the default 34/34/18 topology only
        ///    the first 18 of 34 hidden neurons were ever trained;
        ///  - the pass stopped before layer 0, so its gradient stayed zero and its
        ///    weights never changed (even though the final update loop covered it).
        /// </summary>
        public void Train(List<DataItem> batch, double rate = Constants.LearningRate)
        {
            Net net = this;

            #region init gradient accumulators (one slot per weight)
            double[][][] nablaWeights = new double[net.Layers.Length][][];
            Parallel.For(0, net.Layers.Length, (i) =>
            {
                nablaWeights[i] = new double[net.Layers[i].Neurons.Length][];
                for (int j = 0; j < net.Layers[i].Neurons.Length; j++)
                {
                    // new double[] is already zero-initialized in C#.
                    nablaWeights[i][j] = new double[net.Layers[i].Neurons[j].Weights.Length];
                }
            });
            #endregion

            for (int batchIndex = 0; batchIndex < batch.Count; batchIndex++)
            {
                var realOut = net.ComputeOutput(batch[batchIndex].input);
                var bestOut = batch[batchIndex].Output;

                #region output layer: dE/dz and gradient
                int last = net.Layers.Length - 1;
                Parallel.For(0, net.Layers[last].Neurons.Length, (j) =>
                {
                    var neuron = net.Layers[last].Neurons[j];
                    neuron.dEdz = CalculatePartialDerivaitve(bestOut, realOut, j) *
                                  neuron.ComputeFirstDerivative(neuron.LastNET);
                    for (int i = 0; i < neuron.Weights.Length; i++)
                    {
                        // neuron.inputs[i] is the previous layer's activation:
                        // Layer.Compute aliases each neuron's inputs to the incoming
                        // vector, and ComputeOutput ran for this sample just above.
                        nablaWeights[last][j][i] += rate * neuron.dEdz * neuron.inputs[i];
                    }
                });
                #endregion

                #region hidden layers: backpropagate dE/dz and accumulate gradient
                // FIX: runs down to hlIndex == 0 so the first layer trains too; for
                // layer 0, neuron.inputs[i] is the raw sample input.
                for (int hlIndex = net.Layers.Length - 2; hlIndex >= 0; hlIndex--)
                {
                    int next = hlIndex + 1;
                    // FIX: iterate over THIS layer's neurons (was the next layer's count).
                    Parallel.For(0, net.Layers[hlIndex].Neurons.Length, (j) =>
                    {
                        var neuron = net.Layers[hlIndex].Neurons[j];
                        double sum = 0;
                        for (int k = 0; k < net.Layers[next].Neurons.Length; k++)
                        {
                            sum += net.Layers[next].Neurons[k].Weights[j] *
                                   net.Layers[next].Neurons[k].dEdz;
                        }
                        neuron.dEdz = sum * neuron.ComputeFirstDerivative(neuron.LastNET);
                        for (int i = 0; i < neuron.Weights.Length; i++)
                        {
                            nablaWeights[hlIndex][j][i] += rate * neuron.dEdz * neuron.inputs[i];
                        }
                    });
                }
                #endregion
            }

            #region apply accumulated updates (gradient descent step)
            for (int lIndex = 0; lIndex < net.Layers.Length; lIndex++)
            {
                Parallel.For(0, net.Layers[lIndex].Neurons.Length, (nIndex) =>
                {
                    var neuron = net.Layers[lIndex].Neurons[nIndex];
                    for (int wIndex = 0; wIndex < neuron.Weights.Length; wIndex++)
                        neuron.Weights[wIndex] -= nablaWeights[lIndex][nIndex][wIndex];
                });
            }
            #endregion
        }

        /// <summary>Serializes the net as XML to c:\logs\[fileName] (counterpart of the Net(string) ctor).</summary>
        public void Save(string fileName)
        {
            XmlSerializer writer = new XmlSerializer(typeof(Net));
            // using guarantees the stream is flushed and closed even if Serialize throws.
            using (var file = new System.IO.StreamWriter(@"c:\logs\" + fileName))
            {
                writer.Serialize(file, this);
            }
        }

        /// <summary>
        /// Returns the mean half-Euclid error over the data set.
        /// NOTE(review): this also runs one Train step per item as a side effect
        /// (behavior kept from the original) - it acts as a combined
        /// "train one epoch and report the error" pass; confirm callers expect that.
        /// </summary>
        public double CurrentError(List<DataItem> data)
        {
            double currentErr = 0;
            for (int i = 0; i < data.Count; i++)
            {
                currentErr += ErrorFunction(this.ComputeOutput(data[i].Input), data[i].Output);
                Train(data[i]);
            }
            return currentErr / data.Count;
        }
    }

}
