﻿using System;
using System.IO;
using System.Runtime.Serialization.Formatters.Binary;

namespace MSI2
{
    /// <summary>
    /// A fully-connected feed-forward neural network with a single hidden layer,
    /// trained by online backpropagation. Both the hidden and the output layer use
    /// the activation function tanh(beta * h).
    /// </summary>
    /// <remarks>
    /// NOTE(review): persistence uses BinaryFormatter, which is unsafe on untrusted
    /// input and removed entirely in .NET 9 — plan a migration to System.Text.Json
    /// or another explicit format. Field names below are part of the serialized
    /// format (BinaryFormatter serializes by field name); do not rename them.
    /// </remarks>
    [Serializable]
    public class NeuralNetwork
    {
        private int InputLayerSize;           // input layer size (mutable via SetInputSize)
        private readonly int HiddenLayerSize; // number of neurons in the hidden layer
        private readonly int OutputLayerSize; // == 3 in this project; not expected to change

        private readonly double beta;  // activation function steepness parameter
        private readonly double gamma; // backpropagation learning rate

        private double[] inputLayer;  // IL for short
        private double[] hiddenLayer; // HL; values BEFORE the activation function!
        private double[] outputLayer; // OL; values BEFORE the activation function!

        private double[][] weights1; // weights1[i][j] == weight of the edge between IL neuron i and HL neuron j
        private double[][] weights2; // weights2[i][j] == weight of the edge between HL neuron i and OL neuron j

        /// <summary>
        /// Creates a network with the given layer sizes and small random weights.
        /// </summary>
        /// <param name="inputLayerSize">Number of input units.</param>
        /// <param name="hiddenLayerSize">Number of hidden units.</param>
        /// <param name="outputLayerSize">Number of output units.</param>
        /// <param name="beta">Activation steepness parameter.</param>
        /// <param name="gamma">Learning rate used by <see cref="Backpropagation"/>.</param>
        public NeuralNetwork(int inputLayerSize, int hiddenLayerSize, int outputLayerSize, double beta = 1.0, double gamma = 1.0)
        {
            InputLayerSize = inputLayerSize;
            HiddenLayerSize = hiddenLayerSize;
            OutputLayerSize = outputLayerSize;

            this.beta = beta;
            this.gamma = gamma;

            AllocTables();
            InitializeWeightsWithRandom();
        }

        /// <summary>Convenience constructor taking a parameter bundle.</summary>
        public NeuralNetwork(NeuralNetworkParams networkParams)
            : this(networkParams.InputLayerSize, networkParams.HiddenLayerSize,
                networkParams.OutputLayerSize, networkParams.Beta, networkParams.Gamma)
        { }

        /// <summary>
        /// Output unit states AFTER applying the activation function.
        /// Returns a freshly allocated array on every access.
        /// </summary>
        public double[] OutputLayer
        {
            get { return ActivationFunction(outputLayer); }
        }

        // Hidden unit states AFTER applying the activation function
        // (freshly allocated on every access).
        private double[] HiddenLayer
        {
            get { return ActivationFunction(hiddenLayer); }
        }

        private double[] InputLayer
        {
            get { return inputLayer; }
        }

        // Allocates the three state vectors and both jagged weight matrices
        // according to the current layer sizes.
        private void AllocTables()
        {
            inputLayer = new double[InputLayerSize];
            hiddenLayer = new double[HiddenLayerSize];
            outputLayer = new double[OutputLayerSize];

            weights1 = new double[InputLayerSize][];

            for (int i = 0; i < weights1.Length; i++)
            {
                weights1[i] = new double[HiddenLayerSize];
            }

            weights2 = new double[HiddenLayerSize][];

            for (int i = 0; i < weights2.Length; i++)
            {
                weights2[i] = new double[OutputLayerSize];
            }
        }

        // Initializes both weight matrices with small random values.
        private void InitializeWeightsWithRandom()
        {
            var rand = new Random();

            RandomizeWeights(weights1, rand);
            RandomizeWeights(weights2, rand);
        }

        // Fills a jagged weight matrix with random values in [-0.001, 0.001).
        private static void RandomizeWeights(double[][] weights, Random rand)
        {
            for (int i = 0; i < weights.Length; i++)
            {
                for (int j = 0; j < weights[i].Length; j++)
                {
                    weights[i][j] = rand.NextDouble() / 500 - 0.001;
                }
            }
        }

        /// <summary>
        /// Feeds <paramref name="input"/> forward through the network, updating the
        /// internal layer states. Read the result via <see cref="OutputLayer"/>.
        /// </summary>
        /// <exception cref="ArgumentException">
        /// When the input length does not match the input layer size.
        /// </exception>
        public void RunNetwork(double[] input)
        {
            if (input.Length != inputLayer.Length)
            {
                throw new ArgumentException("Input size does not match the input layer.", nameof(input));
            }

            Array.Copy(input, inputLayer, input.Length);

            FeedHiddenLayer();
            FeedOutputLayer();
        }

        // Computes the hidden layer's pre-activation values from the input layer.
        private void FeedHiddenLayer()
        {
            for (int i = 0; i < hiddenLayer.Length; i++)
            {
                double signal = 0.0;

                for (int j = 0; j < inputLayer.Length; j++)
                {
                    signal += weights1[j][i] * inputLayer[j];
                }

                hiddenLayer[i] = signal;
            }
        }

        // Activation function: tanh(beta * h).
        private double ActivationFunction(double h)
        {
            return Math.Tanh(beta * h);
        }

        // Element-wise activation; returns a new array, leaving `tab` untouched.
        private double[] ActivationFunction(double[] tab)
        {
            double[] ret = new double[tab.Length];

            for (int i = 0; i < tab.Length; i++)
            {
                ret[i] = ActivationFunction(tab[i]);
            }

            return ret;
        }

        // Computes the output layer's pre-activation values from the
        // activated hidden layer.
        private void FeedOutputLayer()
        {
            // Hoisted: one activation pass over the hidden layer instead of
            // recomputing tanh for every (output, hidden) pair.
            double[] activatedHidden = HiddenLayer;

            for (int i = 0; i < outputLayer.Length; i++)
            {
                double signal = 0.0;

                for (int j = 0; j < hiddenLayer.Length; j++)
                {
                    signal += weights2[j][i] * activatedHidden[j];
                }

                outputLayer[i] = signal;
            }
        }

        /// <summary>
        /// Performs one online backpropagation step: runs the network on
        /// <paramref name="input"/> and moves all weights toward reproducing
        /// <paramref name="teacher"/>, scaled by the learning rate gamma.
        /// </summary>
        /// <exception cref="ArgumentException">
        /// When input or teacher length does not match the corresponding layer.
        /// </exception>
        public void Backpropagation(double[] input, double[] teacher)
        {
            // Validate everything up front so an invalid call cannot leave the
            // network state partially updated (the original ran the network
            // before checking the teacher vector).
            if (input.Length != inputLayer.Length)
            {
                throw new ArgumentException("Input size does not match the input layer.", nameof(input));
            }

            if (teacher.Length != outputLayer.Length)
            {
                throw new ArgumentException("Teacher size does not match the output layer.", nameof(teacher));
            }

            RunNetwork(input);

            // Activated unit states, computed once — the HiddenLayer/OutputLayer
            // properties allocate a fresh array on every access.
            double[] activatedHidden = HiddenLayer;
            double[] activatedOutput = OutputLayer;

            // Deltas for the output layer.
            double[] outputLayerVariation = new double[outputLayer.Length];

            for (int i = 0; i < outputLayerVariation.Length; i++)
            {
                outputLayerVariation[i] = ActivationFunctionDerivative(outputLayer[i]) * (teacher[i] - activatedOutput[i]);
            }

            // Deltas for the hidden layer — must read weights2 BEFORE it is updated.
            double[] hiddenLayerVariation = new double[hiddenLayer.Length];

            for (int b = 0; b < hiddenLayerVariation.Length; b++)
            {
                double sum = 0.0;

                for (int c = 0; c < outputLayerVariation.Length; c++)
                {
                    sum += outputLayerVariation[c] * weights2[b][c];
                }

                hiddenLayerVariation[b] = ActivationFunctionDerivative(hiddenLayer[b]) * sum;
            }

            // Weight corrections between the hidden and the output layer.
            for (int i = 0; i < outputLayer.Length; i++)
            {
                for (int j = 0; j < hiddenLayer.Length; j++)
                {
                    weights2[j][i] += gamma * outputLayerVariation[i] * activatedHidden[j];
                }
            }

            // Weight corrections between the input and the hidden layer.
            for (int i = 0; i < hiddenLayer.Length; i++)
            {
                for (int j = 0; j < inputLayer.Length; j++)
                {
                    weights1[j][i] += gamma * hiddenLayerVariation[i] * inputLayer[j];
                }
            }
        }

        // Derivative of tanh(beta * h): beta * sech^2(beta * h),
        // written as 4*beta / (e^{beta*h} + e^{-beta*h})^2.
        private double ActivationFunctionDerivative(double h)
        {
            return (4 * beta) / (Math.Pow(Math.Exp(h * beta) + Math.Exp((-1) * h * beta), 2.0));
        }

        /// <summary>Serializes the network to <paramref name="fileName"/>.</summary>
        /// <remarks>
        /// WARNING: BinaryFormatter is insecure against untrusted data and is
        /// removed in .NET 9. Only load files this program produced itself.
        /// </remarks>
        public static void SaveNeuralNetwork(string fileName, NeuralNetwork neuralNetwork)
        {
            Console.WriteLine("---------------------------------------------------------------------");
            Console.WriteLine("Saving Neural Network to file: " + fileName);

            // using-block guarantees the stream is closed even if Serialize throws
            // (the original leaked the handle on failure).
            using (Stream stream = File.Open(fileName, FileMode.Create))
            {
                BinaryFormatter bf = new BinaryFormatter();
                bf.Serialize(stream, neuralNetwork);
            }

            Console.WriteLine("Saving completed!");
            Console.WriteLine("---------------------------------------------------------------------");
        }

        /// <summary>Deserializes a network previously saved by <see cref="SaveNeuralNetwork"/>.</summary>
        /// <remarks>
        /// WARNING: never call this on a file from an untrusted source —
        /// BinaryFormatter deserialization can execute arbitrary code.
        /// </remarks>
        public static NeuralNetwork LoadNeuralNetwork(string fileName)
        {
            NeuralNetwork neuralNetwork;

            Console.WriteLine("---------------------------------------------------------------------");
            Console.WriteLine("Loading Neural Network from file: " + fileName);

            // using-block guarantees the stream is closed even if Deserialize throws.
            using (Stream stream = File.Open(fileName, FileMode.Open))
            {
                BinaryFormatter bf = new BinaryFormatter();
                neuralNetwork = (NeuralNetwork)bf.Deserialize(stream);
            }

            Console.WriteLine("Loading completed!");
            Console.WriteLine("---------------------------------------------------------------------");
            return neuralNetwork;
        }

        /// <summary>Parameter bundle for <see cref="NeuralNetwork(NeuralNetworkParams)"/>.</summary>
        public class NeuralNetworkParams
        {
            public int InputLayerSize { get; set; }
            public int HiddenLayerSize { get; set; }
            public int OutputLayerSize { get; set; }
            public double Beta { get; set; }
            public double Gamma { get; set; }
        }

        /// <summary>
        /// Resizes the input layer. The input-to-hidden weight matrix is
        /// re-created and re-randomized to match the new size.
        /// </summary>
        public void SetInputSize(int inputSize)
        {
            InputLayerSize = inputSize;
            inputLayer = new double[inputSize];

            // BUG FIX: weights1 must be re-allocated as well. The original left it
            // at the old size, so the next RunNetwork either indexed past the end
            // of weights1 (when growing) or silently used stale rows (shrinking).
            weights1 = new double[InputLayerSize][];

            for (int i = 0; i < weights1.Length; i++)
            {
                weights1[i] = new double[HiddenLayerSize];
            }

            RandomizeWeights(weights1, new Random());
        }
    }
}
