﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using System.IO;

namespace NeuralNetworks
{
    /// <summary>
    /// A simple fully connected feed-forward neural network with a bipolar
    /// sigmoid activation, trained by backpropagation. Weights can be persisted
    /// to and restored from a plain-text cache file.
    /// </summary>
    internal class Brain
    {
        #region Variables
        // Weight matrix. Rows index neurons of a receiving layer; the columns hold
        // the per-layer weight groups laid out one after another: the input-layer
        // columns first, then GetLength(0) columns per subsequent layer.
        private double[,] Connections;
        // Activations of the output layer; refreshed by Count()/Teach().
        private double[] Results;
        // Learning-rate step used by the backpropagation pass.
        private double Epsilon;
        #endregion

        #region Additional methods
        /// <summary>
        /// Derivative of <see cref="Function"/>: for f(x) = 2/(1+e^-x) - 1
        /// (i.e. tanh(x/2)) the derivative equals 0.5 * (1 + f(x)) * (1 - f(x)).
        /// </summary>
        /// <param name="fxValue">The pre-activation value x.</param>
        /// <returns>f'(x).</returns>
        private double Derivative(double fxValue)
        {
            // Evaluate the activation once; the original computed Function(fxValue) twice.
            double fx = Function(fxValue);
            return 0.5 * (1 + fx) * (1 - fx);
        }
        /// <summary>
        /// Bipolar sigmoid activation 2/(1+e^-x) - 1, output range (-1; 1).
        /// </summary>
        private double Function(double x)
        {
            return (2 / (1 + Math.Exp(-x))) - 1;
        }
        /// <summary>
        /// Forward pass that records every layer's input values for backpropagation.
        /// As a side effect fills <see cref="Results"/> with the output activations.
        /// </summary>
        /// <param name="input">Input vector; its length must match the first-layer neuron count.</param>
        /// <returns>
        /// memory[0] is the raw input, each following entry the pre-activation sums
        /// of the next stage, and the last entry the activated output values.
        /// </returns>
        /// <exception cref="ArgumentException">Thrown when the input length does not match the first layer.</exception>
        private List<double[]> MemoryCounting(double[] input)
        {
            List<double[]> memory = new List<double[]>();
            memory.Add(input);
            double[] temp = new double[input.Length];
            // The first-layer width is the remainder of the "extra" columns over the
            // hidden width; a remainder of zero means it equals the hidden-layer width.
            int firstLayerNeurons = (Connections.GetLength(1) - Connections.GetLength(0)) % Connections.GetLength(0);
            if (firstLayerNeurons == 0) firstLayerNeurons = Connections.GetLength(0);
            if (input.Length != firstLayerNeurons) throw new ArgumentException("Кількість вхідної інформації має бути рівна кількості нейронів на 1 шарі.");
            for (int i = 0; i <= Connections.GetLength(1) / Connections.GetLength(0) + 1; i++)
            {
                if (i != Connections.GetLength(1) / Connections.GetLength(0) + 1)
                {
                    // The stage feeding the output layer is sized by Results; all others by the hidden width.
                    if (i == Connections.GetLength(1) / Connections.GetLength(0)) temp = new double[Results.Length];
                    else temp = new double[Connections.GetLength(0)];
                    double func_result = 0;
                    for (int j = 0; j < input.Length; j++)
                    {
                        func_result = Function(input[j]);
                        for (int k = 0; k < temp.Length; k++)
                        {
                            if (i != 0)
                            {
                                // Pick the weight-column group of stage i; the offset differs when
                                // the input layer is narrower than the hidden layers.
                                if (firstLayerNeurons == Connections.GetLength(0)) temp[k] += func_result * Connections[k, i * Connections.GetLength(0) - firstLayerNeurons + j];
                                else temp[k] += func_result * Connections[k, (i - 1) * Connections.GetLength(0) + firstLayerNeurons + j];
                            }
                            // Stage 0 consumes the raw (non-activated) input values.
                            else temp[k] += input[j] * Connections[k, j];
                        }
                    }
                    input = (double[])temp.Clone();
                    memory.Add(input);
                }
                else
                {
                    // Final stage: activate the output sums and remember the activations.
                    memory.Add(new double[Results.Length]);
                    for (int k = 0; k < Results.Length; k++)
                    {
                        Results[k] = Function(input[k]);
                        memory.Last()[k] = Results[k];
                    }
                }
            }
            return memory;
        }
        #endregion

        #region Work with IO
        /// <summary>
        /// Restores <see cref="Connections"/> from a cache file written by <see cref="SaveMemory"/>:
        /// space-separated doubles, rows separated by "\r\n" ('\r' characters are used to count rows).
        /// NOTE(review): parsing uses the current culture, so a cache file is only portable
        /// between machines with the same decimal separator — confirm before sharing caches.
        /// </summary>
        /// <param name="cachePath">Path to the cache file.</param>
        private void OpenMemory(string cachePath)
        {
            using (FileStream fs = new FileStream(cachePath, FileMode.Open))
            {
                using (StreamReader sr = new StreamReader(fs))
                {
                    string temp = "";
                    char read;
                    List<double> currentString = new List<double>();
                    int k = 1; // number of rows seen so far (one more than the '\r' count)
                    do
                    {
                        read = (char)sr.Read();
                        if (read == ' ' || read == '\r')
                        {
                            if (read == '\r')
                                k++;
                            // The '\n' that follows a '\r' ends up at the start of the next
                            // token; Double.Parse trims leading whitespace, so it is harmless.
                            currentString.Add(Double.Parse(temp));
                            temp = "";
                        }
                        else temp += read;
                    }
                    while (!sr.EndOfStream);
                    currentString.Add(Double.Parse(temp));
                    double[,] massive = new double[currentString.Count / k, k];

                    // Values were written column-major relative to this matrix; refill in the same order.
                    int c = 0;
                    for (int i = 0; i < massive.GetLength(1); i++) // columns of the matrix (file rows)
                    {
                        for (int j = 0; j < massive.GetLength(0); j++) // rows of the matrix (values within a file row)
                        {
                            massive[j, i] = currentString[c];
                            c++;
                        }
                    }

                    Connections = (double[,])massive.Clone();
                }
            }
        }
        /// <summary>
        /// Writes <see cref="Connections"/> to a plain-text cache file readable by <see cref="OpenMemory"/>.
        /// </summary>
        /// <param name="pathToSave">Destination file path (overwritten if it already exists).</param>
        public void SaveMemory(string pathToSave)
        {
            using (FileStream fs = new FileStream(pathToSave, FileMode.Create))
            {
                using (StreamWriter sr = new StreamWriter(fs))
                {
                    for (int i = 0; i < Connections.GetLength(1); i++)
                    {
                        for (int j = 0; j < Connections.GetLength(0); j++)
                        {
                            if (j != Connections.GetLength(0) - 1)
                                sr.Write(Connections[j, i].ToString() + ' ');
                            else sr.Write(Connections[j, i].ToString());
                        }
                        if (i != Connections.GetLength(1) - 1)
                            // Must stay exactly "\r\n": OpenMemory counts rows by '\r'. The
                            // original embedded the newline as a two-line verbatim literal,
                            // which silently depends on the source file's own line endings.
                            sr.Write("\r\n");
                    }
                }
            }
        }
        #endregion

        #region Constructors
        /// <summary>
        /// Main constructor. Initializes the weights with small random non-zero values.
        /// </summary>
        /// <param name="layers">Says the amount of layers in the neural network. (Including the first and last layers).</param>
        /// <param name="neuronsFirst">Says the amount of neurons in the first (input) layer.</param>
        /// <param name="neuronsHidden">Says the amount of neurons in the hidden layers of the network (their amount is the same in each layer).</param>
        /// <param name="neuronsLast">Says the amount of neurons in the last (output) layer of the network.</param>
        public Brain(int layers, int neuronsFirst, int neuronsHidden, int neuronsLast)
        {
            Random r = new Random();
            Connections = new double[neuronsHidden, neuronsHidden * (layers - 2) + neuronsFirst];
            // Input and hidden-to-hidden weights: small values in (-0.005; 0.005).
            for (int i = 0; i < Connections.GetLength(0); i++)
            {
                for (int j = 0; j < Connections.GetLength(1) - Connections.GetLength(0); j++)
                {
                    Connections[i, j] = (r.NextDouble() - 0.5) / 100;
                    if (Connections[i, j] == 0) Connections[i, j] = 0.001; // avoid dead (exactly-zero) weights
                }
            }
            // Last column group feeds the output layer: only the first neuronsLast
            // rows are real output neurons; the remaining rows are padding kept at zero.
            for (int i = Connections.GetLength(1) - Connections.GetLength(0); i < Connections.GetLength(1); i++)
            {
                for (int j = 0; j < neuronsLast; j++)
                {
                    Connections[j, i] = (r.NextDouble() - 0.5) / 100;
                    if (Connections[j, i] == 0) Connections[j, i] = 0.001;
                }
                for (int j = neuronsLast; j < neuronsHidden; j++) { Connections[j, i] = 0; }
            }
            // A new double[] is already zero-filled; the original allocated this
            // array twice and zeroed it manually in between, to no effect.
            Results = new double[neuronsLast];
        }
        /// <summary>
        /// Creates the neural network from the previously cached file.
        /// NOTE(review): the cache stores no output-layer size, so this assumes the
        /// output layer is as wide as the hidden layers — confirm against how the
        /// cache was produced.
        /// </summary>
        /// <param name="cachePath">Path to the cache file.</param>
        public Brain(string cachePath)
        {
            OpenMemory(cachePath);
            this.Results = new double[Connections.GetLength(0)];
        }
        #endregion

        /// <summary>
        /// The main neural network counting algorithm (forward pass).
        /// </summary>
        /// <param name="input">The neural network input.</param>
        /// <returns>The array of neuron results</returns>
        /// <exception cref="ArgumentException">Thrown when the input length does not match the first layer.</exception>
        public double[] Count(double[] input)
        {
            // The forward pass is shared with Teach(): MemoryCounting fills Results
            // with the output activations as its final step. The original duplicated
            // the whole forward loop here verbatim.
            MemoryCounting(input);
            return Results;
        }
        /// <summary>
        /// Backpropagation algorithm: performs one gradient step toward the given targets.
        /// </summary>
        /// <param name="input">The neural network input data.</param>
        /// <param name="correctResults">Correct answers for these inputs.</param>
        /// <param name="epsilon">Algorithm step.</param>
        public void Teach(double[] input, double[] correctResults, double epsilon)
        {
            int inputLength = input.Length;
            this.Epsilon = epsilon;
            // Forward pass: remember every layer's input values.
            List<double[]> memory = MemoryCounting(input);

            // Backpropagation. deltas starts as a copy of the weights and updates are
            // accumulated into it, so assigning it back at the end applies
            // "weight += epsilon * sigma * activation" in one go.
            double[,] deltas = (double[,])Connections.Clone();
            double[] currentSigmas, prevSigmas = new double[Results.Length];
            for (int i = Connections.GetLength(1) / Connections.GetLength(0) + 1; i > 0; i--)
            {
                if (i != Connections.GetLength(1) / Connections.GetLength(0) + 1)
                {
                    // Hidden stage: propagate the next layer's sigmas backwards.
                    currentSigmas = new double[Connections.GetLength(0)];
                    for (int j = 0; j < Connections.GetLength(0); j++)
                    {
                        if (i != Connections.GetLength(1) / Connections.GetLength(0)) // if that's not pre last layer (this if is for choosing the l<?)
                        {
                            for (int l = 0; l < Connections.GetLength(0); l++)
                            {
                                if (inputLength != Connections.GetLength(0))
                                    currentSigmas[j] += prevSigmas[l] * Connections[l, j + Connections.GetLength(0) * (i - 1) + inputLength];
                                else currentSigmas[j] += prevSigmas[l] * Connections[l, j + Connections.GetLength(0) * i - inputLength];
                            }
                        }
                        else
                        {
                            // Pre-last stage: only the real output neurons contribute.
                            for (int l = 0; l < Results.Length; l++)
                            {
                                if (inputLength != Connections.GetLength(0))
                                    currentSigmas[j] += prevSigmas[l] * Connections[l, j + Connections.GetLength(0) * (i - 1) + inputLength];
                                else currentSigmas[j] += prevSigmas[l] * Connections[l, j + Connections.GetLength(0) * i - inputLength];
                            }
                        }
                        currentSigmas[j] *= Derivative(memory[i][j]);
                        if (i == 1)
                            // First weight group: the forward pass fed the raw input, so no activation here.
                            for (int k = 0; k < inputLength; k++)
                            { deltas[j, k] += Epsilon * currentSigmas[j] * memory[i - 1][k - (i - 1) * Connections.GetLength(0)]; }
                        else
                            for (int k = (i - 1) * Connections.GetLength(0); k < i * Connections.GetLength(0); k++)
                            { deltas[j, k] += Epsilon * currentSigmas[j] * Function(memory[i - 1][k - (i - 1) * Connections.GetLength(0)]); }
                    }
                }
                else
                {
                    // Output stage: sigma = (target - actual) * f'(net input).
                    currentSigmas = new double[Results.Length];
                    for (int j = 0; j < Results.Length; j++)
                    {
                        currentSigmas[j] = (correctResults[j] - Results[j]) * Derivative(memory[i][j]);
                        for (int k = Connections.GetLength(1) - Connections.GetLength(0); k < Connections.GetLength(1); k++)
                        { deltas[j, k] += Epsilon * currentSigmas[j] * Function(memory[i - 1][k - Connections.GetLength(1) + Connections.GetLength(0)]); }
                    }
                }
                prevSigmas = (double[])currentSigmas.Clone();
            }
            // Apply the accumulated weight updates.
            Connections = (double[,])deltas.Clone();
        }
    }
}
