﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using System.Threading.Tasks;
using Beetle;
using System.Xml.Serialization;

namespace Beetle.DimaHead
{
    /// <summary>
    /// A single neuron: holds an input vector and a matching weight vector and
    /// exposes its activation, derivative, and weighted input sum.
    /// </summary>
    public class Neuron
    {
        // Input values for the current forward pass (written by Layer.Calc).
        public double[] Inputs;
        // One weight per input.
        public double[] Weights;

        /// <summary>
        /// Activation: a sigmoid rescaled from (0, 1) to (-1, 1),
        /// i.e. 2 * (sigmoid(alfa * InputSum) - 0.5).
        /// </summary>
        public double Output
        {
            get
            {
                return ((1 / (1 + Math.Exp(-Coeff.alfa * InputSum))) - 0.5) * 2;
            }
        }

        /// <summary>
        /// Derivative of <see cref="Output"/> with respect to <see cref="InputSum"/>;
        /// used by backpropagation.
        /// </summary>
        public double Derivative
        {
            get
            {
                // Hoisted: the original evaluated InputSum (an O(n) property)
                // and Math.Exp twice per access.
                double e = Math.Exp(-Coeff.alfa * InputSum);
                return (2 * Coeff.alfa * e) / Math.Pow(1 + e, 2);
            }
        }

        /// <summary>Weighted sum of the inputs (the pre-activation value).</summary>
        public double InputSum
        {
            get
            {
                double res = 0;
                for (int i = 0; i < InputCount; i++)
                {
                    res += Inputs[i] * Weights[i];
                }
                return res;
            }
        }

        /// <summary>Number of inputs this neuron accepts.</summary>
        public int InputCount
        {
            get
            {
                // Array.Length instead of LINQ Count() — no enumerator overhead.
                return Inputs.Length;
            }
        }

        /// <summary>Clears all input values to zero.</summary>
        public void Reset()
        {
            for (int i = 0; i < InputCount; i++) Inputs[i] = 0;
        }

        /// <summary>
        /// Creates a neuron with <paramref name="count"/> inputs, zeroed inputs,
        /// and random initial weights in [-0.45, 0.45).
        /// </summary>
        public Neuron(int count)
        {
            Inputs = new double[count];
            Weights = new double[count];

            for (int i = 0; i < count; i++)
            {
                Inputs[i] = 0;
                Weights[i] = (double)Coeff.rand.Next(-4500, 4500) / 10000;
            }
        }

        /// <summary>
        /// Creates a neuron that shares the weight array of an existing neuron.
        /// NOTE(review): the weights are aliased, not copied — presumably deliberate
        /// weight sharing; confirm against callers before changing.
        /// </summary>
        public Neuron(Neuron neuron)
        {
            this.Weights = neuron.Weights;
            // Bug fix: Inputs was never assigned, so InputCount / Output / Reset
            // all threw NullReferenceException on a copied neuron.
            this.Inputs = new double[neuron.Weights.Length];
        }

        /// <summary>Default neuron with 26 inputs.</summary>
        public Neuron(): this(26)
        {
        }
    }

    /// <summary>
    /// One layer of the network: a fixed array of neurons that all receive the
    /// same input vector.
    /// </summary>
    public class Layer
    {
        // The neurons that make up this layer.
        public Neuron[] Neurons;

        /// <summary>Number of neurons in this layer.</summary>
        public int NeuronsCount
        {
            get
            {
                // Array.Length instead of LINQ Count().
                return Neurons.Length;
            }
        }

        /// <summary>
        /// Number of inputs each neuron expects; the first neuron is taken as
        /// representative (all neurons are assumed to be wired identically).
        /// </summary>
        public int InputCount
        {
            get
            {
                return Neurons[0].InputCount;
            }
        }

        /// <summary>
        /// Feeds <paramref name="inputs"/> to every neuron and returns their outputs.
        /// </summary>
        /// <param name="inputs">Input vector; must have exactly <see cref="InputCount"/> elements.</param>
        /// <returns>One output value per neuron.</returns>
        /// <exception cref="Exception">Thrown when the input length does not match <see cref="InputCount"/>.</exception>
        public double[] Calc(double[] inputs)
        {
            if (inputs.Length != InputCount) throw new Exception("В слое " + InputCount + " входов.");

            double[] Output = new double[NeuronsCount];

            for (int i = 0; i < NeuronsCount; i++)
            {
                // Every neuron aliases the same input array — no per-neuron copy.
                Neurons[i].Inputs = inputs;
                Output[i] = Neurons[i].Output;
            }

            return Output;
        }

        /// <summary>
        /// Current outputs of all neurons. Rebuilt (and each activation
        /// recomputed) on every access — cache the result if reading repeatedly.
        /// </summary>
        public double[] Output
        {
            get
            {
                var result = new double[NeuronsCount];

                for (int i = 0; i < result.Length; i++) result[i] = Neurons[i].Output;

                return result;
            }
        }

        /// <summary>
        /// Creates a layer of <paramref name="neuronsCount"/> neurons, each with
        /// <paramref name="inputCount"/> randomly weighted inputs.
        /// </summary>
        public Layer(int neuronsCount, int inputCount)
        {
            Neurons = new Neuron[neuronsCount];
            for (int i = 0; i < neuronsCount; i++)
            {
                Neurons[i] = new Neuron(inputCount);
            }
        }

        /// <summary>
        /// Allocates the neuron array only; the slots stay null until the caller
        /// fills them in.
        /// </summary>
        public Layer(int neuronsCount)
        {
            Neurons = new Neuron[neuronsCount];
        }

        /// <summary>Default layer: 26 neurons with 26 inputs each.</summary>
        public Layer() :this(26,26)
        {

        }
    }

    /// <summary>
    /// A feed-forward network of <see cref="Layer"/>s with backpropagation
    /// training and XML-file persistence (under c:\logs\).
    /// </summary>
    public class DimaNeuro
    {
        // Layers in forward order; Layers[0] receives the network input.
        public Layer[] Layers;

        /// <summary>Number of layers in the network.</summary>
        public int LayersCount
        {
            get
            {
                // Array.Length instead of LINQ Count().
                return Layers.Length;
            }
        }

        /// <summary>Input width of the network (first layer's input count).</summary>
        public int InputCount
        {
            get
            {
                return Layers[0].InputCount;
            }
        }

        /// <summary>Output width of the network (last layer's neuron count).</summary>
        public int OutputCount
        {
            get
            {
                return Layers.Last().NeuronsCount;
            }
        }

        /// <summary>
        /// Runs one forward pass, feeding each layer's output into the next.
        /// </summary>
        /// <param name="inputs">Input vector; must have <see cref="InputCount"/> elements.</param>
        /// <returns>The last layer's output vector.</returns>
        /// <exception cref="Exception">Thrown when the input length does not match <see cref="InputCount"/>.</exception>
        public double[] MakeIteration(double[] inputs)
        {
            if (inputs.Length != InputCount) throw new Exception("В нейросети " + InputCount + " входов.");

            double[] NextLayerInput = inputs;

            for (int i = 0; i < LayersCount; i++)
            {
                NextLayerInput = Layers[i].Calc(NextLayerInput);
            }

            return NextLayerInput;
        }

        /// <summary>
        /// One backpropagation step toward <paramref name="bestoutput"/>.
        /// Assumes <see cref="MakeIteration"/> was called first (neurons still
        /// hold their inputs) and that the network has at least two layers.
        /// </summary>
        /// <param name="bestoutput">Desired output, one value per output neuron.</param>
        /// <param name="k">Learning rate.</param>
        public void Teach(double[] bestoutput, double k=Coeff.kgood)
        {
            // Per-neuron error terms (deltas) of the layer being processed.
            double[] oldKoef = new double[Layers[LayersCount - 1].NeuronsCount];

            // delta*weight accumulators, indexed by the previous layer's neurons.
            // Bug fix: sized by the output layer's InputCount instead of
            // Layers[LayersCount - 2].NeuronsCount — identical for a properly
            // wired net, correct even when it isn't.
            double[] oldSum = new double[Layers[LayersCount - 1].InputCount];

            object loker1 = new object();

            // Output layer: delta = f'(net) * (target - actual).
            Parallel.For(0, oldKoef.Length, i =>
            {
                var Neuron = Layers[LayersCount - 1].Neurons[i];

                oldKoef[i] = 1 * Neuron.Derivative * (bestoutput[i] - Neuron.Output);

                for (int j = 0; j < Neuron.InputCount; j++)
                {
                    // oldSum is shared across the parallel loop — guard the
                    // accumulation; each neuron's own weights need no lock.
                    lock (loker1)
                    {
                        oldSum[j] += oldKoef[i] * Neuron.Weights[j];
                    }
                    Neuron.Weights[j] += k * oldKoef[i] * Neuron.Inputs[j];
                }
            });


            // Hidden layers, back to front.
            for (int i = LayersCount - 2; i >= 0; i--)
            {
                var Layer = Layers[i];
                double[] newKoef = new double[Layer.NeuronsCount];
                // Bug fix: sized by this layer's InputCount (the previous layer's
                // width) instead of Layer.NeuronsCount, which overflowed whenever
                // adjacent layers had different widths.
                double[] newSum = new double[Layer.InputCount];
                object loker2 = new object();

                Parallel.For(0, newKoef.Length, j =>
                {
                    var Neuron = Layer.Neurons[j];

                    newKoef[j] = 1 * Neuron.Derivative * oldSum[j];

                    for (int o = 0; o < Neuron.InputCount; o++)
                    {
                        lock (loker2)
                        {
                            newSum[o] += newKoef[j] * Neuron.Weights[o];
                        }
                        // NOTE(review): the output layer applies "+=" above while
                        // hidden layers apply "-=" here — one of the signs looks
                        // wrong; confirm against training results before changing.
                        Neuron.Weights[o] -= k * newKoef[j] * Neuron.Inputs[o];
                    }
                });

                oldSum = newSum;
                oldKoef = newKoef;
            }
        }

        /// <summary>
        /// Half squared-error between the network's current output and
        /// <paramref name="bestoutput"/>.
        /// </summary>
        public double Error(double[] bestoutput)
        {
            double result = 0;

            // Hoisted: Layers.Last().Output rebuilds and recomputes the whole
            // output array on every access; the original did that once per
            // loop iteration (O(n^2) activations).
            double[] output = Layers.Last().Output;

            for (int i = 0; i < OutputCount; i++)
            {
                result += (1.0 / 2.0) * Math.Pow(bestoutput[i] - output[i], 2);
            }

            return result;
        }

        /// <summary>
        /// Serializes the network as XML to c:\logs\<paramref name="fileName"/>.
        /// </summary>
        public void Save(string fileName)
        {
            XmlSerializer writer = new XmlSerializer(typeof(DimaNeuro));

            // using guarantees the stream is closed even if serialization throws
            // (the original leaked the StreamWriter on failure).
            using (System.IO.StreamWriter file = new System.IO.StreamWriter(@"c:\logs\" + fileName))
            {
                writer.Serialize(file, this);
            }
        }

        /// <summary>Default topology: 26 → 26 → 18 with 26 inputs.</summary>
        public DimaNeuro()
        {
            Layers = new Layer[3];

            Layers[0] = new Layer(26, 26);
            Layers[1] = new Layer(26, 26);
            Layers[2] = new Layer(18, 26);
        }

        /// <summary>
        /// Builds a network from parallel arrays of per-layer neuron counts and
        /// input counts. NOTE(review): the layer count comes from
        /// layersInputsCount — a shorter layersNeuronCount array will throw
        /// IndexOutOfRangeException; verify callers keep the arrays in sync.
        /// </summary>
        public DimaNeuro(int[] layersNeuronCount, int[] layersInputsCount)
        {
            Layers = new Layer[layersInputsCount.Length];

            for (int i = 0; i < Layers.Length; i++)
            {
                Layers[i] = new Layer(layersNeuronCount[i], layersInputsCount[i]);
            }
        }

        /// <summary>
        /// Loads a network previously written by <see cref="Save"/> from
        /// c:\logs\<paramref name="fileName"/>.
        /// </summary>
        public DimaNeuro(string fileName)
        {
            XmlSerializer reader = new XmlSerializer(typeof(DimaNeuro));
            DimaNeuro temp;
            // using guarantees the stream is closed even if deserialization
            // throws (the original leaked the StreamReader on failure).
            using (System.IO.StreamReader file = new System.IO.StreamReader(
                @"c:\logs\" + fileName))
            {
                temp = (DimaNeuro)reader.Deserialize(file);
            }

            this.Layers = temp.Layers;
        }
    }
}
