﻿using System;
using System.Collections.Generic;
using System.Linq;
using System.Text;
using Logic.Functions;

namespace Logic
{
    /// <summary>
    /// A single neuron: one weight per input from the previous layer plus a bias,
    /// producing an output signal through a supplied activation function.
    /// </summary>
    public class Neuron
    {
        // Shared RNG so neurons created in a tight loop do not get identical
        // time-based seeds. NOTE(review): System.Random is not thread-safe —
        // fine for single-threaded training, revisit if parallelized.
        private static readonly Random rand = new Random();

        /// <summary>Bias term; subtracted from the weighted input sum (see <see cref="Sum"/>).</summary>
        public float Bias { get; set; }

        /// <summary>One weight per neuron in the previous layer.</summary>
        public float[] Weights { get; set; }

        /// <summary>Previous weight values — presumably kept for momentum-style updates; confirm against trainer code.</summary>
        public float[] OldWeights { get; set; }

        /// <summary>Most recently computed activation of this neuron.</summary>
        public float OutputSignal { get; set; }

        /// <summary>Error term used during back-propagation.</summary>
        public float Delta { get; set; }

        /// <summary>
        /// Creates an empty neuron; <see cref="Weights"/> and <see cref="OldWeights"/>
        /// must be assigned by the caller before use.
        /// </summary>
        public Neuron()
        {
        }

        /// <summary>Creates a neuron wired to <paramref name="prevLayerSize"/> inputs.</summary>
        /// <param name="prevLayerSize">Number of neurons in the previous layer; must be at least 1.</param>
        /// <exception cref="ArgumentException">Thrown when <paramref name="prevLayerSize"/> is less than 1.</exception>
        public Neuron(int prevLayerSize)
        {
            if (prevLayerSize < 1)
                throw new ArgumentException("Size must be positive.", nameof(prevLayerSize));
            Weights = new float[prevLayerSize];
            OldWeights = new float[prevLayerSize];
        }

        /// <summary>
        /// Computes and stores <see cref="OutputSignal"/> as the activation of the
        /// biased weighted sum of the previous layer's signals.
        /// </summary>
        /// <param name="activationFunction">Activation function applied to the net input.</param>
        /// <param name="prevLayerSignals">Output signals of the previous layer; length must match <see cref="Weights"/>.</param>
        public void Evaluate(IFunction activationFunction, float[] prevLayerSignals)
        {
            OutputSignal = activationFunction.Compute(Sum(prevLayerSignals));
        }

        /// <summary>
        /// Returns the derivative of the activation function evaluated at the
        /// biased weighted sum of the previous layer's signals (used for back-propagation).
        /// </summary>
        /// <param name="activationFunction">Activation function whose derivative is evaluated.</param>
        /// <param name="prevLayerSignals">Output signals of the previous layer; length must match <see cref="Weights"/>.</param>
        /// <returns>The derivative value at the net input.</returns>
        public float ComputeWithDerrivative(IFunction activationFunction, float[] prevLayerSignals)
        {
            return activationFunction.ComputeDerrivative(Sum(prevLayerSignals));
        }

        /// <summary>
        /// Competitive-learning evaluation: activation of the Euclidean distance between
        /// the input vector and the (L1-normalized) weights, plus a conscience bias that
        /// penalizes neurons that win too often.
        /// </summary>
        /// <param name="activationFunction">Activation function applied to the distance.</param>
        /// <param name="prevLayerSignals">Output signals of the previous layer; length must match <see cref="Weights"/>.</param>
        /// <param name="conscienceFactor">Scales the conscience penalty.</param>
        /// <param name="layerSize">Number of neurons in this layer.</param>
        /// <param name="winningFreq">How often this neuron has won so far (a frequency in [0,1] is assumed — TODO confirm with caller).</param>
        public void EvaluateEuclideanWithConscience(IFunction activationFunction, float[] prevLayerSignals, float conscienceFactor, int layerSize, float winningFreq)
        {
            OutputSignal = activationFunction.Compute(ComputeEuclideanDistance(prevLayerSignals)) + ComputeBiasForConscience(conscienceFactor, layerSize, winningFreq);
        }

        /// <summary>
        /// Euclidean distance between the input vector and this neuron's weights
        /// normalized by their L1 norm (so the overall weight scale does not dominate).
        /// </summary>
        private float ComputeEuclideanDistance(float[] prevLayerSignals)
        {
            float norm = Weights.Sum(w => Math.Abs(w));
            if (norm == 0.0f)
                norm = 1.0f; // all-zero weights: skip normalization to avoid division by zero
            float distance = 0;
            for (int i = 0; i < Weights.Length; i++)
            {
                // Hoist the normalized difference so the division happens once per component.
                float diff = prevLayerSignals[i] - Weights[i] / norm;
                distance += diff * diff;
            }
            return (float)Math.Sqrt(distance);
        }

        /// <summary>
        /// Conscience penalty: positive (suppressing) when this neuron wins more often
        /// than the fair share 1/layerSize, negative (boosting) when it wins less.
        /// </summary>
        private float ComputeBiasForConscience(float conscienceFactor, int layerSize, float winningFreq)
        {
            return conscienceFactor * (layerSize * winningFreq - 1);
        }

        /// <summary>
        /// Net input: -<see cref="Bias"/> + Σ Weights[i] * prevLayerSignals[i].
        /// Note the bias is subtracted, not added — threshold convention.
        /// </summary>
        private float Sum(float[] prevLayerSignals)
        {
            float sum = -Bias;
            for (int i = 0; i < Weights.Length; i++)
                sum += Weights[i] * prevLayerSignals[i];
            return sum;
        }

        /// <summary>Resets all weights to zero (leaves <see cref="OldWeights"/> untouched).</summary>
        public void CleanWeights()
        {
            for (int i = 0; i < Weights.Length; i++)
                Weights[i] = 0;
        }

        /// <summary>
        /// Fills <see cref="Weights"/> and <see cref="OldWeights"/> with independent
        /// uniform random values in [min, max).
        /// </summary>
        /// <param name="min">Inclusive lower bound.</param>
        /// <param name="max">Upper bound; must not be less than <paramref name="min"/>.</param>
        /// <exception cref="ArgumentException">Thrown when <paramref name="max"/> is less than <paramref name="min"/>.</exception>
        public void InitializeWeights(float min, float max)
        {
            if (max < min)
                throw new ArgumentException("Min cannot be greater than max.", nameof(max));
            double diff = max - min;
            for (int i = 0; i < Weights.Length; i++)
            {
                Weights[i] = (float)(min + rand.NextDouble() * diff);
                OldWeights[i] = (float)(min + rand.NextDouble() * diff);
            }
        }
    }
}
