#pragma once
/***********************************************************************************************
COPYRIGHT 2011 Mafahir Fairoze

This file is part of Neural++.
(Project Website : http://mafahir.wordpress.com/projects/neuralplusplus)

Neural++ is free software. You can redistribute it and/or modify it under the terms of
the GNU General Public License as published by the Free Software Foundation, either version 3
of the License, or (at your option) any later version.

Neural++ is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License <http://www.gnu.org/licenses/> for more details.

***********************************************************************************************/
#include "ActivationLayer.h"
#include "../INeuron.h"
namespace NeuralPlusPlus
	{
	namespace Core
		{
		namespace Backpropagation        
			{
			/// <summary>
			/// Activation neuron — the building block of a back-propagation neural network.
			/// </summary>
			class ActivationNeuron : public INeuron
				{
				public:
					// Error term computed for this neuron during back-propagation.
					double Error;

					// Additive bias applied to the neuron's weighted input sum.
					double Bias;

					/// <summary>
					/// Creates a new activation neuron.
					/// </summary>
					/// <param name="parent">
					/// The parent layer that contains this neuron
					/// </param>
					/// <exception cref="System.ArgumentNullException">
					/// If <c>parent</c> is <c>NULL</c>
					/// </exception>
					ActivationNeuron(ActivationLayer *parent);

					/// <summary>
					/// Gathers input from the source synapses and activates, updating the output
					/// </summary>
					void Run() override;

					/// <summary>
					/// Back-propagates through the target synapses and evaluates the error
					/// </summary>
					void EvaluateError();

					/// <summary>
					/// Adjusts the bias value (unless <c>UseFixedBiasValues</c> is set) and the
					/// weights of all source synapses using the back-propagation algorithm
					/// </summary>
					/// <param name="learningRate">
					/// The current learning rate (depends on training progress as well)
					/// </param>
					void Learn(double learningRate) override;
				};
			}

		}
	}