#pragma once
/***********************************************************************************************
COPYRIGHT 2011 Mafahir Fairoze

This file is part of Neural++.
(Project Website : http://mafahir.wordpress.com/projects/neuralplusplus)

Neural++ is free software. You can redistribute it and/or modify it under the terms of
the GNU General Public License as published by the Free Software Foundation, either version 3
of the License, or (at your option) any later version.

Neural++ is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License <http://www.gnu.org/licenses/> for more details.

***********************************************************************************************/
#include "../Layer.h"

namespace NeuralPlusPlus
	{
	namespace Core
		{
		namespace Backpropagation        
			{
			/// <summary>
			/// Activation Layer is a layer of activation neurons.
			/// </summary>
			class ActivationLayer : public Layer
				{
				/// When true, neuron bias values are kept fixed during training.
				/// NOTE(review): semantics inferred from the name — the flag is only
				/// declared here; confirm against the implementation file.
				public: bool UseFixedBiasValues;

						/// <summary>
						/// Constructs an instance of activation Layer
						/// </summary>
						/// <param name="neuronCount">Number of neurons in this layer.</param>
				protected: ActivationLayer(int neuronCount);

						/// <summary>
						/// Virtual destructor. This class is polymorphic (it declares pure
						/// virtual members), so deletion through a base-class pointer must
						/// dispatch to the most-derived destructor to be well-defined.
						/// </summary>
				public: virtual ~ActivationLayer() = default;

						/// <summary>
						/// Initializes all neurons and makes them ready to undergo training freshly.
						/// </summary>
				public: void Initialize() override;

						/// <summary>
						/// Sets neuron errors as the difference between actual and expected outputs
						/// </summary>
						/// <param name="expectedOutput">Expected output values; presumably one
						/// entry per neuron in this layer — TODO confirm against callers.</param>
						/// <returns>Accumulated layer error (the exact metric is defined in the
						/// implementation file, not visible in this header).</returns>
				public: double SetErrors(double* expectedOutput);

						/// <summary>
						/// Evaluate errors at all neurons in the layer
						/// </summary>
				public: void EvaluateErrors();

						/// <summary>
						/// Activation function used by all neurons in this->layer.
						/// Implemented by derived classes to define the neuron transfer function.
						/// </summary>
						/// <param name="input">Input value fed to the neuron.</param>
						/// <param name="previousOutput">Output of the neuron from the previous
						/// evaluation (allows stateful/recurrent activation functions).</param>
						/// <returns>The activated output value.</returns>
				public: virtual double Activate(double input, double previousOutput) = 0;

						/// <summary>
						/// Derivative function used by all neurons in this->layer.
						/// Implemented by derived classes; used during error backpropagation.
						/// </summary>
						/// <param name="input">Input value at which the derivative is evaluated.</param>
						/// <param name="output">Activated output corresponding to the input.</param>
						/// <returns>The derivative value.</returns>
				public: virtual double Derivative(double input, double output) = 0;
				};
			}

		}
	}