#pragma once
/***********************************************************************************************
COPYRIGHT 2011 Mafahir Fairoze

This file is part of Neural++.
(Project Website : http://mafahir.wordpress.com/projects/neuralplusplus)

Neural++ is free software. You can redistribute it and/or modify it under the terms of
the GNU General Public License as published by the Free Software Foundation, either version 3
of the License, or (at your option) any later version.

Neural++ is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY;
without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
See the GNU General Public License <http://www.gnu.org/licenses/> for more details.

***********************************************************************************************/
#include "ActivationNeuron.h"
#include "BackpropagationConnector.h"

namespace NeuralPlusPlus
	{
	namespace Core
		{
		namespace Backpropagation        
			{
			/// <summary>
			/// A synapse linking two activation neurons in a backpropagation network.
			/// A typical backpropagation network contains thousands of these links.
			/// </summary>
			class BackpropagationSynapse : public ISynapse
			{
			public:
				/// <summary>
				/// Constructs a synapse joining the two given neurons under the given connector.
				/// </summary>
				BackpropagationSynapse(
					ActivationNeuron *sourceNeuron, ActivationNeuron *targetNeuron, BackpropagationConnector *parent);

				/// <summary>
				/// Forwards the signal from the source neuron to the target neuron.
				/// </summary>
				void Propagate();

				/// <summary>
				/// Adjusts the weight using the backpropagation rule so as to reduce the error.
				/// </summary>
				void OptimizeWeight(double learningFactor);

				/// <summary>
				/// Sends the error signal backwards, from the target neuron to the source neuron.
				/// </summary>
				void Backpropagate();

				/// <summary>
				/// Perturbs this synapse's weight with small random noise so the network can
				/// drift away from a local optimum (an equilibrium state where further
				/// learning makes no progress).
				/// </summary>
				/// <param name="jitterNoiseLimit">
				/// Maximum absolute magnitude of the random noise added
				/// </param>
				void Jitter(double jitterNoiseLimit);

			private:
				// Error term accumulated during back-propagation; name kept as-is because
				// the out-of-line implementation file refers to it.
				double Delta;
			};
			}
		}
	}