#include "Network.h"
#include <stack>
#include <map>
#include <algorithm>
#include "Helper.h"

using namespace NeuralPlusPlus::Core;

// Returns the layer at 'index' in the topological order established by the
// constructor's traversal (input layer first, output layer last).
// NOTE: access is unchecked — an out-of-range index is undefined behavior.
Layer* NeuralPlusPlus::Core::Network::operator[]( int index )
	{
	return layers[index];
	}

// Constructs a network from its input and output layers, discovering every
// intermediate layer and connector by a topological traversal starting at
// the input layer. Throws if the structure contains a cycle or if the
// traversal does not end at 'outputLayer'.
NeuralPlusPlus::Core::Network::Network( Layer *inputLayer, Layer *outputLayer, TrainingMethodType trainingMethod )
	{
	// Validate
	Helper::ValidateNotNull(inputLayer, "inputLayer");
	Helper::ValidateNotNull(outputLayer, "outputLayer");

	// Assign arguments to corresponding variables
	this->inputLayer = inputLayer;
	// BUG FIX: was 'this->outputLayer = inputLayer;' — wrong argument.
	this->outputLayer = outputLayer;
	this->trainingMethod = trainingMethod;

	// Initialize jitter parameters with default values
	this->jitterEpoch = 73;
	this->jitterNoiseLimit = 0.03;
	this->isStopping = false;

	// Populate the lists by visiting layers topologically starting from the
	// input layer: a layer is visited only once all of its incoming
	// connectors have been consumed (Kahn-style, stack-driven).
	std::stack<Layer*> stack;
	stack.push(inputLayer);

	// Effective in-degree of each layer not yet visited
	std::map<Layer*, int> inDegree;
	while (!stack.empty())
		{
		// Visit the layer on top of the stack
		Layer *currentLayer = stack.top();
		stack.pop();
		layers.push_back(currentLayer);

		// Add targetConnectors to the connectors list, making sure that they
		// do not lead to a cycle
		int targetConnectorSize = currentLayer->TargetConnectors.size();
		for(int i=0;i<targetConnectorSize;i++)
			{
			Connector *connector = currentLayer->TargetConnectors[i];
			connectors.push_back(connector);
			Layer *targetLayer = connector->TargetLayer;

			// A connector back into an already-visited layer means a cycle
			if (std::find(layers.begin(), layers.end(), targetLayer) != layers.end())
				{
				throw ("Cycle Exists in the network structure");
				}

			// Virtually remove the edge currentLayer -> targetLayer
			inDegree[targetLayer] =
				inDegree.find(targetLayer) != inDegree.end()
				? inDegree[targetLayer] - 1
				: static_cast<int>(targetLayer->SourceConnectors.size()) - 1;

			// Push unvisited target layer onto the stack once its effective
			// in-degree reaches zero
			if (inDegree[targetLayer] == 0)
				{
				stack.push(targetLayer);
				}
			}
		}

	// The last layer visited must be the declared output layer
	if (layers.back() != outputLayer)
		{
		// BUG FIX: original 'throw ("msg", "outputLayer")' used the comma
		// operator and actually threw only "outputLayer".
		throw ("The outputLayer is invalid");
		}

	// Initialize the newly created network
	Initialize();
	}

// Raises BeginEpochEvent (MSVC '__event'/'__raise' extension) to notify
// subscribers that training epoch 'currentIteration' is about to start.
void NeuralPlusPlus::Core::Network::OnBeginEpoch( int currentIteration, TrainingSet *trainingSet )
	{
	__raise BeginEpochEvent(TrainingEpochEventArgs(currentIteration, trainingSet));
	}

// Raises EndEpochEvent (MSVC '__event'/'__raise' extension) to notify
// subscribers that training epoch 'currentIteration' has completed.
void NeuralPlusPlus::Core::Network::OnEndEpoch( int currentIteration, TrainingSet *trainingSet )
	{
	__raise EndEpochEvent(TrainingEpochEventArgs(currentIteration, trainingSet));
	}

// Raises BeginSampleEvent (MSVC '__event'/'__raise' extension) just before
// a training sample is learned during epoch 'currentIteration'.
void NeuralPlusPlus::Core::Network::OnBeginSample( int currentIteration, TrainingSample *currentSample )
	{
	__raise BeginSampleEvent(TrainingSampleEventArgs(currentIteration, currentSample));
	}

// Raises EndSampleEvent (MSVC '__event'/'__raise' extension) right after
// a training sample has been learned during epoch 'currentIteration'.
void NeuralPlusPlus::Core::Network::OnEndSample( int currentIteration, TrainingSample *currentSample )
	{
	__raise EndSampleEvent(TrainingSampleEventArgs(currentIteration, currentSample));
	}

void NeuralPlusPlus::Core::Network::SetLearningRate( double learningRate )
	{
	int layerCount = layers.size();
	for(int i=0;i<layerCount;i++)
		{
		layers[i]->SetLearningRate(learningRate);
		}
	}

void NeuralPlusPlus::Core::Network::SetLearningRate( double initialLearningRate, double finalLearningRate )
	{
	int layerCount = layers.size();
	for(int i=0;i<layerCount;i++)
		{
		layers[i]->SetLearningRate(initialLearningRate, finalLearningRate);
		}
	}

// Installs a custom learning-rate schedule function on every layer.
// Null/validity checks on the function are delegated to the layers.
void NeuralPlusPlus::Core::Network::SetLearningRate( AbstractFunction *learningRateFunction )
	{
	size_t layerCount = layers.size();
	for (size_t layerIndex = 0; layerIndex < layerCount; ++layerIndex)
		{
		Layer *layer = layers[layerIndex];
		layer->SetLearningRate(learningRateFunction);
		}
	}

void NeuralPlusPlus::Core::Network::Initialize()
	{
	int layerCount = layers.size();
	for(int i=0;i<layerCount;i++)
		{
		layers[i]->Initialize();

		int targetConnectorSize = layers[i]->TargetConnectors.size();
		for(int j=0;j<targetConnectorSize;j++)
			{
			layers[i]->TargetConnectors[j]->Initialize();
			}
		}
	}

double* NeuralPlusPlus::Core::Network::Run( double* input )
	{
	// Validation is delegated
	inputLayer->SetInput(input);
	int layerCount = layers.size();
	for(int i=0;i<layerCount;i++)
		{
		layers[i]->Run();
		}
	return outputLayer->GetOutput();
	}

// Trains the network on 'trainingSet' for 'trainingEpochs' epochs, visiting
// the samples of each epoch in a fresh random order. Raises the epoch and
// sample events around each unit of work; training can be interrupted
// between samples or between epochs via StopLearning().
void NeuralPlusPlus::Core::Network::Learn( TrainingSet *trainingSet, int trainingEpochs )
	{
	// Validate
	Helper::ValidateNotNull(trainingSet, "trainingSet");
	Helper::ValidatePositive(trainingEpochs, "trainingEpochs");
	if ((trainingSet->InputVectorLength != inputLayer->NeuronsLength)
		|| (trainingMethod == TrainingMethodType::Supervised && trainingSet->OutputVectorLength != outputLayer->NeuronsLength)
		|| (trainingMethod == TrainingMethodType::Unsupervised && trainingSet->OutputVectorLength != 0))
		{
		throw ("Invalid training set");
		}

	// Reset isStopping
	isStopping = false;

	// Re-Initialize the network
	Initialize();
	for (int currentIteration = 0; currentIteration < trainingEpochs; currentIteration++)
		{
		int sampleSize = trainingSet->TrainingSamples.size();
		// NOTE(review): GetRandomOrder returns a raw owning array. It is
		// released on every return path below, but an exception thrown by an
		// event handler or LearnSample would still leak it — consider having
		// Helper return a container instead (TODO confirm).
		int *randomOrder = Helper::GetRandomOrder(sampleSize);

		// Beginning a new training epoch
		OnBeginEpoch(currentIteration, trainingSet);

		// Check for Jitter Epoch: periodically perturb all connectors with
		// small random noise (limit = jitterNoiseLimit)
		if (jitterEpoch > 0 && currentIteration % jitterEpoch == 0)
			{
			int connectorSize = connectors.size();
			for(int i=0;i<connectorSize;i++)
				{
				connectors[i]->Jitter(jitterNoiseLimit);
				}
			}
		for (int index = 0; index < sampleSize; index++)
			{
			TrainingSample *randomSample = (*trainingSet)[randomOrder[index]];

			// Learn a random training sample
			OnBeginSample(currentIteration, randomSample);
			// FIX: reuse 'randomSample' instead of repeating the same
			// indexed lookup '(*trainingSet)[randomOrder[index]]'
			LearnSample(randomSample, currentIteration, trainingEpochs);
			OnEndSample(currentIteration, randomSample);

			// Check if we need to stop mid-epoch
			if (isStopping)
				{
				isStopping = false;
				delete[] randomOrder;
				return;
				}
			}

		// Training Epoch successfully complete
		OnEndEpoch(currentIteration, trainingSet);

		// Release this epoch's permutation, then honor a pending stop request
		delete[] randomOrder;
		if (isStopping)
			{
			isStopping = false;
			return;
			}
		}
	}

// Learns a single training sample as part of epoch 'currentIteration' of a
// 'trainingEpochs'-epoch run, raising the begin/end sample events around it.
// Unlike the set-based overload, this does not re-initialize the network.
void NeuralPlusPlus::Core::Network::Learn( TrainingSample *trainingSample, int currentIteration, int trainingEpochs )
	{
	Helper::ValidateNotNull(trainingSample, "trainingSample");
	Helper::ValidatePositive(trainingEpochs, "trainingEpochs");
	Helper::ValidateWithinRange(currentIteration, 0, trainingEpochs - 1, "currentIteration");

	OnBeginSample(currentIteration, trainingSample);
	LearnSample(trainingSample, currentIteration, trainingEpochs);
	OnEndSample(currentIteration, trainingSample);
	}

// Requests that an in-progress Learn() run stop; the flag is polled after
// each sample and each epoch, so training ends at the next check point.
// NOTE(review): plain bool, not atomic — cross-thread use is unsynchronized.
void NeuralPlusPlus::Core::Network::StopLearning()
	{
	isStopping = true;
	}