#include "SCA_NeuralNetController.h"
#include "SCA_ISensor.h"
#include "SCA_LogicManager.h"
#include "BoolValue.h"

#include <cstdio>

#ifdef HAVE_CONFIG_H
#include <config.h>
#endif


/* ------------------------------------------------------------------------- */
/* Native functions                                                          */
/* ------------------------------------------------------------------------- */

/* Construct a neural-net controller of the given type (0 = perceptron,
 * 1 = backpropagation) with the given activation threshold.
 * Training parameters get their defaults here; the network itself is
 * built lazily on the first trigger. */
SCA_NeuralNetController::SCA_NeuralNetController(SCA_IObject* gameobj,
												   short nntype,float nnthreshold,
												   PyTypeObject* T)
	:SCA_IController(gameobj,T),
	m_type(nntype),m_threshold(nnthreshold)
{
	m_isTrained = false;
	m_nbiterations = 1000;
	m_learningRate = 0.5;
	m_momentumFactor = 0.1;
	/* BUGFIX: null-initialize the lazily-allocated buffers so code can
	 * tell "not yet allocated" apart from an indeterminate pointer. */
	mao_layers = NULL;
	m_lastChangeInput = NULL;
	m_lastChangeOutput = NULL;
}



SCA_NeuralNetController::~SCA_NeuralNetController()
{
	/* NOTE(review): the buffers allocated elsewhere in this class
	 * (mao_layers, the per-neuron weight arrays, m_lastChangeInput and
	 * m_lastChangeOutput) are never freed here, so they leak when the
	 * controller is destroyed.  They cannot safely be deleted until the
	 * constructor is guaranteed to null-initialize them — confirm that
	 * before adding cleanup. */

}



/* Return a copy of this controller for object duplication. */
CValue* SCA_NeuralNetController::GetReplica()
{
	// Copy-construct, then let ProcessReplica duplicate properties and
	// the rest of the shared controller state.
	SCA_NeuralNetController* newctrl = new SCA_NeuralNetController(*this);
	newctrl->m_type = m_type;
	newctrl->ProcessReplica();
	return newctrl;
}


int SCA_NeuralNetController::getInputs()
{
	vector<SCA_ISensor*>::const_iterator is=m_linkedsensors.begin();
	int nbinputs = 0;

	while(!(is==m_linkedsensors.end()))
	{
		Neuron n;
		if((*is)->GetState())
			n.value = 1;
		else
			n.value = 0;
		mao_layers[0].Neurons.push_back(n);
		is++;
		nbinputs++;
	}
	return nbinputs;
}


/* Entry point called by the logic manager: dispatch on the network type
 * selected at construction time. */
void SCA_NeuralNetController::Trigger(SCA_LogicManager* logicmgr)
{
	if (m_type == 0)
		TriggerPerceptron(logicmgr);
	else if (m_type == 1)
		TriggerBackPropagation(logicmgr);
	// m_type == 2 and any other value: not implemented, do nothing.
}

/* Single-layer perceptron evaluation: each linked sensor i contributes
 * the float value of a game property named "w<i>" on its owner when the
 * sensor is active; the first linked actuator fires when the weighted
 * sum reaches the threshold.
 *
 * NOTE(review): mao_layers and the weight array are reallocated on every
 * trigger and the previous allocations are never freed — this leaks each
 * evaluation; confirm ownership before adding cleanup. */
void SCA_NeuralNetController::TriggerPerceptron(SCA_LogicManager* logicmgr)
{
	int nbinputs = 0;
	int i;
	float sum=0.0;
	STR_String startpropname="w";
	STR_String propname="";
	char propindex[16];	/* large enough for any int in decimal */
	mao_layers = new LAYER[2];

	nbinputs = getInputs();
	Neuron n;
	n.value = 0;
	mao_layers[1].Neurons.push_back(n);
	mao_layers[1].Neurons[0].weight = new float[nbinputs];
	for( i=0;i<nbinputs;i++)				// go through all properties called w0,w1,w2,w3...
	{
		// BUGFIX: itoa() is non-standard, and the old 2-byte buffer
		// overflowed for ten or more inputs; sprintf fixes both.
		sprintf(propindex, "%d", i);
		propname = startpropname;
		propname += propindex;
		if(m_linkedsensors.at(i)->GetState())
		{
			mao_layers[1].Neurons[0].weight[i] = m_linkedsensors.at(i)->GetParent()->FindIdentifier(propname)->GetText().ToFloat();
			sum += mao_layers[1].Neurons[0].weight[i];
		}
	}

	// Activate the first actuator when the weighted sum reaches the
	// threshold, deactivate it otherwise.
	if(sum >= m_threshold)
		logicmgr->AddActiveActuator(m_linkedactuators.at(0),true);
	else
		logicmgr->AddActiveActuator(m_linkedactuators.at(0),false);

}

/* Store the raw training-pattern text; it is parsed later by ReadPattern(). */
void SCA_NeuralNetController::SetPatternText(const STR_String& text)
{ 
	m_patternText = text;
}



/* Store the name of the training pattern. */
void SCA_NeuralNetController::SetPatternName(const STR_String& name)
{
	m_patternName = name;
}

/* Allocate the incoming-weight array of every neuron in layers
 * 1..m_nblayers-1 and seed it with uniform random values in [-0.5, 0.5). */
void SCA_NeuralNetController::RandomWeights()
{
	for (int layer = 1; layer < m_nblayers; layer++)
	{
		for (int neuron = 0; neuron < mao_layers[layer].Neurons.size(); neuron++)
		{
			// One weight per neuron in the previous layer.
			float* w = new float[mao_layers[layer - 1].Neurons.size()];
			for (int k = 0; k < mao_layers[layer - 1].Neurons.size(); k++)
				w[k] = (rand() / (float)RAND_MAX) - (0.5f);
			mao_layers[layer].Neurons[neuron].weight = w;
		}
	}
}

/* Forward pass: load the input layer (0), then propagate through the
 * hidden layer (1) and the output layer (2), applying the sigmoid to the
 * weighted sum at each neuron. */
void SCA_NeuralNetController::UpdateNetwork(float *inputs)
{
	for (int i = 0; i < m_nbinputs; i++)
		mao_layers[0].Neurons[i].value = inputs[i];

	// The hidden and output layers use the same feed-forward step.
	for (int layer = 1; layer <= 2; layer++)
	{
		for (int j = 0; j < mao_layers[layer].Neurons.size(); j++)
		{
			float sum = 0.0;
			for (int i = 0; i < mao_layers[layer - 1].Neurons.size(); i++)
				sum += mao_layers[layer - 1].Neurons[i].value * mao_layers[layer].Neurons[j].weight[i];
			mao_layers[layer].Neurons[j].value = Sigmoid(sum);
		}
	}
}

/* Activation function: hyperbolic tangent, squashing into (-1, 1). */
float SCA_NeuralNetController::Sigmoid(float _f)
{
	float activation = tanh(_f);
	return activation;
}

/* Derivative of the tanh activation, expressed in terms of the already
 * computed activation value: d/dx tanh(x) = 1 - tanh(x)^2, so the caller
 * passes the neuron's activation, not the pre-activation sum. */
float SCA_NeuralNetController::dSigmoid(float _f)
{
	// _f * _f replaces pow(_f, 2): same result without the transcendental
	// pow() call on this hot training path.
	return 1.0 - _f * _f;
}

float SCA_NeuralNetController::BackPropagate(float *inputs, float *targets)
{
	float error = 0.0;
	float change;
	float* output_deltas = new float[m_nboutputs];
	float* hidden_deltas = new float[m_nbhidden];


//# calculate error terms for output	
	
	for(int i=0;i<m_nboutputs;i++)
		output_deltas[i] = 0.0;

	for(int k=0;k<m_nboutputs;k++)			
	{
		error = targets[k] - mao_layers[2].Neurons[k].value;
		output_deltas[k] = dSigmoid(mao_layers[2].Neurons[k].value) * error;
	}

//# calculate error terms for hidden

	for(int i=0;i<m_nbhidden;i++)
		hidden_deltas[i] = 0.0;

	for(int j=0;j<m_nbhidden;j++)		
	{
		error = 0.0;
		for(int k=0;k<m_nboutputs;k++)
			error += output_deltas[k]*mao_layers[2].Neurons[k].weight[j];
		hidden_deltas[j] = dSigmoid(mao_layers[1].Neurons[j].value) * error;
	}

//# update output weights
	for(int j=0;j<m_nbhidden;j++)		
	{
		for(int k=0;k<m_nboutputs;k++)
		{
			change = output_deltas[k]*mao_layers[1].Neurons[j].value;
			mao_layers[2].Neurons[k].weight[j] += m_learningRate*change + m_momentumFactor*m_lastChangeOutput[j][k];
			m_lastChangeOutput[j][k] = change;
		}
	}

//# update input weights
	for(int i=0;i<m_nbinputs;i++)
	{
		for(int j=0;j<m_nbhidden;j++)
		{
			change = hidden_deltas[j]*mao_layers[0].Neurons[i].value;
			mao_layers[1].Neurons[j].weight[i] += m_learningRate*change + m_momentumFactor*m_lastChangeInput[i][j];
            m_lastChangeInput[i][j] = change;
		}
	}

//# calculate error
	error = 0.0;
	for(int k=0;k<m_nboutputs;k++)
		error += 0.5*pow(targets[k]-targets[k],2);

	return error;

}

/* Parse m_patternText and build the 3-layer network topology.
 *
 * Expected text layout (inferred from the parsing below — confirm
 * against the authoring documentation):
 *   line 1: number of inputs
 *   line 2: number of outputs
 *   then one line per pattern: "i,i,.../o,o,...\n", with a '#' after the
 *   final pattern line.
 *
 * NOTE(review): pattern values are read one character at a time
 * (buffer = text[i]), so only single-character (single-digit) values are
 * supported, and there is no bounds checking if the text is malformed.
 * NOTE(review): calling this twice leaks the previous mao_layers /
 * m_lastChange* allocations. */
void SCA_NeuralNetController::ReadPattern()
{
	STR_String text = m_patternText;
	STR_String buffer = "";
	int i=0;
	// First line: number of input neurons.
	while(text[i] != '\n')
	{
		buffer += text[i];
		i++;
	}
	m_nbinputs = atoi(buffer);
	i++;
	buffer.Clear();

	// Second line: number of output neurons.
	while(text[i] != '\n')
	{
		buffer += text[i];
		i++;
	}
	m_nboutputs = atoi(buffer);
	i++;
	buffer.Clear();

	// Remaining lines until '#': "inputs/outputs" pattern rows, with
	// comma-separated single-character values.
	while(text[i] != '#')
	{
		// Input values up to the '/' separator.
		while(text[i] != '/')
		{
			if(text[i] != ',')
			{
				buffer.Clear();
				buffer = text[i];
				m_patternInputValues.push_back(atoi(buffer));
			}	
			i++;
		}
		i++;
		// Output values up to end of line.
		while(text[i] != '\n')
		{
			if(text[i] !=  ',')
			{
				buffer.Clear();
				buffer = text[i];
				m_patternOutputValues.push_back(atoi(buffer));
			}
			i++;
		}
		i++;
	}
	

//Create Neuron Layers 

	// Layer 0 = inputs, layer 1 = hidden (2*inputs+1), layer 2 = outputs.
	mao_layers = new LAYER[3];
	for(int k=0;k<m_nbinputs;k++)
	{
		Neuron n;
		n.value = 1;
		mao_layers[0].Neurons.push_back(n);
	}
	m_nbhidden = 2*m_nbinputs+1;
	for(int k=0;k<m_nbhidden;k++)
	{
		Neuron n;
		n.value = 1;
		mao_layers[1].Neurons.push_back(n);
	}
	for(int k=0;k<m_nboutputs;k++)
	{
		Neuron n;
		n.value = 1;
		mao_layers[2].Neurons.push_back(n);
	}

	// Momentum buffers for BackPropagate(), initialized to 1.0.
	m_lastChangeInput = new float*[m_nbinputs];
	for(int k=0;k<m_nbinputs;k++)
	{
		m_lastChangeInput[k] = new float[m_nbhidden];
		for(int j=0;j<m_nbhidden;j++)
			m_lastChangeInput[k][j] = 1.0;
	}

	m_lastChangeOutput = new float*[m_nbhidden];
	for(int k=0;k<m_nbhidden;k++)
	{
		m_lastChangeOutput[k] = new float[m_nboutputs];
		for(int j=0;j<m_nboutputs;j++)
			m_lastChangeOutput[k][j] = 1.0;
	}
	

/*	printf("%i\n",m_nbinputs);
	printf("%i\n",m_nboutputs);
	
	for(int j=0;j<mao_layers[0].Neurons.size();j++)
		printf("%i, ",mao_layers[0].Neurons[j].value);
	for(int j=0;j<mao_layers[2].Neurons.size();j++)
		printf("%i, ",mao_layers[2].Neurons[j].value);*/

}

/* Parse the training pattern, randomize the weights, then run
 * m_nbiterations epochs of backpropagation over every pattern row. */
void SCA_NeuralNetController::TrainNetwork()
{	
	float error;
	ReadPattern();
	RandomWeights();
	float *inputs = new float[m_nbinputs];
	float *targets = new float[m_nboutputs];
	
	for(int i=0;i<m_nbiterations;i++)
	{
		// NOTE: error is accumulated per epoch but currently unused.
		error = 0.0;
		for(int j=0;j<m_patternInputValues.size()/m_nbinputs;j++)
		{
			// Copy the j-th input row of the pattern...
			for(int k=0;k<m_nbinputs;k++)
			{
				inputs[k] = m_patternInputValues[j*m_nbinputs+k];
			}
			// ...and the matching target row.
			for(int k=0;k<m_nboutputs;k++)
			{
				mao_layers[2].Neurons[k].value = m_patternOutputValues[j*m_nboutputs+k];
				targets[k] = m_patternOutputValues[j*m_nboutputs+k];
			}
			
			UpdateNetwork(inputs);

			error += BackPropagate(inputs,targets);
		}

	}

	// BUGFIX: the scratch buffers were leaked.
	delete[] inputs;
	delete[] targets;
}

/* Evaluate the trained network against the linked sensors and fire the
 * actuator matching the strongest output neuron; all other actuators are
 * deactivated.  The network is trained lazily on the first trigger. */
void SCA_NeuralNetController::TriggerBackPropagation(SCA_LogicManager* logicmgr)
{
	int indexoutput = 0;
	bool isActive = false;
	
	if(!m_isTrained)
	{
		m_nblayers = 3;
		TrainNetwork();
		m_isTrained = true;
	}
	
	float *inputs = new float[m_nbinputs];

	// Sensor states form the input vector (1.0 active / 0.0 inactive).
	for(int i=0;i<m_nbinputs;i++)
	{
		if(m_linkedsensors.at(i)->GetState())
			inputs[i] = 1.0;
		else
			inputs[i] = 0.0;	
	}
	// Any non-zero input means there is something to react to.
	for(int i=0;i<m_nbinputs;i++)
	{
		if(inputs[i] != 0.0)
			isActive = true;
	}

	if(isActive)
	{
		UpdateNetwork(inputs);
	
		// Find the output neuron with the highest activation...
		for(int i=1;i<m_nboutputs;i++)
		{
			if(mao_layers[2].Neurons[i].value > mao_layers[2].Neurons[indexoutput].value)
				indexoutput = i;
		}
		// ...activate its actuator (if positive) and deactivate the rest.
		for(int i=0;i<m_nboutputs;i++)
		{
			if(i == indexoutput && mao_layers[2].Neurons[i].value > 0)
				logicmgr->AddActiveActuator(m_linkedactuators.at(i),true);
			else
				logicmgr->AddActiveActuator(m_linkedactuators.at(i),false);
		}
	}
	else
	{
		// No active sensor: deactivate every linked actuator.
		for(int i=0;i<m_nboutputs;i++)
		{
			logicmgr->AddActiveActuator(m_linkedactuators.at(i),false);
		}
	}

	// BUGFIX: the input buffer was leaked on every trigger.
	delete[] inputs;
}

/* Resolve an identifier: if a linked sensor carries the name, return a
 * fresh CBoolValue holding its state (caller owns the reference);
 * otherwise delegate the lookup to the parent game object. */
CValue* SCA_NeuralNetController::FindIdentifier(const STR_String& identifiername)
{
	for (vector<SCA_ISensor*>::const_iterator is=m_linkedsensors.begin();
	!(is==m_linkedsensors.end());is++)
	{
		SCA_ISensor* sensor = *is;
		if (sensor->GetName() == identifiername)
		{
			// BUGFIX: return on the first match.  The original kept
			// scanning and overwrote the result, leaking one CBoolValue
			// per earlier match when sensor names were duplicated.
			return new CBoolValue(sensor->GetState());
		}
	}

	return  GetParent()->FindIdentifier(identifiername);

}
