
#include "neural_net_genetic.h"

#include <cassert>
#include <cmath>
#include <cstdlib>
#include <ctime>
#include <fstream>
#include <stdexcept>
#include <vector>

using namespace WtfEngine;

/**
 * Bipolar sigmoid activation used as the default trigger function.
 * Eval maps any input onto (-1, 1); Derivative is expressed in terms of
 * the OUTPUT value fv = Eval(v), as backpropagation requires.
 */
struct SigmoidFunc {
	// f(v) = 2 * sigma(v) - 1, where sigma is the logistic function.
	static float Eval(float v)
	{
		const double sigma = 1.0 / (1.0 + exp(-v));
		return 2.0 * sigma - 1.0;
	}

	// (1 + fv) * (1 - fv), written via the shifted value s = fv + 1.
	// NOTE(review): this is 2x the analytic derivative of Eval; the factor
	// is absorbed by the learning rate, so it is preserved as-is.
	static float Derivative(float fv)
	{
		const float s = fv + 1.0;
		return s * (2.0 - s);
	}
};

/**
 * Fixed-capacity (_Dim neurons) neural network with an arbitrary connection
 * graph, supporting random mutation (for genetic training) and gradient
 * backpropagation.  _Trigger supplies the activation function and its
 * derivative (expressed in terms of the activation output).
 */
template <unsigned _Dim, class _Trigger = SigmoidFunc>
class GeneticNeuralNetworkImpl: public IGeneticNeuralNetworkImpl {
	std::vector<unsigned> mInputMap;    // neuron indices fed by external inputs
	std::vector<unsigned> mOutputMap;   // neuron indices read back as outputs

	float mValues[_Dim];                // activation of each neuron for the current pass
	float mWeights[_Dim];               // per-neuron weight; serialized but not read by GetValue
	float mConnectionMap[_Dim][_Dim];   // [i][j] = weight of edge j -> i; 0.0 means "no edge"
	float mDeltas[_Dim];                // backpropagation error terms
	float mMomentum;                    // learning-rate factor, recomputed as the MSE each epoch
	bool mNodeProcessed[_Dim];          // mValues[i] is valid for the current pass
	bool mNodeHasDelta[_Dim];           // mDeltas[i] is valid for the current pass
	bool mNodeProcessing[_Dim];         // node is currently on the GetValue recursion stack


	static const int _RandRes = 1024;   // resolution of the random weight generator
	static int gRandSeed;               // perturbation added to clock() when reseeding

public:
	GeneticNeuralNetworkImpl( const std::vector<unsigned>& inputMap, const std::vector<unsigned>& outputMap )
		: mInputMap(inputMap), mOutputMap(outputMap), mMomentum(1.0)
	{
		// BUGFIX: zero ALL per-node state (previously mValues, mWeights,
		// mDeltas, mConnectionMap and mNodeHasDelta were left uninitialized,
		// and Mutate()/BackPropagate() test connections against 0.0).
		for( unsigned i = 0; i < _Dim; i++ ) {
			mValues[i] = mWeights[i] = mDeltas[i] = 0.0f;
			mNodeProcessed[i] = mNodeProcessing[i] = mNodeHasDelta[i] = false;
			for( unsigned j = 0; j < _Dim; j++ ) {
				mConnectionMap[i][j] = 0.0f;
			}
		}
	}


	/**
	 * Random weight in [-1, 1].  Raw samples land in [-1, 1.5); anything
	 * above 1 folds to 0, deliberately giving a ~20% chance of "no edge".
	 **/
	inline float GetRand()
	{
		float f = (float(rand() % (_RandRes * 5)) / (float)(_RandRes * 2)) - 1.0;
		return f > 1.0 ? 0.0 : f;
	}

	/// Reseeds rand() and re-rolls every neuron weight and every connection.
	void Randomize()
	{
		srand( clock() + gRandSeed++ );
		for( unsigned i = 0; i < _Dim; i++ ) {
			mWeights[i] = GetRand();
			for( unsigned j = 0; j < _Dim; j++ ) {
				mConnectionMap[i][j] = GetRand();
			}
		}
	}

	/**
	 * Chooses a random neuron to mutate, then randomly changes its weight and a random
	 * number of its incoming connections (distinct indices, chosen left to right).
	 * @note Assumes srand has been called
	 **/
	void Mutate()
	{
		unsigned i = rand() % _Dim;
		mWeights[i] = GetRand();

		// Pick 'c' distinct column indices by walking the row with random
		// skips.  Invariant: c <= _Dim - j, so every write stays in bounds.
		// (BUGFIX: the original could evaluate rand() % 0 and write
		// mConnectionMap[i][_Dim], one past the end of the row.)
		unsigned c = rand() % (_Dim + 1);
		unsigned j = 0;
		while( c > 0 ) {
			unsigned slotsLeft = _Dim - j;
			j += rand() % (slotsLeft - c + 1);
			mConnectionMap[i][j] = GetRand();
			j++;
			c--;
		}
	}

	/**
	 * Loads external input values and pins them for the next evaluation.
	 * Also severs all incoming connections of input nodes so GetValue
	 * treats them as pure sources.
	 **/
	void ProcessInputs( std::vector<float>& vIn )
	{
		for( unsigned i = 0; i < _Dim; i++ ) {
			mNodeProcessed[i] = false;
		}
		for( unsigned i = 0; i < vIn.size(); i++ ) {
			mValues[mInputMap[i]] = vIn[i];
			// BUGFIX: mark the MAPPED node as processed (was mNodeProcessed[i],
			// which pinned the wrong neuron whenever mInputMap[i] != i;
			// compare the correct handling in BackPropagate).
			mNodeProcessed[mInputMap[i]] = true;
			for( unsigned j = 0; j < _Dim; j++ ) {
				mConnectionMap[mInputMap[i]][j] = 0.0;
			}
		}
	}

	/**
	 * Recursively evaluates neuron i from its incoming connections.
	 * Cycles are broken destructively: any edge coming from a node that is
	 * currently on the recursion stack is removed from the network.
	 **/
	float GetValue( unsigned i )
	{
		assert(!mNodeProcessing[i]);

		if( mNodeProcessed[i] ) {
			return mValues[i];
		}

		mValues[i] = 0.0;
		mNodeProcessing[i] = true;
		for( unsigned j = 0; j < _Dim; j++ ) {
			// Remove loops
			if( mNodeProcessing[j] ) mConnectionMap[i][j] = 0.0;

			if( mConnectionMap[i][j] != 0.0 ) {
				mValues[i] += GetValue(j) * mConnectionMap[i][j];
			}
		}
		// BUGFIX: `typename _Trigger::Eval(...)` is ill-formed; `typename`
		// may only introduce a dependent TYPE name, never a function call.
		mValues[i] = _Trigger::Eval( mValues[i] );
		mNodeProcessing[i] = false;
		mNodeProcessed[i] = true;
		return mValues[i];
	}

	/// Evaluates the mapped output neurons into vOut (vOut sized by caller).
	void ReadOutputs( std::vector<float>& vOut )
	{
		for( unsigned i = 0; i < vOut.size(); i++ ) {
			vOut[i] = GetValue( mOutputMap[i] );
		}
	}

	/**
	 * Calculates the delta for a node, during backpropagation.
	 * @pre Output values have been computed for all connected nodes.
	 **/
	float GetDelta( unsigned i )
	{
		if( !mNodeHasDelta[i] ) {
			float v = GetValue( i );
			mDeltas[i] = 0.0;

			// Accumulate error from every node this one feeds into.
			for( unsigned j = 0; j < _Dim; j++ ) {
				if( mConnectionMap[j][i] != 0.0 ) {
					mDeltas[i] += GetDelta(j) * mConnectionMap[j][i];
				}
			}

			// BUGFIX: removed the ill-formed `typename` before the call.
			mDeltas[i] *= _Trigger::Derivative(v);
			mNodeHasDelta[i] = true;
		}
		return mDeltas[i];
	}

	/**
	 * Trains the network toward vDesired until the mean square error drops
	 * below maxError, reusing the MSE of each epoch as the next step size.
	 * @note May loop forever if the error never falls below maxError.
	 * @note The update ADDS mMomentum * delta * value rather than
	 *       subtracting; preserved as-is -- verify against original
	 *       training results before changing the sign.
	 **/
	void BackPropagate( const std::vector<float>& vDesired, float maxError )
	{
		std::vector<float> vOut;
		vOut.resize(vDesired.size());

		do {
			// Forward pass: re-pin the input nodes, then evaluate outputs.
			for( unsigned i = 0; i < _Dim; i++ ) {
				mNodeProcessed[i] = false;
			}
			for( std::vector<unsigned>::const_iterator i = mInputMap.begin(); i != mInputMap.end(); i++ ) {
				mNodeProcessed[*i] = true;
				for( unsigned j = 0; j < _Dim; j++ ) {
					mConnectionMap[*i][j] = 0.0;
				}
			}
			ReadOutputs(vOut);

			// Seed deltas at the output nodes.
			for( unsigned i = 0; i < _Dim; i++ ) {
				mNodeHasDelta[i] = false;
				mDeltas[i] = 0.0;
			}
			for( unsigned i = 0; i < vDesired.size(); i++ ) {
				mDeltas[mOutputMap[i]] = vOut[i] - vDesired[i];
				// BUGFIX: removed the ill-formed `typename` before the call.
				mDeltas[mOutputMap[i]] *= _Trigger::Derivative(vOut[i]);
				mNodeHasDelta[mOutputMap[i]] = true;
			}

			// Update weights
			for( unsigned i = 0; i < _Dim; i++ ) {
				for( unsigned j = 0; j < _Dim; j++ ) {
					mConnectionMap[j][i] += mMomentum * GetDelta(j) * mValues[i];
				}
			}

			// Calculate mean square error, store as next momentum
			mMomentum = 0;
			for( unsigned i = 0; i < vDesired.size(); i++ ) {
				mMomentum += mDeltas[mOutputMap[i]] * mDeltas[mOutputMap[i]];
			}
			mMomentum /= float(vDesired.size());

		} while(mMomentum > maxError);
	}

	/// Reads weights then the full connection matrix (mirrors Save()).
	void Load(std::istream& s)
	{
		for( unsigned i = 0; i < _Dim; i++ ) {
			s >> mWeights[i];
		}
		for( unsigned i = 0; i < _Dim; i++ ) {
			for( unsigned j = 0; j < _Dim; j++ ) {
				s >> mConnectionMap[i][j];
			}
		}
	}

	/// Writes weights on one line, then one line per neuron's connections.
	void Save(std::ostream& s)
	{
		for( unsigned i = 0; i < _Dim; i++ ) {
			s << mWeights[i] << " ";
		}
		s << std::endl;
		for( unsigned i = 0; i < _Dim; i++ ) {
			for( unsigned j = 0; j < _Dim; j++ ) {
				s << mConnectionMap[i][j] << " ";
			}
			s << std::endl;
		}
	}

	/// In-memory footprint of this instantiation, in bytes.
	unsigned GetSize() const
	{
		return sizeof(*this);
	}
	/// Compiled neuron capacity (may exceed the dim requested of CreateGNN).
	unsigned GetNeuronCount() const
	{
		return _Dim;
	}
	const std::vector<unsigned>& GetInputMap() const
	{
		return mInputMap;
	}
	const std::vector<unsigned>& GetOutputMap() const
	{
		return mOutputMap;
	}

	IGeneticNeuralNetworkImpl * Clone() const
	{
		// BUGFIX: was `new GeneticNeuralNetworkImpl<_Dim>(*this)`, which
		// silently drops a non-default _Trigger (and fails to compile for
		// one).  The injected class name keeps both template arguments.
		return new GeneticNeuralNetworkImpl(*this);
	}
};

template <unsigned _Dim, class _Trigger>
int GeneticNeuralNetworkImpl<_Dim, _Trigger>::gRandSeed = 0;



/**** Wrapper Class ****/

/**
 * Factory: instantiates the smallest compiled network capacity that can
 * hold 'dim' neurons.
 * @throws std::logic_error if dim is 0 or exceeds the largest capacity (256).
 */
IGeneticNeuralNetworkImpl * CreateGNN( unsigned dim, const std::vector<unsigned>& inputMap, const std::vector<unsigned>& outputMap )
{
	if( dim == 0 ) {
		throw std::logic_error("Trying to create a GNN with zero neurons");
	}

	// Capacities double from 16 to 256; round dim up to the next one.
	if( dim <= 16 )  return new GeneticNeuralNetworkImpl<16>(inputMap, outputMap);
	if( dim <= 32 )  return new GeneticNeuralNetworkImpl<32>(inputMap, outputMap);
	if( dim <= 64 )  return new GeneticNeuralNetworkImpl<64>(inputMap, outputMap);
	if( dim <= 128 ) return new GeneticNeuralNetworkImpl<128>(inputMap, outputMap);
	if( dim <= 256 ) return new GeneticNeuralNetworkImpl<256>(inputMap, outputMap);

	throw std::logic_error("Trying to create a GNN with too many neurons");
}

/// Creates an empty wrapper: mpImpl stays NULL until an implementation is
/// supplied (e.g. by Clone(), which fills it in after this ctor runs).
GeneticNeuralNetwork::GeneticNeuralNetwork(const WtfEngine::StringPool::Entry &name)
: DataObject(name), mpImpl(NULL)
{
}

/// Creates a network sized for 'dim' neurons with the given input/output
/// neuron mappings; CreateGNN may throw std::logic_error for dim == 0 or
/// dim > 256, in which case construction fails.
GeneticNeuralNetwork::GeneticNeuralNetwork(const WtfEngine::StringPool::Entry &name, unsigned dim, const std::vector<unsigned>& inputMap, const std::vector<unsigned>& outputMap )
: DataObject(name), mpImpl( CreateGNN(dim, inputMap, outputMap) )
{
}

/**
 * Deep-copies this network under a new name.
 * @throws std::logic_error if the network has no implementation yet.
 */
GeneticNeuralNetwork::tRef GeneticNeuralNetwork::Clone(const StringPool::Entry& name) const
{
	// BUGFIX: guard against a NULL mpImpl (the default ctor leaves it NULL);
	// matches the check SaveToFile already performs.
	if(mpImpl == NULL) throw std::logic_error("Neural network is not initialized");

	GeneticNeuralNetwork * pNew = new GeneticNeuralNetwork(name);
	pNew->mpImpl = mpImpl->Clone();
	return pNew;
}

/**
 * File format:
 * #Neurons (N)
 * Input Map (P, i1, i2, ..., iP)
 * Output Map (Q, o1, o2, ..., oQ)
 * Neuron weights (w1, ..., wN)
 * Neuron 1 Connections (1c1, 1c2, ..., 1cN)
 * ...
 * Neuron N Connections (Nc1, ..., NcN)
 **/
/**
 * Serializes the network to a text file (format documented above).
 * @throws std::logic_error   if the network has no implementation yet
 * @throws std::runtime_error if the file cannot be opened
 */
void GeneticNeuralNetwork::SaveToFile(const String& sFile)
{
	if(mpImpl == NULL) throw std::logic_error("Neural network is not initialized");

	std::ofstream file(sFile.c_str());
	if(!file.good()) throw std::runtime_error("Failed to open output file");

	// Header line: neuron count.
	file << mpImpl->GetNeuronCount();
	file << std::endl;

	// Input map: count followed by the mapped neuron indices.
	// (unsigned counters fix the signed/unsigned comparison with size().)
	const std::vector<unsigned>& inputMap = mpImpl->GetInputMap();
	file << inputMap.size() << " ";
	for( std::vector<unsigned>::size_type i = 0; i < inputMap.size(); i++ ) {
		file << inputMap[i] << " ";
	}
	file << std::endl;

	// Output map: same layout.
	const std::vector<unsigned>& outputMap = mpImpl->GetOutputMap();
	file << outputMap.size() << " ";
	for( std::vector<unsigned>::size_type i = 0; i < outputMap.size(); i++ ) {
		file << outputMap[i] << " ";
	}
	file << std::endl;

	// Weights and connection matrix.
	mpImpl->Save(file);

	file.close();
}

/**
 * Deserializes a network from the file written by SaveToFile (the file
 * path doubles as the object name).
 * @throws std::runtime_error if the file cannot be opened or its header
 *         is malformed/truncated
 * @throws std::logic_error   (from CreateGNN) if the neuron count is 0 or
 *         exceeds the largest compiled capacity
 */
IDataObject * GeneticNeuralNetwork::Load(const StringPool::Entry& sFilePath)
{
	unsigned N, n;
	std::vector<unsigned> inputMap, outputMap;
	std::ifstream file((*sFilePath).c_str());
	if(!file.good()) throw std::runtime_error("Failed to open input file");

	// Neuron count.
	file >> N;
	if(!file) throw std::runtime_error("Malformed neural network file");

	// Input map: count then indices.
	file >> n;
	if(!file) throw std::runtime_error("Malformed neural network file");
	inputMap.resize(n);
	for( unsigned i = 0; i < n; i++ ) {
		file >> inputMap[i];
	}

	// Output map: count then indices.
	file >> n;
	if(!file) throw std::runtime_error("Malformed neural network file");
	outputMap.resize(n);
	for( unsigned i = 0; i < n; i++ ) {
		file >> outputMap[i];
	}
	// BUGFIX: previously a truncated/garbage file silently produced a
	// garbage network; detect stream failure before construction.
	if(!file) throw std::runtime_error("Malformed neural network file");

	GeneticNeuralNetwork * pNetwork = new GeneticNeuralNetwork(sFilePath, N, inputMap, outputMap);
	pNetwork->mpImpl->Load(file);

	file.close();
	return pNetwork;
}
