
// C++ standard library
#include <fstream>
#include <iomanip>
#include <iostream>
#include <map>
#include <sstream>
#include <string>
#include <vector>
// C compatibility headers
#include <cstdio>
#include <cstdlib>
#include <ctime>
#include <math.h>

using namespace std;

/**
 * Prints an error message to stdout and terminates the program with
 * exit code 1. Never returns.
 */
void error(const string& message) {
	cout << message << endl;
	exit(1);
}

/**
 * Returns the index of the first occurrence of the largest element of v.
 * Throws std::out_of_range (via .at) when v is empty, matching the
 * original behavior.
 * @param v vector of values (now taken by const reference, no copy)
 */
int max_position(const vector<double>& v) {

	// .at(0) deliberately throws on an empty vector
	int best_index = 0;
	double best_value = v.at(0);
	for (size_t i = 1; i < v.size(); ++i) {
		if (v.at(i) > best_value) {
			best_value = v.at(i);
			best_index = (int)i;
		}
	}
	return best_index;
}

/**
 * Prints every element of aVector with 4 decimal places, each followed by
 * delim, then prints "winner = <index>" where <index> is the position of
 * the first maximal element (0 for an empty vector).
 * printf is used deliberately so the 4-decimal precision does not depend
 * on cout's current formatting flags.
 */
void print_vector_and_winner(const vector<double>& aVector, const string& delim)
{
	for (size_t i = 0; i < aVector.size(); i++) {
		printf("%.4f%s", aVector[i], delim.c_str());
	}

	// index of the first maximal element
	size_t winner = 0;
	for (size_t k = 0; k < aVector.size(); k++) {
		if (aVector.at(winner) < aVector.at(k))
			winner = k;
	}
	cout << "winner = " << winner;
}

/**
 * Prints every element of aVector to stdout using cout's current
 * formatting, each element followed by delim.
 */
void print_vector(const vector<double>& aVector, const string& delim) {

	for (size_t i = 0; i < aVector.size(); i++) {
		cout << aVector[i] << delim;
	}
}

/**
 * One-hot encodes a Kohonen layer response: returns a vector of the same
 * size as output with 1.0 at the position of the first maximal element
 * and 0.0 everywhere else.
 * Throws std::out_of_range (via .at) when output is empty, matching the
 * original behavior.
 */
vector<double> transformKohonenOutput(const vector<double>& output) {

	// locate the first maximum; .at(0) throws on empty input
	size_t max_index = 0;
	double max_value = output.at(0);
	for (size_t i = 1; i < output.size(); ++i) {
		if (output.at(i) > max_value) {
			max_value = output.at(i);
			max_index = i;
		}
	}

	vector<double> result(output.size(), 0.0);
	result.at(max_index) = 1.0;
	return result;
}

/**
 * Converts training data into test data by keeping only the input part
 * (element 0) of every training case and dropping the expected output.
 * Throws std::out_of_range (via .at) if a case has no input vector.
 * The parameter is now taken by const reference - the original copied
 * the whole three-level structure on every call.
 */
vector<vector<double> > traindata_to_testdata(const vector<vector<vector<double> > >& trainData) {

	vector<vector<double> > result;
	result.reserve(trainData.size());
	for (size_t i = 0; i < trainData.size(); ++i) {
		result.push_back(trainData.at(i).at(0));
	}
	return result;
}

/**
 * Converts string s to a value of type T using the stream manipulator f
 * (e.g. std::dec, std::hex).
 * @param t receives the converted value on success
 * @return true when the conversion succeeded, false otherwise
 */
template <class T> bool from_string(T& t,
		const std::string& s,
		std::ios_base& (*f)(std::ios_base&))
{
  std::istringstream iss(s);
  return !(iss >> f >> t).fail();
}
// (removed the stray ';' that followed the function body)

/**
 * Replaces every occurrence of find_what in str with replace_with,
 * scanning left to right and skipping over freshly inserted text (so
 * replace_with may contain find_what without re-matching).
 * BUG FIX: an empty find_what is now ignored - the original looped
 * forever when both find_what and replace_with were empty.
 */
void replace(string &str, const string &find_what, const string &replace_with)
{
	if (find_what.empty())
		return;
	string::size_type pos = 0;
	while ((pos = str.find(find_what, pos)) != string::npos)
	{
		// single splice instead of erase + insert
		str.replace(pos, find_what.length(), replace_with);
		pos += replace_with.length();
	}
}

/**
 * Splits str on any character from delim; empty tokens are dropped.
 * BUG FIX: the position variable used to be 'unsigned int'. On platforms
 * where std::string::size_type is 64-bit, find_first_of's npos return was
 * truncated to 0xFFFFFFFF, so the "not found" comparison never matched
 * and the loop misbehaved (whole-string pushes / out-of-range substr).
 */
vector<string> string_split(string str, string delim)
{
	vector<string> results;
	string::size_type cutAt;
	while ((cutAt = str.find_first_of(delim)) != string::npos)
	{
		if (cutAt > 0)
		{
			results.push_back(str.substr(0, cutAt));
		}
		str = str.substr(cutAt + 1);
	}
	if (str.length() > 0)
	{
		results.push_back(str);
	}
	return results;
}

/**
 * Splits str on whitespace and converts every token to double.
 * Decimal commas are accepted: each ',' is first turned into '.'.
 * Terminates the program (via error) when a token is not a number.
 */
vector<double> split_to_doubles(string str)
{
	// accept both "1,5" and "1.5"
	replace(str, ",", ".");

	vector<double> result;
	stringstream tokens(str);
	for (string token; tokens >> token; )
	{
		double value = 0;
		if (!from_string<double>(value, token, std::dec))
			error("ERROR from_string<double>(d, buf, std::dec)");
		result.push_back(value);
	}
	return result;
}

/**
 * Splits str on whitespace and converts every token to int.
 * Terminates the program (via error) when a token is not an integer.
 */
vector<int> split_to_integers(string str)
{
	vector<int> result;
	stringstream tokens(str);
	for (string token; tokens >> token; )
	{
		int value = 0;
		if (!from_string<int>(value, token, std::dec))
			error("ERROR from_string<int>(d, buf, std::dec)");
		result.push_back(value);
	}
	return result;
}

/**
 * Tests whether line begins with prefix.
 * @param rest output parameter: on success receives the part of line
 *             after the prefix; left untouched when the prefix does not
 *             match.
 * @return true when line starts with prefix (a line shorter than the
 *         prefix never matches - compare() clamps the length safely).
 */
bool is_line_starts_with(const string& line, const string& prefix, string& rest)
{
	if (line.compare(0, prefix.size(), prefix) == 0) {
		// substr clamps to the end of the string, so no explicit
		// length (the original's "size()-prefix.size()+1") is needed
		rest = line.substr(prefix.size());
		return true;
	}
	return false;
}

/**
 * Reads a matrix of doubles: one whitespace-separated data set per line.
 * Blank lines (and lines with no numeric tokens) are skipped.
 * Terminates the program when the file cannot be opened.
 * BUG FIX: loops on the getline result instead of eof(), so a failed
 * final read is no longer processed as data.
 */
vector<vector<double> > read_input_data(const string& filepath)
{
	vector<vector<double> > result;

	ifstream myfile(filepath.c_str());
	if (!myfile.is_open()) {
		error("ERROR while opening read_input_data");
	}

	string line;
	while (getline(myfile, line))
	{
		vector<double> dataset = split_to_doubles(line);
		if (!dataset.empty())
			result.push_back(dataset);
	}
	return result;
}

/**
 * Reads training data into a three-level structure:
 *   level 1 - training cases, separated by ';'
 *   level 2 - the vectors of a case (input, expected output), separated
 *             by ','
 *   level 3 - single numbers, separated by spaces or tabs
 * Terminates the program on I/O errors or malformed content.
 * BUG FIXES: loops on the getline result instead of eof(); whitespace
 * after the final ';' (e.g. a trailing newline) is skipped instead of
 * being reported as a format error.
 */
vector<vector<vector <double> > > read_input_learndata(const string& filepath)
{
	vector<vector<vector<double> > > result;
	ifstream myfile(filepath.c_str());
	if (!myfile.is_open()) {
		error("ERROR while opening read_input_learndata");
	}

	string line;
	while (getline(myfile, line, ';'))
	{
		// ignore whitespace-only segments (trailing newline after ';')
		if (line.find_first_not_of(" \t\r\n") == string::npos)
			continue;

		vector<vector<double> > input_and_output;
		vector<string> splited_line = string_split(line, ",");

		for (size_t i = 0; i < splited_line.size(); i++) {
			vector<double> d = split_to_doubles(splited_line.at(i));

			if (d.empty()) {
				error("ERROR read_input_learndata : wrong file format");
			}
			input_and_output.push_back(d);
		}
		if (input_and_output.empty()) {
			error("ERROR read_input_learndata : wrong file format");
		}

		result.push_back(input_and_output);
	}
	return result;
}

/**
 * Reads test data: vectors of doubles separated by ';'.
 * Terminates the program when the file cannot be opened.
 * BUG FIXES: loops on the getline result instead of eof(); empty data
 * sets (e.g. from trailing whitespace after the final ';') are no longer
 * pushed - the original forwarded them to fire(), which then aborted on
 * its input-size check.
 */
vector<vector<double> > read_input_testdata(const string& filepath)
{
	vector<vector<double> > result;
	ifstream myfile(filepath.c_str());
	if (!myfile.is_open()) {
		error("ERROR while opening read_input_testdata");
	}

	string line;
	while (getline(myfile, line, ';'))
	{
		vector<double> dataset = split_to_doubles(line);
		if (!dataset.empty())
			result.push_back(dataset);
	}
	return result;
}

/**
 * Interface for neuron activation functions.
 */
class ActivationFunction {
public:
	// BUG FIX: a virtual destructor is required - instances are created
	// with new and held through base-class pointers elsewhere in the
	// program; deleting through the base would otherwise be UB.
	virtual ~ActivationFunction() {}

	/// Value of the activation function at x.
	virtual double evaluate(double x) const = 0;
	/// Value of the first derivative of the activation function at x.
	virtual double evaluateDerivative(double x) const = 0;
};

/**
 * Logistic sigmoid activation: s(x) = 1 / (1 + e^-x).
 * (The class name keeps the original spelling for compatibility.)
 */
class SigmoidActivationFuncion : public ActivationFunction {
public:
	double evaluate(double x) const {
		const double denominator = 1.0 + exp(-x);
		return 1.0 / denominator;
	}

	double evaluateDerivative(double x) const {
		// s'(x) = s(x) * (1 - s(x)), where s is the sigmoid itself
		const double s = evaluate(x);
		return s * (1 - s);
	}
};

/**
 * Identity activation: f(x) = x, with constant derivative f'(x) = 1.
 */
class LinearActivationFuncion : public ActivationFunction {
public:
	double evaluate(double x) const { return x; }

	double evaluateDerivative(double x) const { return 1.0; }
};

/**
 * Heaviside step activation: 1 for x >= 0, otherwise 0.
 * The derivative is reported as 0 everywhere.
 */
class ThresholdActivationFuncion : public ActivationFunction {
public:
	double evaluate(double x) const {
		return x >= 0 ? 1.0 : 0.0;
	}

	double evaluateDerivative(double x) const {
		return 0.0;
	}
};

/**
 * A single neuron. Stores its outgoing connections (weights to neurons of
 * the next layer, keyed by the target neuron's address) and a bias.
 * inputPotential/outputPotential are scratch state used during a forward
 * pass.
 */
class Neuron {
private:
	int id;                                  // unique, assigned at construction
	map<const Neuron*,double> connections;   // target neuron -> weight
	double bias;

public:
	double inputPotential;
	double outputPotential;

	// BUG FIX: bias and the potentials were left uninitialized.
	Neuron() : bias(0.0), inputPotential(0.0), outputPotential(0.0) {
		static int next_id = 0;   // shared counter; not thread-safe
		id = next_id++;
	}

	void setConnection(const Neuron& neuron, double weight) {
		connections[&neuron] = weight;
	}

	// Returns 0.0 for an unknown target WITHOUT inserting a map entry
	// (the original used operator[], which silently grew the map on
	// every lookup of a missing connection).
	double getConnectionWeight(const Neuron& neuron) {
		map<const Neuron*,double>::const_iterator it = connections.find(&neuron);
		return it == connections.end() ? 0.0 : it->second;
	}

	void setBias(double aBias) {
		bias = aBias;
	}
	double getBias() const {
		return bias;
	}

	int getId() const {
		return id;
	}

};

class Network {
protected:
	vector<vector<Neuron> > layers;
	vector<int> num_of_neurons_in_layers;
	ActivationFunction* activationFunction;

	void clearPotential() {
		for (unsigned int i = 0; i < layers.size(); ++i) {
			vector<Neuron>& neurons = layers.at(i);
			for (unsigned int j = 0; j < neurons.size(); ++j) {
				neurons.at(j).inputPotential = 0;
				neurons.at(j).outputPotential = 0;
			}
		}
	}

public:
	Network(){

	}

	int getNumOfLayers() const { return num_of_neurons_in_layers.size(); };

	int getNumOfNeuronsInLayer(int aLayerNum) { return num_of_neurons_in_layers[aLayerNum]; }

	virtual void setNumOfNeuronsInLayers(const vector<int>& aNum_of_neurons_in_layers)
	{
		num_of_neurons_in_layers = aNum_of_neurons_in_layers;

		for (unsigned int i = 0; i < num_of_neurons_in_layers.size(); i++) {
			int num_of_neutrons = num_of_neurons_in_layers.at(i);
			vector<Neuron> layer;
			for (int j = 0; j < num_of_neutrons; j++) {
				Neuron neuron;
				layer.push_back(neuron);
			}
			layers.push_back(layer);
		}
	}

	virtual void setActivationFunction(ActivationFunction* aActivationFunction) {
		activationFunction = aActivationFunction;
	}

	void setConnectionWeight(int aLayerNum, int aNeuronNum, double weight, int aNextLayerTargetNeuronNum) {
		Neuron& targetNeuron = layers.at(aLayerNum+1).at(aNextLayerTargetNeuronNum);
		layers.at(aLayerNum).at(aNeuronNum).setConnection(targetNeuron,weight);
	}

	void setNeuronBias(int aLayerNum, int aNeuronNum, double bias) {
		layers.at(aLayerNum).at(aNeuronNum).setBias(bias);
	}

	vector<double> fire(const vector<double> input)
	{
		//cout << "Firing started" << endl;
		if ((int)input.size() != num_of_neurons_in_layers.at(0)) {
			error("ERROR wrong number of input data");
		}
		clearPotential();

		// initialize first layer
		vector<Neuron>& neurons = layers.at(0);
		for (int i = 0; i < num_of_neurons_in_layers.at(0); i++) {
			neurons.at(i).outputPotential = input.at(i);
		}

		int num_of_layers = num_of_neurons_in_layers.size();

		for (unsigned int i = 0; i < num_of_layers-1; i++)
		{
			vector<Neuron>& currentLayer = layers.at(i);
			vector<Neuron>& nextLayer = layers.at(i+1);

			for (unsigned int j = 0; j < currentLayer.size(); j++) {
				Neuron& sourceNeuron = currentLayer.at(j);
				for (unsigned int k = 0; k < nextLayer.size(); k++) {
					Neuron& targetNeuron = nextLayer.at(k);

					double weight = sourceNeuron.getConnectionWeight(targetNeuron);
					//cout << "mnoze wage " << weight << " razy potencjal " << sourceNeuron.outputPotential << endl;
					targetNeuron.inputPotential += weight * sourceNeuron.outputPotential;
					//cout << "targetNeuron.inputPotential " << targetNeuron.inputPotential << endl;

				}
			}

			// adding bias and activating
			for (int j = 0; j < nextLayer.size(); ++j) {

				Neuron& neuron = nextLayer.at(j);
				neuron.inputPotential -= neuron.getBias();
				//printf("inputPotential: %.4f\n", neuron.inputPotential);
				neuron.outputPotential = activationFunction->evaluate(neuron.inputPotential);
				//cout << "output potential = " << neuron.outputPotential << endl;
			}
		}

		vector<double> result;
		vector<Neuron>& last_layer = layers.at(layers.size()-1);
		for (int i = 0; i < last_layer.size(); ++i) {
			Neuron& neuron = last_layer.at(i);
			result.push_back(neuron.outputPotential);
		}
		return result;
	}

	void printWeights() {
		for (int i = 0; i < getNumOfLayers()-1; ++i) {
			cout << "Warstwa " << i << endl;

			for (int j = 0; j < layers.at(i).size(); ++j) {
				Neuron& source = layers.at(i).at(j);
				cout << "\t neuron " << j << " bias = " << source.getBias() << " ";
				for (int k = 0; k < layers.at(i+1).size(); ++k) {
					Neuron& target = layers.at(i+1).at(k);
					cout << source.getConnectionWeight(target) << " ";
				}
				cout << endl;
			}
		}
	}
};

/**
 * Single-layer perceptron (input layer + output layer) trained with the
 * delta rule. In the counterpropagation network it plays the role of the
 * Grossberg (outstar) layer.
 */
class PerceptronNetwork: public Network {
private:
	int learning_epochs;
	vector<double> learning_speeds;   // one learning rate per epoch
	int iterations_per_epoch;
public:

	// BUG FIX: the configuration fields were uninitialized until the
	// corresponding setters were called.
	PerceptronNetwork() : learning_epochs(0), iterations_per_epoch(0) {
	}

	void setLearningEpochs(int x) {
		learning_epochs = x;
	}

	void setLearningSpeeds(vector<double>& x) {
		learning_speeds = x;
	}

	void setIterationsPerEpoch(int x) {
		iterations_per_epoch = x;
	}

	/**
	 * Delta-rule training.
	 * learningData[i][0] is the input vector of case i and
	 * learningData[i][1] the expected output vector.
	 * Requires learning_speeds.size() >= learning_epochs (enforced by
	 * .at). The data is now taken by const reference - the original
	 * copied the whole three-level structure.
	 */
	void learn(const vector<vector<vector<double> > >& learningData) {

		for (int epoch = 0; epoch < learning_epochs; epoch++) {
			double rate = learning_speeds.at(epoch);

			for (int it = 0; it < iterations_per_epoch; ++it) {

				for (size_t i = 0; i < learningData.size(); ++i) {
					const vector<vector<double> >& learningCase = learningData.at(i);

					if (learningCase.at(0).size() != layers.at(0).size()) {
						error("ERROR perceptron.learn");
					}
					// forward pass fills the potentials used below
					fire(learningCase.at(0));

					for (size_t inInd = 0; inInd < layers.at(0).size(); ++inInd) {
						Neuron& sourceNeuron = layers.at(0).at(inInd);

						for (size_t outInd = 0; outInd < layers.at(1).size(); ++outInd) {
							Neuron& targetNeuron = layers.at(1).at(outInd);
							double expected_output = learningCase.at(1).at(outInd);
							double derivative = activationFunction->evaluateDerivative(targetNeuron.inputPotential);
							double weight = sourceNeuron.getConnectionWeight(targetNeuron);

							// delta rule: rate * error * f'(net) * input
							double delta_weight = rate *
								(expected_output - targetNeuron.outputPotential) *
								derivative * sourceNeuron.outputPotential;

							sourceNeuron.setConnection(targetNeuron, weight + delta_weight);
						}
					}
				}
			}
		}
	}
};

class KohonenNetwork: public Network {

private:
	int neighbourhood;
	int learning_epochs;
	vector<double> learning_speeds;
	int iterations_per_epoch;

public:
	KohonenNetwork() : Network() { }

	void setNeighourhood(int x) {
		neighbourhood = x;
	}

	void setLearningEpochs(int x) {
		learning_epochs = x;
	}

	void setLearningSpeeds(vector<double>& x) {
		learning_speeds = x;
	}

	void setIterationsPerEpoch(int x) {
		iterations_per_epoch = x;
	}

	void learn(vector<vector <double> > trainData)
	{
		bool debug = false;

		if (debug)
			printf("learn\n");

		for (int epoch = 0; epoch< learning_epochs; epoch++ ) {
			double rate = learning_speeds.at(epoch);

			if (debug)
				printf("  epoch = %i, rate = %f\n", epoch, rate);

			for (int i = 0; i < iterations_per_epoch; i++ ) {

				// debug
				if (debug && i < 10) {
					printf("\n");
					printf("    iteration = %i\n", i);
				}

				for (int j = 0; j < trainData.size(); j++) {

					// debug
					if (debug && i < 10) {
						printf("\n");
						printf("      trainDataNum = %i\n", j);
						printf("        ");
						for (int l = 0; l < trainData.at(j).size(); l++ )
							printf("%.0f ", trainData.at(j).at(l));
						printf("\n");
					}

					//get normalize input
					double normfac = normalizeInput(trainData.at(j));

					// debug
					if (debug && i < 10)
						printf("      normfac = %f\n", normfac);

					// debug
					if (debug && i < 10) {
						printf("      neurons weights\n");
						for (int neuron = 0; neuron < layers.at(1).size(); neuron++ ) {
							Neuron& targetNeuron = layers.at(1).at(neuron);

							printf("        %i: ", neuron);
							for (int j = 0; j < layers.at(0).size(); j++ ) {
								Neuron& sourceNeuron = layers.at(0).at(j);
								printf("%.2f ", sourceNeuron.getConnectionWeight(targetNeuron));
							}
							printf("\n");

						}
					}

					//fire train input
					vector<double> outcomes = fire(trainData.at(j), normfac);

					// debug
					if (debug && i < 10) {
						printf("      outcomes:\n");
						printf("        ");
						for (int l = 0; l < outcomes.size(); l++ )
							printf("%.3f ", outcomes.at(l));
						printf("\n");
					}

					//find winner
					int winner = 0;
					for ( int k = 0; k < outcomes.size(); k++ )
						if ( outcomes.at(winner) < outcomes.at(k) )
							winner = k;

					// debug
					if (debug && i < 10)
						printf("      winner = %i\n", winner);

					//find neighbours
					vector<int> neighbours;
					if (neighbourhood == 1)
						neighbours = neighbours1D(winner);
					else if (neighbourhood == 2)
						neighbours = neighbours2D(winner);

					// debug
					if (debug && i < 10) {
						printf("      neighbours:\n");
						printf("        ");
						for (int l = 0; l < neighbours.size(); l++ )
							printf("%i ", neighbours.at(l));
						printf("\n");
					}

					//correct neurons' weights
					adjustWeights(rate, winner, neighbours, trainData.at(j));

					normalizeWeights();

					// debug
					if (debug && i < 10) {
						printf("      neurons weights after adjusting\n");
						for (int neuron = 0; neuron < layers.at(1).size(); neuron++ ) {
							Neuron& targetNeuron = layers.at(1).at(neuron);

							printf("        %i: ", neuron);
							for (int j = 0; j < layers.at(0).size(); j++ ) {
								Neuron& sourceNeuron = layers.at(0).at(j);
								printf("%.2f ", sourceNeuron.getConnectionWeight(targetNeuron));
							}
							printf("\n");

						}
					}
				}
			}
		}
	}

	void adjustWeights(double rate, int winner, vector<int> &neighbours,
			vector<double> &trainData) {

		vector<Neuron> &inputNeurons = layers.at(0);
		vector<Neuron> &kohonenNeurons = layers.at(1);

		Neuron& winnerNeuron = kohonenNeurons.at(winner);

		for (int j = 0; j < inputNeurons.size(); j++ ) {
			Neuron& sourceNeuron = inputNeurons.at(j);
			double oldWeight = sourceNeuron.getConnectionWeight(winnerNeuron);

			double newWeight = oldWeight + rate * (trainData.at(j) - oldWeight) * 1;

			sourceNeuron.setConnection(winnerNeuron, newWeight);
		}

		for (int k = 0; k < neighbours.size(); k++ ) {
			int neighbour = neighbours.at(k);

			Neuron& targetNeuron = kohonenNeurons.at(neighbour);

			for (int j = 0; j < inputNeurons.size(); j++ ) {
				Neuron& sourceNeuron = inputNeurons.at(j);
				double oldWeight = sourceNeuron.getConnectionWeight(targetNeuron);

				double newWeight = oldWeight + rate * (trainData.at(j) - oldWeight) * ( 1.0 / 2.0 );

				sourceNeuron.setConnection(targetNeuron, newWeight);
			}

		}

	}

	void normalizeWeights() {
		for (int neuron = 0; neuron < layers.at(1).size(); neuron++ ) {
			Neuron& targetNeuron = layers.at(1).at(neuron);

			//make output weights vector
			vector<double> outputWeights;
			for (int j = 0; j < layers.at(0).size(); j++ ) {
				Neuron& sourceNeuron = layers.at(0).at(j);
				double weight = sourceNeuron.getConnectionWeight(targetNeuron);
				outputWeights.push_back(weight);
			}

			double len = vectorLength( outputWeights );
			if (len < 1.e-30)
				len = 1.e-30 ;

			double fac = 1.0 / sqrt(len);

			for (int j = 0; j < layers.at(0).size(); j++ ) {
				Neuron& sourceNeuron = layers.at(0).at(j);
				sourceNeuron.setConnection(targetNeuron, outputWeights.at(j)*fac);
			}

		}
	}

	double vectorLength (const vector<double> &vec)
	{
		double sum = 0.0;

		for (int i=0;i<vec.size();i++ )
			sum += vec.at(i) * vec.at(i);

		return sum;
	}

	double normalizeInput(const vector<double> &input)
	{
		double length;

		length = vectorLength(input);
		if(length < 1.e-30)
			length = 1.e-30 ;

		return 1.0 / sqrt(length);
	}

	vector<int> neighbours1D(int winner) {
		int size = layers.at(1).size();

		vector<int> line;

		for (int i = 0; i< num_of_neurons_in_layers[1]; i++ ) {
			line.push_back(i);
		}

		vector<int> neighbours;

		if (winner > 0)
			neighbours.push_back(line.at(winner - 1));
		if (winner < size - 1)
			neighbours.push_back(line.at(winner + 1));

		return neighbours;
	}

	//todo: optimization: square might be made once for network
	vector<int> neighbours2D(int winner) {
		bool debug = false;

		//debug
		if (debug)
			printf("\nneighbours2D\n");

		int size = layers.at(1).size();

		// debug
		if (debug)
			printf("  size = %i\n", size);

		int cols = ceil(sqrt(size));

		// debug
		if (debug)
			printf("  cols = %i\n", cols);

		int winner_x = -1, winner_y = -1;
		vector< vector <int> >& square = getSquare(winner, cols);

		for (int i = 0; i < square.size() && (winner_x < 0 || winner_y < 0); i++)
			for (int j = 0; j < cols && (winner_x < 0 || winner_y < 0); j++)
				if (square.at(i).at(j) == winner) {
					winner_y = i;
					winner_x = j;
				}

		vector<int> neighbours;

		// debug
		if (debug) {
			printf("  winner_x = %i\n", winner_x);
			printf("  winner_y = %i\n", winner_y);
		}

		if (winner_x > 0)
			neighbours.push_back(square.at(winner_y).at(winner_x-1));
		if (winner_x < cols - 1 && square.at(winner_y).at(winner_x + 1) >= 0)
			neighbours.push_back(square.at(winner_y).at(winner_x+1));
		if (winner_y > 0)
			neighbours.push_back(square.at(winner_y - 1).at(winner_x));
		if (winner_y < cols - 1 && square.at(winner_y + 1).at(winner_x) >= 0)
			neighbours.push_back(square.at(winner_y + 1).at(winner_x));

		//debug
		if (debug)
			printf("\n");

		return neighbours;

	}

private:
	vector< vector <int> > square;

	vector< vector <int> >& getSquare(int winner, int cols) {
		bool debug = false;

		if (square.size() == 0) {

			vector<int> row;
			int i,j;
			int rowNum = 0;
			for ( i = 0, j = 0; i < num_of_neurons_in_layers[1]; i++ ) {
				row.push_back(i);

				if (j == cols - 1) {
					square.push_back(row);

					// debug
					if (debug) {
						printf("    rowNum = %i\n", rowNum);
						printf("      ");
						for (int l = 0; l < row.size(); l++)
							printf("%i ", row.at(l));
						printf("\n");
					}

					j = 0;
					row.clear();
					rowNum++;
				}
				else
					j++;
			}

			if (j != 0)
				while (++j < cols)
					row.push_back(-1);
		}

		return square;
	}

public:

	double dotProduct(const vector<double>& vec1, const vector<double>& vec2)
	{
		double sum = 0;
		for (int i = 0; i < vec1.size(); i++ )
			sum += vec1.at(i) * vec2.at(i);

		return sum;
	}

	vector<double> fire(const vector<double>& input, double normfac) {
		//cout << "Firing started" << endl;
		if ((int)input.size() != num_of_neurons_in_layers[0]) {
			error("ERROR wrong number of input data");
		}
		clearPotential();

		vector<double> result;

		for (int i = 0; i < layers.at(1).size(); i++) {
			Neuron& targetNeuron = layers.at(1).at(i);

			//make output weights vector
			vector<double> outputWeights;
			for (int j = 0; j < layers.at(0).size(); j++ ) {
				Neuron& sourceNeuron = layers.at(0).at(j);
				double weight = sourceNeuron.getConnectionWeight(targetNeuron);
				outputWeights.push_back(weight);
			}

			//count output
			targetNeuron.outputPotential = dotProduct(input, outputWeights) * normfac;

			result.push_back(targetNeuron.outputPotential);
		}

		return result;
	}

};

/**
 * Counterpropagation network: a Kohonen (competitive) layer followed by a
 * perceptron acting as the Grossberg/outstar layer.
 */
class CounterPropagationNetwork : public Network {
private:
	KohonenNetwork kohonen;
	PerceptronNetwork perceptron;

public:

	KohonenNetwork& getKohonenNetwork() {
		return kohonen;
	}

	PerceptronNetwork& getPerceptronNetwork() {
		return perceptron;
	}

	/**
	 * Expects exactly 3 layer sizes: input, Kohonen (hidden) and output.
	 * Builds the two sub-networks sharing the middle layer size.
	 */
	virtual void setNumOfNeuronsInLayers(const vector<int>& aNum_of_neurons_in_layers)
	{
		if (aNum_of_neurons_in_layers.size() != 3) {
			error("ERROR counterpropagation network must have 3 layers");
		}
		vector<int> kohonenNumOfNeurons;
		kohonenNumOfNeurons.push_back(aNum_of_neurons_in_layers.at(0));
		kohonenNumOfNeurons.push_back(aNum_of_neurons_in_layers.at(1));
		kohonen.setNumOfNeuronsInLayers(kohonenNumOfNeurons);

		vector<int> perceptronNumOfNeurons;
		perceptronNumOfNeurons.push_back(aNum_of_neurons_in_layers.at(1));
		perceptronNumOfNeurons.push_back(aNum_of_neurons_in_layers.at(2));
		perceptron.setNumOfNeuronsInLayers(perceptronNumOfNeurons);
	}

	/// Only the perceptron part uses an activation function.
	virtual void setActivationFunction(ActivationFunction* aActivationFunction) {
		perceptron.setActivationFunction(aActivationFunction);
	}

	/**
	 * Trains the Kohonen layer first, then uses its one-hot responses as
	 * training input for the perceptron layer. Prints a diagnostic check
	 * verifying that every training case activates a distinct neuron.
	 * The data is now taken by const reference (the original copied it).
	 */
	void learn(const vector<vector<vector <double> > >& trainData)
	{
		bool debug = true;

		vector<vector<double> > kohonenTrainData = traindata_to_testdata(trainData);
		kohonen.learn(kohonenTrainData);

		// build the training set for the Grossberg layer: the one-hot
		// Kohonen response becomes the input, the expected output stays.
		// NOTE(review): normfac is fixed at 1.0 here while fire() below
		// uses normalizeInput - looks intentional but worth confirming.
		vector<vector<vector<double> > > outstarTrainData;
		for (size_t i = 0; i < trainData.size(); ++i) {
			vector<vector<double> > outstarTrainDataCase;
			vector<double> kohonenTransformedOutput = transformKohonenOutput(kohonen.fire(trainData.at(i).at(0), 1.0));
			outstarTrainDataCase.push_back(kohonenTransformedOutput);
			outstarTrainDataCase.push_back(trainData.at(i).at(1));

			if (debug) {
				cout << "kohonen output : ";
				print_vector_and_winner(kohonenTransformedOutput, " ");
				cout << endl;
			}
			outstarTrainData.push_back(outstarTrainDataCase);
		}

		// sanity check: every training case should win on a different
		// neuron (expected for our data)
		bool ok = true;
		for (size_t neuronNum = 0; neuronNum < outstarTrainData.size(); ++neuronNum) {
			bool is_winner = false;
			for (size_t caseNum = 0; caseNum < outstarTrainData.size(); ++caseNum) {
				int winnerNum = max_position(outstarTrainData.at(caseNum).at(0));
				if (winnerNum == (int)neuronNum) {
					is_winner = true;
				}
			}
			if (!is_winner) {
				ok = false;
			}
		}
		if (ok) {
			cout << endl;
			cout << "Warstwa Kohonena OK\n";
			cout << endl;
		}
		else {
			cout << endl;
			cout << "BŁĄD Warstwa Kohonena nie OK\n";
			cout << endl;
		}

		perceptron.learn(outstarTrainData);
		//perceptron.printWeights();
	}

	/// Full forward pass: Kohonen response -> one-hot -> perceptron.
	vector<double> fire(const vector<double>& dataVector) {
		double normfac = kohonen.normalizeInput(dataVector);
		vector<double> kohonen_output = kohonen.fire(dataVector, normfac);

		vector<double> transformedKohonenOutput = transformKohonenOutput(kohonen_output);
		return perceptron.fire(transformedKohonenOutput);
	}
};

/**
 * Reads network settings from a simple "key: values" text file and
 * applies them to a network. Unknown lines are ignored.
 */
class NetworkConfigurator {
public:

	/**
	 * Common settings: layer count, neurons per layer, activation
	 * function. Terminates the program on I/O or format errors.
	 * BUG FIX: loops on the getline result instead of eof().
	 */
	void configureNetwork(Network& network, string filepath) {

		const string NUM_OF_LAYERS = "num_of_layers:";
		const string NUM_OF_NEURONS_IN_LAYERS = "num_of_neurons_in_layers:";
		const string ACTIVATION_FUNCTION = "activation:";

		ifstream myfile(filepath.c_str());
		if (!myfile.is_open()) {
			error("ERROR while opening file with network");
		}

		int num_of_layers = -1;
		vector<int> num_of_neurons_in_layers;

		string line;
		while (getline(myfile, line))
		{
			string rest;

			if (is_line_starts_with(line, NUM_OF_LAYERS, rest))
			{
				from_string<int>(num_of_layers, rest, std::dec);
			}
			else if (is_line_starts_with(line, NUM_OF_NEURONS_IN_LAYERS, rest))
			{
				num_of_neurons_in_layers = split_to_integers(rest);
				// num_of_layers must appear earlier in the file
				if (num_of_layers != (int)num_of_neurons_in_layers.size()) {
					error("num_of_layers != num_of_neurons_in_layers.size()");
				}

				network.setNumOfNeuronsInLayers(num_of_neurons_in_layers);
			}
			else if (is_line_starts_with(line, ACTIVATION_FUNCTION, rest))
			{
				// NOTE(review): the activation object is never deleted;
				// the network does not take ownership either - a small
				// one-shot leak inherited from the original design.
				ActivationFunction* activationFunction = NULL;
				if (rest.find("linear") != string::npos) {
					activationFunction = new LinearActivationFuncion();
				}
				else if (rest.find("threshold") != string::npos) {
					activationFunction = new ThresholdActivationFuncion();
				}
				else if (rest.find("sigmoid") != string::npos) {
					activationFunction = new SigmoidActivationFuncion();
				}
				else {
					error("ERROR incorrect activation function");
				}
				network.setActivationFunction(activationFunction);
			}
		}
	}

	/**
	 * Kohonen-specific settings: weight initialization, neighbourhood
	 * type, epochs, learning speeds and iterations per epoch.
	 * BUG FIX: loops on the getline result instead of eof().
	 */
	void configureKohonenNetwork(KohonenNetwork& network, string filepath) {

		const string RANDOM_WEIGHTS = "randomize_weights:";
		const string ZERO_WEIGHTS = "zero_weights";
		const string NEIGHBOURHOOD = "neighbourhood:";
		const string LEARNING_EPOCHS = "learning_epochs:";
		const string LEARNING_SPEEDS = "learning_speeds:";
		const string ITERATIONS_PER_EPOCH = "iterations_per_epoch:";

		ifstream myfile(filepath.c_str());
		if (!myfile.is_open()) {
			error("ERROR while opening file with network");
		}

		string line;
		while (getline(myfile, line))
		{
			string rest;

			if (is_line_starts_with(line, RANDOM_WEIGHTS, rest))
			{
				// "randomize_weights: <min> <max>"
				vector<double> doubles = split_to_doubles(rest);
				double min = doubles.at(0);
				double max = doubles.at(1);

				int num_of_neurons = network.getNumOfNeuronsInLayer(0);
				int num_of_neurons_in_nextlayer = network.getNumOfNeuronsInLayer(1);

				srand((unsigned)time(NULL));

				for (int i = 0; i < num_of_neurons_in_nextlayer; i++) {
					// the Kohonen layer does not use biases
					network.setNeuronBias(1, i, 0);

					// (simplified from the original's off-by-one j loop)
					for (int j = 0; j < num_of_neurons; j++) {
						double random = (rand() / (double(RAND_MAX) + 1)) * (max - min) + min;
						cout << random << " ";
						network.setConnectionWeight(0, j, random, i);
					}
					cout << endl;
				}
				network.normalizeWeights();
			}
			else if (is_line_starts_with(line, ZERO_WEIGHTS, rest))
			{
				int num_of_neurons = network.getNumOfNeuronsInLayer(0);
				int num_of_neurons_in_nextlayer = network.getNumOfNeuronsInLayer(1);

				for (int i = 0; i < num_of_neurons_in_nextlayer; i++) {
					network.setNeuronBias(1, i, 0);

					for (int j = 0; j < num_of_neurons; j++) {
						cout << 0 << " ";
						network.setConnectionWeight(0, j, 0, i);
					}
					cout << endl;
				}
			}
			else if (is_line_starts_with(line, NEIGHBOURHOOD, rest))
			{
				network.setNeighourhood((int) split_to_doubles(rest).at(0));
			}
			else if (is_line_starts_with(line, LEARNING_EPOCHS, rest))
			{
				network.setLearningEpochs((int) split_to_doubles(rest).at(0));
			}
			else if (is_line_starts_with(line, LEARNING_SPEEDS, rest))
			{
				// named variable: the setter takes a non-const reference
				vector<double> doubles = split_to_doubles(rest);
				network.setLearningSpeeds(doubles);
			}
			else if (is_line_starts_with(line, ITERATIONS_PER_EPOCH, rest))
			{
				network.setIterationsPerEpoch((int) split_to_doubles(rest).at(0));
			}
		}
	}

	/**
	 * Perceptron (Grossberg) settings; keys carry a "grossberg_" prefix.
	 * BUG FIX: loops on the getline result instead of eof().
	 */
	void configurePerceptronNetwork(PerceptronNetwork& network, string filepath) {

		const string LEARNING_EPOCHS = "grossberg_learning_epochs:";
		const string LEARNING_SPEEDS = "grossberg_learning_speeds:";
		const string ITERATIONS_PER_EPOCH = "grossberg_iterations_per_epoch:";

		ifstream myfile(filepath.c_str());
		if (!myfile.is_open()) {
			error("ERROR while opening file with network");
		}

		string line;
		while (getline(myfile, line))
		{
			string rest;

			if (is_line_starts_with(line, LEARNING_EPOCHS, rest))
			{
				network.setLearningEpochs((int) split_to_doubles(rest).at(0));
			}
			else if (is_line_starts_with(line, LEARNING_SPEEDS, rest))
			{
				// named variable: the setter takes a non-const reference
				vector<double> doubles = split_to_doubles(rest);
				network.setLearningSpeeds(doubles);
			}
			else if (is_line_starts_with(line, ITERATIONS_PER_EPOCH, rest))
			{
				network.setIterationsPerEpoch((int) split_to_doubles(rest).at(0));
			}
		}
	}

	/// Applies both sub-network configurations from the same file.
	void configureCounterPropagationNetwork(CounterPropagationNetwork& cpn, string config_file) {

		configureKohonenNetwork(cpn.getKohonenNetwork(), config_file);
		configurePerceptronNetwork(cpn.getPerceptronNetwork(), config_file);
	}
};

int main(int argc, char** argv) {

	if (argc != 5) {
		cout << "Parametry : [rodzaj_sieci] [plik_z_konfiguracja] [plik_z_danymi_uczacymi] [plik_z_danymi_wejsciowymi" << endl;
		cout << "Rodzaje sieci: kohonena - 'koh', counterpropagation - 'cp'";
		exit(1);
	}
	cout << setprecision(3) << fixed;
	NetworkConfigurator nc;

	string network_type = argv[1];
	string config_file = argv[2];
	string learningdata_file = argv[3];
	string testdata_file = argv[4];

	cout << "CONFIG_FILE:        " << config_file << endl;
	cout << "LEARNINGDATA_FILE:  " << learningdata_file << endl;
	cout << "TESTDATA_FILE:      " << testdata_file << endl;
	cout << endl;

	if (network_type == "koh") {
		cout << "KOHONEN NETWORK" << endl;

		KohonenNetwork net;
		nc.configureNetwork(net,config_file);
		nc.configureKohonenNetwork(net,config_file);

		cout << "TRAINING DATA SETS:" << endl;
		vector< vector <double> > inputData = read_input_data(learningdata_file);
		cout << endl;

		cout << "LEARNING:" << endl;
		net.learn(inputData);
		cout << endl;

		cout << "PROCESSING:" << endl;
		inputData = read_input_data(testdata_file);

		for (int i=0; i<inputData.size(); i++) {
			for (int j=0; j<inputData[i].size(); j++) {
				cout << inputData[i][j] << " ";
			}
			double normfac = net.normalizeInput(inputData[i]);
			vector<double> result = net.fire(inputData[i], normfac);
			cout << "| result = ";
			print_vector_and_winner(result," ");
			cout << endl;
		}
		cout << "End...";
	}

	else if (network_type == "cp") {
		cout << "COUNTERPROPAGATION NETWORK" << endl;

		CounterPropagationNetwork cpn;
		nc.configureNetwork(cpn, config_file);
		nc.configureCounterPropagationNetwork(cpn, config_file);

		vector<vector<vector<double> > > learningData = read_input_learndata(learningdata_file);
		cout << "LEARNING DATA:\n";
		for (int i = 0; i < learningData.size(); ++i) {
			print_vector(learningData.at(i).at(0)," ");
			cout << " odpowiedz ";
			print_vector(learningData.at(i).at(1)," ");
			cout << endl;
		}

		cout << "UCZENIE SIECI ROZPOCZETE\n";
		cpn.learn(learningData);

		cout << "TEST FOR LEARNING DATA:" << endl;
		vector<vector<double> > learningTestData = traindata_to_testdata(learningData);
		for (int i = 0; i < learningTestData.size(); ++i) {
			print_vector(learningTestData.at(i)," ");

			cout << " odpowiedz sieci ";
			vector<double> result = cpn.fire(learningTestData.at(i));
			print_vector(result," ");
			cout << endl;
		}

		vector<vector<double> > testingData = read_input_testdata(testdata_file);
		cout << "TESTING DATA:" << endl;
		for (int i = 0; i < testingData.size(); ++i) {
			print_vector(testingData.at(i)," ");

			cout << " odpowiedz sieci ";
			vector<double> result = cpn.fire(testingData.at(i));
			print_vector(result," ");
			cout << endl;
		}
	}
}
