/*
 *  neuralNet.h
 *
 *  Created by Thomas Fu on 5/28/11.
 *  Copyright 2011 __MyCompanyName__. All rights reserved.
 */

#ifndef _neuralnet_h
#define _neuralnet_h

#include "Options.h"
#include <vector>
#include <sstream>
#include <fstream>
#include "xmlwriter.h"
#include <rapidxml.hpp>

template <typename numType>
class Node;

template <typename numType>
class NeuralNet;

using namespace std;

using namespace rapidxml;


/* * * * * * * * * * * * * * * * * LINK CLASS * * * * * * * * * * * * * * * * */

/**
 * @class Link
 *
 * @brief This class represents a single link between Nodes of a NeuralNet
 *
 * @tparam numType A numerical data type that will be the format used for data
 * stored in the NeuralNet
 *
 * This class represents a single link between Nodes of a NeuralNet. Because
 * a neural network is a kind of directed graph, these links have a direction
 * associated with them in the form of an input and output Node.
 */
template <typename numType>
class Link
{
	
public:
	
	/** 
	 * Constructor for a new Link. By default the Link assigns itself a random
	 * weight between -0.5 and 0.5. In this state it does not point to either an
	 * input or output Node.
	 */
	Link()
	{
		input = NULL;
		output = NULL;
		// rand() lies in [0, RAND_MAX], so this yields a weight in [-0.5, 0.5]
		weight = (rand() - (RAND_MAX/2)) / (numType)(RAND_MAX);
#ifdef DEBUGGING
		// These members only exist when DEBUGGING is defined, so they must only
		// be initialized under the same guard (otherwise this fails to compile).
		inputNodeSet = false;
		outputNodeSet = false;
#endif
	}
	
	/** 
	 * Constructor for a new Link with an explicitly specified weight. In this 
	 * state it does not point to either an input or output Node.
	 *
	 * @param weight The initial weight to assign to the Link
	 */
	Link(numType weight)
	{
		input = NULL;
		output = NULL;
		this->weight = weight;
#ifdef DEBUGGING
		inputNodeSet = false;
		outputNodeSet = false;
#endif
	}
	
	/**
	 * Destructor for the Link class. Although Links internally maintain pointers 
	 * to input and output nodes, it is assumed that the memory for these is 
	 * deallocated somewhere else (namely in the NeuralNet that contains the Link)
	 */
	~Link()
	{
	}
	
	/**
	 * Adjusts the weight of the given Link. The exact method for this adjustment
	 * is subject to determination at runtime; currently standard backpropagation
	 * is the only implemented rule.
	 */
	void adjustWeight()
	{
		adjustWeightBackprop();
	}
	
	/**
	 * Returns a pointer to the input Node of the Link.
	 *
	 * @return A pointer to the input Node of the Link. If no input Node exists,
	 * NULL will be returned.
	 */
	Node<numType> *getInputNode()
	{
		return input;
	}
	
	/**
	 * Sets the input Node to the current Link. Should only be called once per 
	 * Link (enforced by assertion in DEBUGGING builds).
	 *
	 * @param input A pointer to a Node that is to be set as the input to the 
	 * current Link.
	 */
	void setInputNode(Node<numType> *input)
	{
#ifdef DEBUGGING
		assert(!inputNodeSet);
		inputNodeSet = true;
#endif
		this->input = input;
	}
	
	/**
	 * Returns a pointer to the output Node of the Link.
	 *
	 * @return A pointer to the output Node of the Link. If no output Node exists,
	 * NULL will be returned.
	 */
	Node<numType> *getOutputNode()
	{
		return output;
	}
	
	/**
	 * Sets the output Node to the current Link. Should only be called once per 
	 * Link (enforced by assertion in DEBUGGING builds).
	 *
	 * @param output A pointer to a Node that is to be set as the output to the 
	 * current Link.
	 */
	void setOutputNode(Node<numType> *output)
	{
#ifdef DEBUGGING
		assert(!outputNodeSet);
		outputNodeSet = true;
#endif
		this->output = output;
	}
	
	/**
	 * Returns the value of the weight on the current Link.
	 *
	 * @return The numerical value of the weight assigned to the Link.
	 */
	numType getWeight()
	{
		return weight;
	}
	
private:
	
	/** A pointer to the input Node of the Link */
	Node<numType> *input;
	
	/** A pointer to the output Node of the Link */
	Node<numType> *output;
	
	/** The weight of the Link in the network */ 
	numType weight;
	
#ifdef DEBUGGING
	/** 
	 * Boolean value that specifies whether or not the input Node for this Link
	 * has been set.
	 */
	bool inputNodeSet;
	
	/** 
	 * Boolean value that specifies whether or not the output Node for this Link
	 * has been set.
	 */
	bool outputNodeSet;
#endif
	
	/**
	 * Adjusts the weight of the Link according to the rule for standard 
	 * backpropagation: delta = learningRate * outputError * inputValue.
	 * The leading 1 is the (currently fixed) learning rate.
	 */
	inline void adjustWeightBackprop()
	{
		weight += 1 * output->getError() * input->getValue();
	}
};


/* * * * * * * * * * * * * * * * * NODE CLASS * * * * * * * * * * * * * * * * */

/**
 * @class Node
 *
 * @brief This class represents a single Node (or Neuron) in the NeuralNet
 *
 * @tparam numType A numerical data type that will be the format used for data
 * stored in the NeuralNet
 *
 * This class represents a single Node (Neuron) in the NeuralNet. Because a 
 * neural network is a directed graph, the Node maintains lists of the input
 * and output Links (arcs) to itself. 
 */
template <typename numType>
class Node
{
	
public:
	
	/** 
	 * Constructor for a new Node. By default a new Node has no input or output 
	 * Links and has a value and error of 0.
	 */
	Node()
	{
		value = 0;
		error = 0;
	}
	
	/**
	 * Destructor for the Node class. Because no memory is allocated in the 
	 * construction of a Node, nothing is done here.
	 */
	~Node()
	{
	}
	
	/**
	 * Pulls values from all Nodes connected to the current Node across the 
	 * input Links according to the weights of those Links. Sums all input signals
	 * and applies a sigmoid transfer function to that sum, storing the result as 
	 * the Node's value.
	 *
	 * @param disp If true, prints the pre- and post-sigmoid values to stdout
	 * (debugging aid).
	 */
	void pullInputs(bool disp = false)
	{
		value = 0;
		for(unsigned int i = 0; i < inputLinks.size(); i++)
			value += inputLinks[i]->getInputNode()->getValue() * 
			inputLinks[i]->getWeight();
		if(disp)
			cout << value << endl;
		// Logistic sigmoid transfer function: maps the weighted sum into (0, 1)
		value = 1 / (1 + exp(-value));
		if(disp)
			cout << value << endl;
	}
	
	/**
	 * Computes the error at the current Node by pulling errors from all Nodes 
	 * connected to the current Node via output Links, according to the weights
	 * of those Links and applying the chain rule.
	 */
	void pullBackprop()
	{
		// Output nodes have their raw error set externally (via setError), so
		// only the sigmoid derivative value*(1-value) is applied here
		if(outputLinks.empty())
			error *= value * (1 - value);
		else 
		{
			error = 0;
			for(unsigned int i = 0; i < outputLinks.size(); i++)
				error += outputLinks[i]->getOutputNode()->getError() * 
				outputLinks[i]->getWeight();
			error *= value * (1 - value);
		}
	}
	
	/**
	 * Returns a pointer to the input Link connected to the current Node 
	 * corresponding to the given index.
	 *
	 * @param index An unsigned int corresponding to the index of the input Link
	 * to be returned
	 *
	 * @return A pointer to the input Link connected to the given Node 
	 * corresponding to the specified index
	 */
	Link<numType> *getInputLink(unsigned int index)
	{
#ifdef DEBUGGING
		// index is unsigned, so only the upper bound needs checking
		assert(index < inputLinks.size());
#endif
		return inputLinks[index];
	}
	
	/**
	 * Returns a pointer to the output Link connected to the current Node 
	 * corresponding to the given index.
	 *
	 * @param index An unsigned int corresponding to the index of the output Link
	 * to be returned
	 *
	 * @return A pointer to the output Link connected to the given Node 
	 * corresponding to the specified index
	 */
	Link<numType> *getOutputLink(unsigned int index)
	{		
#ifdef DEBUGGING
		// index is unsigned, so only the upper bound needs checking
		assert(index < outputLinks.size());
#endif
		return outputLinks[index];
	}
	
	/**
	 * Returns the number of output Links connected to the current Node.
	 *
	 * @return The number of output Links attached to this Node
	 */
	unsigned int numOutputNodes()
	{
		return (unsigned int)outputLinks.size();
	}
	
	/**
	 * Adds a Link* to the Node's list of pointers to input Links
	 *
	 * @param link A pointer to the Link to be added to the Node's list of input
	 * Links
	 */
	void addInputLink(Link<numType> *link)
	{
		inputLinks.push_back(link);
	}
	
	/**
	 * Adds a Link* to the Node's list of pointers to output Links
	 *
	 * @param link A pointer to the Link to be added to the Node's list of output
	 * Links
	 */
	void addOutputLink(Link<numType> *link)
	{
		outputLinks.push_back(link);
	}
	
	/**
	 * Returns the value stored in the Node due to forward propagation of data
	 *
	 * @return The numeric data stored in the current Node
	 */
	numType getValue()
	{
		return value;
	}
	
	/**
	 * Returns the error stored in the Node due to backward propagation of error
	 *
	 * @return The backpropagated error stored in the current Node
	 */
	numType getError()
	{
		return error;
	}
	
	/**
	 * Sets the value of the Node. Only allowed for Nodes with no inputs (the 
	 * input layer of the neural network) since all others should get their values
	 * only by propagating information through the network.
	 *
	 * @param value The numerical value to be stored in the Node
	 */
	void setValue(numType value)
	{
#ifdef DEBUGGING
		assert(inputLinks.size() == 0);
#endif
		this->value = value;
	}
	
	/**
	 * Sets the error of the Node. Only allowed for Nodes with no outputs (the 
	 * output layer of the neural network) since all others should get their 
	 * error only by propagating error backwards through the network.
	 *
	 * @param error The numerical value of the error to be stored in the Node
	 */
	void setError(numType error)
	{
#ifdef DEBUGGING
		assert(outputLinks.size() == 0);
#endif
		this->error = error;
	}
	
private:
	
	/** Vector of pointers to Links connected to the Node on the input side */
	vector< Link<numType>* > inputLinks;
	
	/** Vector of pointers to Links connected to the Node on the output side */
	vector< Link<numType>* > outputLinks;
	
	/** The numerical value stored at the current Node from forward propagation */
	numType value;
	
	/** The numerical error stored at the current Node from back propagation */
	numType error;
};


/* * * * * * * * * * * * * * * NEURAL NET CLASS * * * * * * * * * * * * * * */

/**
 * @class NeuralNet
 *
 * @brief This class represents two-hidden layer multiayer perceptron neural
 * network
 *
 * @tparam numType A numerical data type that will be the format used for data
 * stored in the NeuralNet
 *
 * This class represents a two-hidden layer, multilayer perceptron neural 
 * network. This structure is used only for training purposes, a trained network
 * used in runtime execution should use a different structure for faster 
 * performance.
 */
template <typename numType>
class NeuralNet
{
public:
	
	/** 
	 * Constructor for a raw neural network with the specified number of Nodes in
	 * each layer. One extra bias Node (with a constant value of 1.0) is appended
	 * to the input layer and to each hidden layer; the output layer has no bias.
	 *
	 * @param numInput The number of nodes in the input layer of the network
	 *
	 * @param numH1 The number of hidden nodes in the first hidden layer of the
	 * NeuralNet
	 *
	 * @param numH2 The number of hidden nodes in the second hidden layer of the
	 * NeuralNet
	 *
	 * @param numOutput The number of nodes in the output layer of the network
	 */
	NeuralNet(int numInput, int numH1, int numH2, int numOutput)
	{
		// +1 in the first three layers accounts for the bias node
		for(int i = 0; i < numInput+1; i++)
			inputLayer.push_back(new Node<numType>);
		
		for(int i = 0; i < numH1+1; i++)
			hiddenLayer1.push_back(new Node<numType>);
		
		for(int i = 0; i < numH2+1; i++)
			hiddenLayer2.push_back(new Node<numType>);
		
		for(int i = 0; i < numOutput; i++)
			outputLayer.push_back(new Node<numType>);
		
		// Fully connect adjacent layers. Bias nodes (index numH1/numH2) never
		// receive input links, so j only runs over the non-bias nodes.
		for(int i = 0; i < numInput+1; i++)
			for(int j = 0; j < numH1; j++)
			{
				Link<numType> *curLink = new Link<numType>();
				curLink->setInputNode(inputLayer[i]);
				curLink->setOutputNode(hiddenLayer1[j]);
				inputLayer[i]->addOutputLink(curLink);
				hiddenLayer1[j]->addInputLink(curLink);
			}
		
		for(int i = 0; i < numH1+1; i++)
			for(int j = 0; j < numH2; j++)
			{
				Link<numType> *curLink = new Link<numType>();
				curLink->setInputNode(hiddenLayer1[i]);
				curLink->setOutputNode(hiddenLayer2[j]);
				hiddenLayer1[i]->addOutputLink(curLink);
				hiddenLayer2[j]->addInputLink(curLink);
			}
		
		for(int i = 0; i < numH2+1; i++)
			for(int j = 0; j < numOutput; j++)
			{
				Link<numType> *curLink = new Link<numType>();
				curLink->setInputNode(hiddenLayer2[i]);
				curLink->setOutputNode(outputLayer[j]);
				hiddenLayer2[i]->addOutputLink(curLink);
				outputLayer[j]->addInputLink(curLink);
			}
		
		// Bias nodes hold a constant value of 1.0
		inputLayer[numInput]->setValue(1.0);
		hiddenLayer1[numH1]->setValue(1.0);
		hiddenLayer2[numH2]->setValue(1.0);
	}
	
	/*NeuralNet(string filename)
	{
		int length;
		char * buffer;
		
		ifstream is;
		is.open(filename.c_str());
		
		// get length of file:
		is.seekg (0, ios::end);
		length = is.tellg();
		is.seekg (0, ios::beg);
		
		// allocate memory:
		buffer = new char [length];
		
		// read data as a block:
		is.read (buffer,length);
		is.close();
		
		xml_document<> doc;
		doc.parse<0>(buffer);
		delete[] buffer;
		
		xml_node<> *curNode = doc.first_node();
		stringstream ss;
		string numOutputStr = curNode->first_attribute("NumOutputNodes")->value();
		string numH2Str = curNode->first_attribute("NumHidden2Nodes")->value();
		string numH1Str = curNode->first_attribute("NumHidden1Nodes")->value();
		string numInputStr = curNode->first_attribute("NumInputNodes")->value();
	
		int numInput, numH1, numH2, numOutput;
		ss << numOutputStr;
		ss >> numOutput;
		ss.str("");
		
		ss << numH2Str;
		ss >> numH2;
		ss.str("");
		
		ss << numH1Str;
		ss >> numH1;
		ss.str("");
		
		ss << numInputStr;
		ss >> numInput;
		ss.str("");
		
		for(int i = 0; i < numInput+1; i++)
			inputLayer.push_back(new Node<numType>);
		
		for(int i = 0; i < numH1+1; i++)
			hiddenLayer1.push_back(new Node<numType>);
		
		for(int i = 0; i < numH2+1; i++)
			hiddenLayer2.push_back(new Node<numType>);
		
		for(int i = 0; i < numOutput; i++)
			outputLayer.push_back(new Node<numType>);
		
		
		curNode = curNode->first_node("InputToHidden1Links")->first_node("Link");
		for(int i = 0; i < (numInput+1) * (numH1); i++)
		{
			string inputNumStr = curNode->first_attribute("InputNodeNum")->value();
			string outputNumStr = curNode->first_attribute("OutputNodeNum")->value(); 
			string weightStr = curNode->first_attribute("Weight")->value(); 
		}*/
		
		/*
		
		for(int i = 0; i < numInput+1; i++)
			for(int j = 0; j < numH1; j++)
			{
				Link<numType> *curLink = new Link<numType>();
				curLink->setInputNode(inputLayer[i]);
				curLink->setOutputNode(hiddenLayer1[j]);
				inputLayer[i]->addOutputLink(curLink);
				hiddenLayer1[j]->addInputLink(curLink);
			}
		
		for(int i = 0; i < numH1+1; i++)
			for(int j = 0; j < numH2; j++)
			{
				Link<numType> *curLink = new Link<numType>();
				curLink->setInputNode(hiddenLayer1[i]);
				curLink->setOutputNode(hiddenLayer2[j]);
				hiddenLayer1[i]->addOutputLink(curLink);
				hiddenLayer2[j]->addInputLink(curLink);
			}
		
		for(int i = 0; i < numH2+1; i++)
			for(int j = 0; j < numOutput; j++)
			{
				Link<numType> *curLink = new Link<numType>();
				curLink->setInputNode(hiddenLayer2[i]);
				curLink->setOutputNode(outputLayer[j]);
				hiddenLayer2[i]->addOutputLink(curLink);
				outputLayer[j]->addInputLink(curLink);
			}
		
		inputLayer[numInput]->setValue(1.0);
		hiddenLayer1[numH1]->setValue(1.0);
		hiddenLayer2[numH2]->setValue(1.0);
	}*/
	
	/** 
	 * Destructor for the NeuralNet. Frees space for all Nodes and Links 
	 * associated with the network. Each node owns its outgoing Links for the
	 * purpose of deallocation.
	 */
	~NeuralNet()
	{
		// Hidden layers carry a trailing bias node that receives no input links,
		// so each node in the preceding layer has (size()-1) outgoing links.
		for(unsigned int i = 0; i < inputLayer.size(); i++)
		{
			Node<numType> *curNode = inputLayer[i];
			for(unsigned int j = 0; j < hiddenLayer1.size()-1; j++)
			{
				Link<numType>* link = curNode->getOutputLink(j);
				delete link;
			}
		}

		for(unsigned int i = 0; i < hiddenLayer1.size(); i++)
		{
			Node<numType> *curNode = hiddenLayer1[i];
			for(unsigned int j = 0; j < hiddenLayer2.size()-1; j++)
			{
				Link<numType>* link = curNode->getOutputLink(j);
				delete link;
			}
		}

		for(unsigned int i = 0; i < hiddenLayer2.size(); i++)
		{
			Node<numType> *curNode = hiddenLayer2[i];
			// The output layer has NO bias node, so every hidden2 node has
			// outputLayer.size() outgoing links; using size()-1 here (as the
			// other loops do) would leak one Link per hidden2 node.
			for(unsigned int j = 0; j < outputLayer.size(); j++)
			{
				Link<numType>* link = curNode->getOutputLink(j);
				delete link;
			}
		}
		
		for(unsigned int i = 0; i<inputLayer.size(); i++)
		{
			Node<numType> *curNode = inputLayer[i];
			delete curNode;
		}
		
		for(unsigned int i = 0; i<hiddenLayer1.size(); i++)
		{
			Node<numType> *curNode = hiddenLayer1[i];
			delete curNode;
		}
		
		for(unsigned int i = 0; i<hiddenLayer2.size(); i++)
		{
			Node<numType> *curNode = hiddenLayer2[i];
			delete curNode;
		}
		
		for(unsigned int i = 0; i<outputLayer.size(); i++)
		{
			Node<numType> *curNode = outputLayer[i];
			delete curNode;
		}
	}
	
	/**
	 * Adjusts the weights of the links in the NeuralNet in order to most closely
	 * mimic the desired transfer function from the input data to the output data.
	 * As currently implemented, all values in inputData and outputData should be 
	 * normalized such that all values are between 0.05 and 0.95.
	 *
	 * @param inputData Array of numDataSets input vectors, each with one value 
	 * per (non-bias) input node
	 *
	 * @param outputData Array of numDataSets target output vectors, each with 
	 * one value per output node
	 *
	 * @param numDataSets The number of training examples in inputData/outputData
	 *
	 * @param numEpochs The number of complete passes to make over the data set
	 */
	void trainNetwork(numType** inputData, numType** outputData, int numDataSets, 
										int numEpochs)
	{
		for(int k = 0; k < numEpochs; k++)
		{
			for(int m = 0; m < numDataSets; m++)
			{
				// Forward pass; only the node values matter here, so the returned
				// output buffer is discarded immediately. Typed numType* to match
				// predict()'s declared return type for any numType.
				numType *result = predict(inputData[m]);
				delete[] result;
				
				// Seed the output layer with the raw prediction error
				for(unsigned int i = 0; i < outputLayer.size(); i++)
				{
					outputLayer[i]->setError(outputData[m][i] - 
																	 outputLayer[i]->getValue());
				}
				
				// Backward pass: propagate error from output towards input
				for(unsigned int i = 0; i < outputLayer.size(); i++)
					outputLayer[i]->pullBackprop();
				
				for(unsigned int i = 0; i < hiddenLayer2.size(); i++)
					hiddenLayer2[i]->pullBackprop();
				
				for(unsigned int i = 0; i < hiddenLayer1.size(); i++)
					hiddenLayer1[i]->pullBackprop();
				
				// Weight update for every link in the network
				for(unsigned int i = 0; i < inputLayer.size(); i++)
					for(unsigned int j = 0; j < inputLayer[i]->numOutputNodes(); j++)
						inputLayer[i]->getOutputLink(j)->adjustWeight();
				
				for(unsigned int i = 0; i < hiddenLayer1.size(); i++)
					for(unsigned int j = 0; j < hiddenLayer1[i]->numOutputNodes(); j++)
						hiddenLayer1[i]->getOutputLink(j)->adjustWeight();
				
				for(unsigned int i = 0; i < hiddenLayer2.size(); i++)
					for(unsigned int j = 0; j < hiddenLayer2[i]->numOutputNodes(); j++)
						hiddenLayer2[i]->getOutputLink(j)->adjustWeight();
			}
		}
	}
	
	/**
	 * Runs a forward pass of the network on the given input vector.
	 *
	 * @param inputData An array with one value per non-bias input node
	 *
	 * @param show If true, prints intermediate layer values/weights to stdout
	 * (debugging aid; assumes at least 3 hidden1 and 2 hidden2 nodes — confirm
	 * before enabling on smaller networks)
	 *
	 * @return A newly allocated array of output-layer values; the caller is 
	 * responsible for freeing it with delete[]
	 */
	numType* predict(numType* inputData, bool show = false)
	{
		// size()-1 skips the trailing bias node, which keeps its constant value
		for(unsigned int i = 0; i < inputLayer.size() - 1; i++)
			inputLayer[i]->setValue(inputData[i]);
		
		
		for(unsigned int i = 0; i < hiddenLayer1.size() - 1; i++)
			hiddenLayer1[i]->pullInputs(show);
		
		if(show){
			//cout << inputLayer[1]->getOutputLink(0)->getWeight() << endl;
			cout << "Layer1: ";
			cout << hiddenLayer1[0]->getValue() << "\t" << hiddenLayer1[1]->getValue() 
			<< "\t" << hiddenLayer1[2]->getValue() << endl;
		}
		
		
		for(unsigned int i = 0; i < hiddenLayer2.size() - 1; i++)
			hiddenLayer2[i]->pullInputs();
		
		if(show){
			cout << "Weights: ";
			for(int i = 0; i < 3; i++)
			{
				cout << hiddenLayer2[0]->getInputLink(i)->getWeight() << "\t";
			}
			cout << endl;
			cout << "Layer2: ";
			cout << hiddenLayer2[0]->getValue() << "\t" << hiddenLayer2[1]->getValue() << endl;
		}
		
		// Allocate as numType (not double) so the declared return type is
		// honored for any template instantiation (e.g. NeuralNet<float>)
		numType *result = new numType[outputLayer.size()];
		for(unsigned int i = 0; i < outputLayer.size(); i++)
		{
			outputLayer[i]->pullInputs();
			result[i] = outputLayer[i]->getValue();
		}
		return result;
	}		
	
	/**
	 * Converts a numeric value to its string representation.
	 *
	 * @param num The number to convert
	 *
	 * @return The string form of num as produced by stream insertion
	 */
	string ConvertNumber(numType num)
	{
		stringstream ss;
		ss << num;
		return ss.str();
	}
	
	/**
	 * Serializes the network topology and all link weights as XML through the
	 * given writer. Note: the recorded layer sizes include the bias nodes.
	 *
	 * @param writer The xmlwriter that receives the generated tags/attributes
	 */
	void writeToFile(xmlwriter & writer)
	{
		writer.AddAtributes("NumInputNodes", ConvertNumber(inputLayer.size()));
		writer.AddAtributes("NumHidden1Nodes", ConvertNumber(hiddenLayer1.size()));
		writer.AddAtributes("NumHidden2Nodes", ConvertNumber(hiddenLayer2.size()));
		writer.AddAtributes("NumOutputNodes", ConvertNumber(outputLayer.size()));
		
		writer.Createtag("NeuralNetwork");
		writer.Createtag("InputToHidden1Links");
		for(unsigned int i = 0; i < inputLayer.size(); i++)
		{
			Node<numType> *curNode = inputLayer[i];
			// size()-1: hidden layers end with a bias node that has no input links
			for(unsigned int j = 0; j < hiddenLayer1.size()-1; j++)
			{
				writer.AddAtributes("InputNodeNum", ConvertNumber(i));
				writer.AddAtributes("OutputNodeNum", ConvertNumber(j));
				writer.AddAtributes("Weight", 
														ConvertNumber(curNode->getOutputLink(j)->getWeight()));
				
				writer.Createtag("Link");
				writer.CloseLasttag();
			}
		}
		writer.CloseLasttag();
		
		writer.Createtag("Hidden1ToHidden2Links");
		for(unsigned int i = 0; i < hiddenLayer1.size(); i++)
		{
			Node<numType> *curNode = hiddenLayer1[i];
			for(unsigned int j = 0; j < hiddenLayer2.size()-1; j++)
			{
				writer.AddAtributes("InputNodeNum", ConvertNumber(i));
				writer.AddAtributes("OutputNodeNum", ConvertNumber(j));
				writer.AddAtributes("Weight", 
														ConvertNumber(curNode->getOutputLink(j)->getWeight()));
				writer.Createtag("Link");
				writer.CloseLasttag();
			}
		}
		writer.CloseLasttag();
		
		writer.Createtag("Hidden2ToOutputLinks");
		for(unsigned int i = 0; i < hiddenLayer2.size(); i++)
		{
			Node<numType> *curNode = hiddenLayer2[i];
			// The output layer has no bias node, so every link must be written;
			// size()-1 here would silently drop the last output node's weights
			for(unsigned int j = 0; j < outputLayer.size(); j++)
			{
				writer.AddAtributes("InputNodeNum", ConvertNumber(i));
				writer.AddAtributes("OutputNodeNum", ConvertNumber(j));
				writer.AddAtributes("Weight", 
														ConvertNumber(curNode->getOutputLink(j)->getWeight()));
				writer.Createtag("Link");
				writer.CloseLasttag();
			}
		}
		writer.CloseLasttag();
		writer.CloseLasttag();
	}
	
	/**
	 * Dumps all link weights to "Code.txt" as C-style assignment statements
	 * (e.g. for pasting into a fixed-weight runtime network implementation).
	 */
	void writeToFile()
	{
		ofstream oFile;
		oFile.open("Code.txt");
		for(unsigned int j = 0; j < hiddenLayer1.size()-1; j++)
		{
			for(unsigned int i = 0; i < inputLayer.size(); i++)
			{
				Node<numType> *curNode = inputLayer[i];
				oFile << "nn->inputToHidden1Weights[" << j * (inputLayer.size()) + i;
				oFile << "] = " << ConvertNumber(curNode->getOutputLink(j)->getWeight());
				oFile << ";" << endl;
			}
		}
		
		for(unsigned int j = 0; j < hiddenLayer2.size()-1; j++)
		{
			for(unsigned int i = 0; i < hiddenLayer1.size(); i++)
			{
				Node<numType> *curNode = hiddenLayer1[i];
				oFile << "nn->hidden1ToHidden2Weights[" << j * (hiddenLayer1.size()) + i;
				oFile << "] = " << ConvertNumber(curNode->getOutputLink(j)->getWeight());
				oFile << ";" << endl;
			}
		}
		
		// No -1 here: the output layer has no bias node
		for(unsigned int j = 0; j < outputLayer.size(); j++)
		{
			for(unsigned int i = 0; i < hiddenLayer2.size(); i++)
			{
				Node<numType> *curNode = hiddenLayer2[i];
				oFile << "nn->hidden2ToOutputWeights[" << j * (hiddenLayer2.size()) + i;
				oFile << "] = " << ConvertNumber(curNode->getOutputLink(j)->getWeight());
				oFile << ";" << endl;
			}
		}
		
		oFile.close();
	}
	
private:
	/** The four layers of the network; the first three end with a bias node */
	vector<Node<numType>*> inputLayer, hiddenLayer1, hiddenLayer2, outputLayer;
};

#endif