#pragma once
#include "nnse_support.h"

namespace nnse
{
	// MLP - a single multi-layer perceptron belonging to a parent solver.
	// C++/CLI managed class (ref class, ^ handles); this header carries
	// declarations only - all definitions live elsewhere. T and U are the
	// solver's value types; U is used for the learning rate, the momentum
	// value and the error values returned by training()/testing()/reverse()
	// (presumably a floating-point type - confirm against solver<T, U>).
	template <class T, class U> public ref class MLP
	{
		// Constructor and Destructor
			// p - the owning solver; MLP_n - this MLP's number within that solver
			public: MLP(solver<T, U>^ p, unsigned int MLP_n);
			public: virtual ~MLP();
		// Member Variables and their Accessor Functions
			// parent - the solver to which this MLP belongs
				private: solver<T, U>^ parent;
				// presumably forwards to the parent solver's problem - confirm in definition
				public: problem<T, U>^ get_parent_problem();
				public: solver<T, U>^ get_parent_solver();
			// number - the MLP's number relative to the other MLPs in the parent solver
				private: unsigned int number;
				public: unsigned int get_number();
				protected: void put_number(unsigned int n);
			// seed - a seed to initialise the random number generator (the dendrite connection strengths are initialised to random values)
				private: unsigned int seed;
				public: unsigned int get_seed();
				protected: void put_seed(unsigned int s);
			// number_of_iterations - the number of iterations to execute during training
				private: unsigned int number_of_iterations;
				public: unsigned int get_number_of_iterations();
				protected: void put_number_of_iterations(unsigned int n_i);
			// learning_rate - the learning rate to use during training
				private: U learning_rate;
				public: U get_learning_rate();
				protected: void put_learning_rate(U l_r);
			// momentum_flag - a flag to determine whether momentum should be used during training
				private: bool momentum_flag;
				public: bool get_momentum_flag();
				protected: void put_momentum_flag(bool m_f);
			// momentum_value - the momentum value to use during training if the momentum flag is set
				private: U momentum_value;
				public: U get_momentum_value();
				protected: void put_momentum_value(U m_v);
			// number_of_hidden_layers - the number of hidden layers in this MLP
				private: unsigned int number_of_hidden_layers;
				public: unsigned int get_number_of_hidden_layers();
				// total layer count (presumably hidden layers plus input/output - confirm in definition)
				public: unsigned int get_number_of_layers();
				protected: void put_number_of_hidden_layers(unsigned int n_h_l);
			// layers - the layers in this MLP
				private: array<layer<T, U>^>^ layers;
				// returns a tracking reference to layer l_n; NOTE(review): parameter is
				// signed int and no bounds check is visible here - verify in the definition
				public: layer<T, U>% get_layer(int l_n);
				// (re)builds the layers array - see definition for sizing rules
				protected: void put_layers();
			// MLP_form - the MLP's user interface
				protected: SimpleGUI::MLP<T, U>^ MLP_form;
		// Worker Functions
			// training()/testing() return an error measure of type U; exact
			// metric is defined in the implementation
			public: virtual U training();
			public: virtual U testing();
			// forward pass over the layers; update_flag's precise effect is
			// defined in the implementation - TODO confirm
			protected: virtual void forward(bool update_flag);
			// backward (error-propagation) pass; returns a U value - see definition
			protected: virtual U reverse();
		// GUI Functions
			public: void display_form();
			public: void invalidate_form();
			public: void display_layers();
		// Serialisation Functions
			// wide-character stream serialisation; file format is defined by the
			// implementations of read()/write(); display() presumably emits a
			// human-readable form - confirm in definition
			public: virtual void read(std::wifstream& in);
			public: virtual void write(std::wofstream& out);
			public: virtual void display(std::wofstream& out);
	};
	// Wide-stream extraction/insertion for MLP - presumably delegate to
	// MLP::read()/MLP::write(); confirm against the definitions. Note these
	// are declared on the concrete std::wifstream/std::wofstream types
	// rather than the more general std::wistream/std::wostream.
	template <class T, class U> std::wifstream& operator>>(std::wifstream& in, MLP<T, U>% mlp);
	template <class T, class U> std::wofstream& operator<<(std::wofstream& out, MLP<T, U>% mlp);
}
