package hu.myai.runtimeneural;

import hu.myai.util.DatabaseUtil;
import hu.myai.util.SanfordNet;

import org.joone.engine.ContextLayer;
import org.joone.engine.DelayLayer;
import org.joone.engine.FullSynapse;
import org.joone.engine.Monitor;
import org.joone.engine.NeuralNetEvent;
import org.joone.engine.NeuralNetListener;
import org.joone.engine.SigmoidLayer;
import org.joone.engine.learning.TeachingSynapse;
import org.joone.io.MemoryInputSynapse;
import org.joone.log.ILogger;
import org.joone.log.LoggerFactory;
import org.joone.net.NeuralNet;
import org.joone.net.NeuralNetValidator;
import org.joone.net.NeuralValidationEvent;
import org.joone.net.NeuralValidationListener;
import org.joone.util.LearningSwitch;

/**
 * Builds, trains and validates an Elman-style recurrent neural network
 * (Joone) on historical data loaded from the database for a single symbol.
 *
 * <p>Topology: {@link DelayLayer} input &rarr; {@link ContextLayer} &rarr;
 * two {@link SigmoidLayer} hidden layers &rarr; {@link SigmoidLayer} output,
 * fully connected. Training progress arrives through the
 * {@link NeuralNetListener} callbacks implemented below; periodic validation
 * results arrive via {@link NeuralValidationListener#netValidated}.</p>
 */
public abstract class InitNeuralNetwork implements NeuralNetListener, NeuralValidationListener {

	/** The network under training; public so callers/subclasses can inspect it. */
	public NeuralNet nnet;

	/** Timestamp (ms) of the last progress report, used to time training cycles. */
	private long startms;

	private static final ILogger log = LoggerFactory.getLogger(InitNeuralNetwork.class);

	/** Symbol currently being trained; embedded in saved-network filenames. */
	private String symbol;

	/** Latest training RMSE; starts pessimistic (1) so no net is saved before training. */
	private double trainError = 1;

	// for database util reference
	private DatabaseUtil dbUtil = new DatabaseUtil();

	private SanfordNet netUtil = new SanfordNet();

	public InitNeuralNetwork() {
	}

	/**
	 * Assembles the network for {@code symbol}, attaches the training and
	 * validation data sets, configures the Joone monitor and starts training.
	 *
	 * @param symbol   symbol whose history is loaded from the database
	 * @param elso     neuron count of the first hidden layer
	 * @param masodik  neuron count of the second hidden layer
	 * @param harmadik neuron count of the third hidden layer
	 * @throws org.joone.exception.JooneRuntimeException if fewer records are
	 *         available than the time-series window requires
	 */
	public void go(String symbol, int elso, int masodik, int harmadik) {

		this.symbol = symbol;

		/* Creates the layers and connects them */
		DelayLayer inputLayer = new DelayLayer(); // Input Layer
		ContextLayer hiddenLayer1 = new ContextLayer(); // Hidden Layer
		SigmoidLayer hiddenLayer2 = new SigmoidLayer(); // Hidden Layer
		SigmoidLayer hiddenLayer3 = new SigmoidLayer(); // Hidden Layer
		SigmoidLayer outputLayer = new SigmoidLayer(); // Output Layer

		int timeSeries = netUtil.getTimeSeries();

		// Delay line covering the whole time-series window.
		inputLayer.setTaps(timeSeries - 1);
		inputLayer.setRows(7); // The input pattern has 7 columns

		log.info("Layerek: 1:" + elso + "  2:" + masodik + "  3:" + harmadik + "\n");
		hiddenLayer1.setRows(elso);
		hiddenLayer1.setLayerName("1. layer");

		hiddenLayer2.setRows(masodik);
		hiddenLayer2.setLayerName("2. layer");

		hiddenLayer3.setRows(harmadik);
		hiddenLayer3.setLayerName("3. layer");

		outputLayer.setRows(3); // The desired pattern has 3 columns

		FullSynapse synInH1 = new FullSynapse();
		FullSynapse synH1H2 = new FullSynapse();
		FullSynapse synH2H3 = new FullSynapse();
		FullSynapse synH3O = new FullSynapse();

		synInH1.setName("I-H1 Synapse");
		synH1H2.setName("H1-H2 Synapse");
		synH2H3.setName("H2-H3 Synapse");
		synH3O.setName("H3-O Synapse");

		netUtil.connect(inputLayer, synInH1, hiddenLayer1);
		netUtil.connect(hiddenLayer1, synH1H2, hiddenLayer2);
		netUtil.connect(hiddenLayer2, synH2H3, hiddenLayer3);
		netUtil.connect(hiddenLayer3, synH3O, outputLayer);

		// NOTE(review): training and validation arrays are produced by the
		// identical query, so validation currently runs on the training data
		// itself — confirm whether a separate validation window was intended.
		double[][] historyDataArray = dbUtil.getHistoryForExecute(netUtil.geTZeroDate(), symbol);
		double[][] historyValidArray = dbUtil.getHistoryForExecute(netUtil.geTZeroDate(), symbol);

		if (historyDataArray == null || historyValidArray == null) {
			log.error("Error - no data in the database");
			return;
		}

		int numberOfData = historyDataArray.length;
		int numOfValRecords = historyValidArray.length;

		// Skip training if there is not at least one full window plus a target.
		if (numberOfData < timeSeries + 1 || numOfValRecords < timeSeries + 1) {
			log.error("no aim to make the training, less then " + timeSeries + " records");
			throw new org.joone.exception.JooneRuntimeException("Not enough input for training.");
		}

		/* Creates all the required input data sets */
		// The input training data set (columns 2..8 of the history rows)
		MemoryInputSynapse trainInput = netUtil.createInput(historyDataArray, 1, 2, 8);
		// The desired training data set (columns 9..11)
		MemoryInputSynapse trainDesired = netUtil.createValidInput(historyDataArray, 3, 9, 11);

		// The input validation data set
		MemoryInputSynapse validInput = netUtil.createInput(historyValidArray, 1, 2, 8);
		// The desired validation data set
		MemoryInputSynapse validDesired = netUtil.createValidInput(historyValidArray, 3, 9, 11);

		/* Creates and attaches the input learning switch */
		LearningSwitch inputSwitch = netUtil.createSwitch(trainInput, validInput);
		inputLayer.addInputSynapse(inputSwitch);

		/* Creates and attaches the desired learning switch */
		LearningSwitch desiredSwitch = netUtil.createSwitch(trainDesired, validDesired);
		TeachingSynapse teacher = new TeachingSynapse(); // The teacher of the net
		teacher.setDesired(desiredSwitch);
		outputLayer.addOutputSynapse(teacher);

		/* Now we put it all together into a NeuralNet object */
		nnet = new NeuralNet();
		nnet.addLayer(inputLayer, NeuralNet.INPUT_LAYER);
		nnet.addLayer(hiddenLayer1, NeuralNet.HIDDEN_LAYER);
		nnet.addLayer(hiddenLayer2, NeuralNet.HIDDEN_LAYER);
		nnet.addLayer(hiddenLayer3, NeuralNet.HIDDEN_LAYER);
		nnet.addLayer(outputLayer, NeuralNet.OUTPUT_LAYER);
		nnet.setTeacher(teacher);

		// Randomize the initial weights, then configure the monitor.
		nnet.randomize(5);
		Monitor mon = nnet.getMonitor();

		mon.setLearningRate(0.3);
		mon.setMomentum(0.13);
		mon.setSupervisioned(true);
		mon.setUseRMSE(true);
		// Two rows per data set are consumed by windowing, not by patterns.
		mon.setTrainingPatterns(numberOfData - 2);
		mon.setValidationPatterns(numOfValRecords - 2);
		mon.setTotCicles(420);
		mon.setPreLearning(timeSeries);
		mon.setLearning(true);

		nnet.getMonitor().addNeuralNetListener(this);
		nnet.start();
		startms = System.currentTimeMillis();
		nnet.getMonitor().Go();
	}

	/* Events */

	/**
	 * Called by the validator clone. Logs the validation RMSE and saves the
	 * net when both the validation and the training errors are below 0.010.
	 */
	@Override
	public void netValidated(NeuralValidationEvent event) {
		// Shows the RMSE at the end of the validation run
		NeuralNet validatedNet = (NeuralNet) event.getSource();
		double validError = validatedNet.getMonitor().getGlobalError();
		log.info("    Validation Error: " + validError);

		/* if the error is below the limit the net is saved */
		if (validError < 0.010 && trainError < 0.010) {
			netUtil.saveNeuralNet(validatedNet, symbol + "_netValidated_" + (validError + trainError) / 2 + ".snet");
			log.info("    NeuralNet is saved as: " + symbol + "_netValidated_" + (validError + trainError) / 2
					+ ".snet");

		}
	}

	/**
	 * Logs progress and launches an asynchronous validation on a clone of the
	 * net every 20 training cycles.
	 */
	@Override
	public void cicleTerminated(NeuralNetEvent e) {
		// TotCicles counts down, so convert to an ascending cycle number.
		int cycle = nnet.getMonitor().getTotCicles() - nnet.getMonitor().getCurrentCicle() + 1;
		if (cycle % 20 == 0) { // We validate the net every 20 cycles
			trainError = nnet.getMonitor().getGlobalError();

			log.info("Cycle #" + cycle + "   " + (System.currentTimeMillis() - startms) + " ms");
			log.info("    Training Error:   " + trainError);

			// reset the timer
			startms = System.currentTimeMillis();

			// Creates a copy of the neural network
			nnet.getMonitor().setExporting(true);
			NeuralNet newNet = nnet.cloneNet();
			nnet.getMonitor().setExporting(false);

			// Cleans the old listeners.
			// This is a fundamental action to avoid that the validating net
			// calls the cicleTerminated method of this class.
			newNet.removeAllListeners();

			// Set all the parameters for the validation
			NeuralNetValidator validator = new NeuralNetValidator(newNet);
			validator.addValidationListener(this);
			validator.start(); // Validates the net asynchronously
		}
	}

	/** No-op: per-pattern error logging is disabled to keep the log readable. */
	@Override
	public void errorChanged(NeuralNetEvent e) {
	}

	@Override
	public void netStarted(NeuralNetEvent e) {
	}

	/** Logs the elapsed time since the last progress report when training stops. */
	@Override
	public void netStopped(NeuralNetEvent e) {
		log.info("Stopped after " + (System.currentTimeMillis() - startms) + " ms");
	}

	@Override
	public void netStoppedError(NeuralNetEvent e, String error) {
	}
}
