package com.pwr.app.classification;

import java.io.BufferedWriter;
import java.io.File;
import java.io.FileWriter;
import java.io.IOException;
import java.util.ArrayList;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.propagation.back.Backpropagation;
import org.encog.neural.networks.training.propagation.resilient.ResilientPropagation;

import com.pwr.app.Pattern;

/**
 * Letter classifier (A-Z) backed by an Encog feed-forward network with sigmoid
 * activations, trained via backpropagation. {@link #teach(ArrayList)} must be
 * called before {@link #present(double[])}.
 */
public class NeuralNet implements BaseClassification {

	/** Network built and trained by {@link #teach(ArrayList)}; {@code null} until then. */
	BasicNetwork networkEncog;

	/**
	 * Classifies a feature vector with the trained network.
	 *
	 * @param pattern feature vector; must match the input length used during teach()
	 * @return ASCII code of the recognized letter ('A'..'Z'), or -1 when no output
	 *         neuron activates above 0.5
	 */
	@Override
	public int present(double[] pattern) {
		// Round into a local copy: the original overwrote the caller's array,
		// a surprising side effect for a read-only classification call.
		double[] input = new double[pattern.length];
		for (int i = 0; i < pattern.length; i++)
			input[i] = round(pattern[i], 5);
		double[] output = new double[26];
		networkEncog.compute(input, output);
		// First output neuron whose activation rounds to 1 wins; index 0 maps to 'A' (65).
		for (int i = 0; i < output.length; i++) {
			if (Math.round(output[i]) == 1)
				return i + 65;
		}
		return -1;
	}

	/**
	 * Builds a four-layer sigmoid network and trains it with multithreaded
	 * backpropagation until the error drops below 0.001, the error stalls, or
	 * 100000 epochs elapse.
	 *
	 * @param patterns training samples; must be non-empty, with all feature
	 *                 vectors of equal length and letters encoded as ASCII 'A'..'Z'
	 * @throws IllegalArgumentException if {@code patterns} is null or empty
	 */
	@Override
	public void teach(ArrayList<Pattern> patterns) {
		if (patterns == null || patterns.isEmpty())
			throw new IllegalArgumentException("patterns must contain at least one training sample");
		System.out.println("Neural net - teach");
		int inputNeurons = patterns.get(0).getPattern().length;
		int outputNeurons = 26;
		// Geometric-pyramid heuristic for hidden layer sizes:
		// out * (in/out)^(2/3) and out * (in/out)^(1/3).
		int firstHidden = (int) (outputNeurons * Math.pow((double) inputNeurons / outputNeurons, 2.0 / 3.0));
		int secondHidden = (int) (outputNeurons * Math.pow((double) inputNeurons / outputNeurons, 1.0 / 3.0));
		ActivationSigmoid activation = new ActivationSigmoid();
		networkEncog = new BasicNetwork();
		networkEncog.addLayer(new BasicLayer(activation, true, inputNeurons));
		networkEncog.addLayer(new BasicLayer(activation, true, firstHidden));
		networkEncog.addLayer(new BasicLayer(activation, true, secondHidden));
		networkEncog.addLayer(new BasicLayer(activation, true, outputNeurons));
		networkEncog.getStructure().finalizeStructure();
		networkEncog.reset();

		double[][] inputs = new double[patterns.size()][inputNeurons];
		double[][] expected = new double[patterns.size()][26];
		for (int i = 0; i < patterns.size(); i++) {
			double[] source = patterns.get(i).getPattern();
			for (int l = 0; l < source.length; l++) {
				inputs[i][l] = round(source[l], 5);
			}
			// One-hot encode the target letter ('A' -> index 0). Java arrays are
			// zero-initialized, so only the hot element needs writing; the guard
			// keeps the original behavior (all-zero row) for out-of-range letters.
			int target = patterns.get(i).getLetter() - 65;
			if (target >= 0 && target < outputNeurons)
				expected[i][target] = 1;
		}
		MLDataSet trainingSet = new BasicMLDataSet(inputs, expected);

		final Backpropagation train = new Backpropagation(networkEncog, trainingSet);
		train.setThreadCount(2);
		int epoch = 1;
		double lastError = 0;
		do {
			train.iteration();
			// Exact bitwise equality is deliberate: an unchanged error means
			// training has stalled, so further iterations are pointless.
			if (Double.compare(lastError, train.getError()) == 0) {
				break;
			}
			if (epoch % 100 == 0) {
				System.out.println("Epoch #" + epoch + " Error:" + train.getError());
			}
			epoch++;
			lastError = train.getError();

		} while (train.getError() > 0.001 && epoch < 100000);
		train.finishTraining();
	}

	/** Powers of ten: index i holds 10^i, supporting precisions 0-8. */
	static long[] TENS = new long[] { 1, 10, 100, 1000, 10000, 100000, 1000000, 10000000, 100000000 };

	/**
	 * Rounds {@code v} half-away-from-zero to the given number of decimal places.
	 *
	 * @param v         value to round
	 * @param precision number of decimal places, 0-8 inclusive
	 * @return the rounded value
	 * @throws ArrayIndexOutOfBoundsException if precision is outside 0-8
	 */
	public static double round(double v, int precision) {
		long t = TENS[precision];
		return (double) (long) (v > 0 ? v * t + 0.5 : v * t - 0.5) / t;
	}
}
