/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */

package babydisco.NN;

import java.util.ArrayList;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.ml.data.MLData;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLData;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.train.MLTrain;
import org.encog.ml.train.strategy.Greedy;
import org.encog.ml.train.strategy.HybridStrategy;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.layers.BasicLayer;
import org.encog.neural.networks.training.CalculateScore;
import org.encog.neural.networks.training.TrainingSetScore;
import org.encog.neural.networks.training.anneal.NeuralSimulatedAnnealing;
import org.encog.neural.networks.training.propagation.back.Backpropagation;

/**
 *
 * @author Stefan
 */
/**
 * A time-delay wrapper around a three-layer Encog feed-forward network.
 * Each prediction is made from the last {@code delay} samples of a series,
 * where {@code delay} equals the network's input-layer width. All values are
 * multiplied by a fixed {@link #scale} factor before entering the network and
 * divided by it on the way out.
 *
 * @author Stefan
 */
public class DelayedNetwork {

    /** The wrapped Encog network. Public so callers can persist or inspect it. */
    public BasicNetwork network;

    /** Sliding-window size; always kept in sync with the network's input count. */
    private int delay = 0;

    /** Fixed scaling factor applied to raw series values (and removed from outputs). */
    private final int scale = 100000;

    /**
     * Builds a sigmoid network with one hidden layer and a single output neuron.
     * The input-layer width doubles as the time-delay window size.
     *
     * @param inputNeurons  number of input neurons, i.e. how many past samples
     *                      feed one prediction
     * @param hiddenNeurons number of neurons in the hidden layer
     */
    public DelayedNetwork(int inputNeurons, int hiddenNeurons) {
        delay = inputNeurons;
        network = new BasicNetwork();
        network.addLayer(new BasicLayer(new ActivationSigmoid(), true, inputNeurons));
        network.addLayer(new BasicLayer(new ActivationSigmoid(), true, hiddenNeurons));
        network.addLayer(new BasicLayer(new ActivationSigmoid(), true, 1));
        network.getStructure().finalizeStructure();
        // NOTE(review): network.reset() is never called; depending on the Encog
        // version the weights may start unrandomized -- confirm before training.
    }

    /**
     * Wraps an existing network; the delay window is taken from its input count.
     *
     * @param n a finalized Encog network (expected to have a single output neuron)
     */
    public DelayedNetwork(BasicNetwork n) {
        network = n;
        delay = network.getInputCount();
    }

    /**
     * Converts two parallel series into a sliding-window training set. Sample
     * {@code k} holds the {@code delay} most recent inputs (newest first in
     * column 0) and the ideal value aligned with the newest input. All values
     * are multiplied by {@link #scale}.
     *
     * @param input raw input series
     * @param ideal desired output series, same length as {@code input}
     * @return the windowed, scaled training set
     * @throws IllegalArgumentException if the series lengths differ, or if they
     *         are shorter than the delay window (too short for a single sample)
     */
    private MLDataSet makeTrainingDelayedMLDataSet(ArrayList<Double> input, ArrayList<Double> ideal) {
        // Fail fast: the old code printed a message and returned null, which
        // surfaced later as an uninformative NullPointerException in train().
        if (input.size() != ideal.size()) {
            throw new IllegalArgumentException(
                    "Input and ideal sizes don't match: " + input.size() + " vs " + ideal.size());
        }
        delay = network.getInputCount();
        if (input.size() < delay) {
            // The old code hit a NegativeArraySizeException here.
            throw new IllegalArgumentException(
                    "Need at least " + delay + " samples, got " + input.size());
        }

        int samples = input.size() - delay + 1;
        double[][] in = new double[samples][delay];
        double[][] id = new double[samples][1];

        for (int i = delay - 1; i < input.size(); i++) {
            // Window is stored newest-first: column j holds the value j steps back.
            for (int j = 0; j < delay; j++) {
                in[i - delay + 1][j] = input.get(i - j) * scale;
            }
            id[i - delay + 1][0] = ideal.get(i) * scale;
        }

        return new BasicMLDataSet(in, id);
    }

    /**
     * Trains the network with backpropagation, switching to simulated annealing
     * (via Encog's hybrid strategy) when plain backprop stops improving.
     *
     * @param input  raw input series
     * @param ideal  desired outputs, same length as {@code input}
     * @param epochs number of training iterations to run
     * @return the training error after the final iteration
     * @throws IllegalArgumentException if the series lengths differ or are
     *         shorter than the delay window
     */
    public double train(ArrayList<Double> input, ArrayList<Double> ideal, int epochs) {
        MLDataSet training = makeTrainingDelayedMLDataSet(input, ideal);

        CalculateScore score = new TrainingSetScore(training);
        MLTrain trainMain = new Backpropagation(network, training, 0.000001, 0.0);
        MLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

        // Greedy keeps only improving steps; the hybrid strategy falls back to
        // annealing when backpropagation is no longer improving.
        trainMain.addStrategy(new Greedy());
        trainMain.addStrategy(new HybridStrategy(trainAlt));

        // Fixed off-by-one: the old loop used "epoch <= epochs" and therefore
        // ran epochs + 1 iterations.
        for (int epoch = 0; epoch < epochs; epoch++) {
            trainMain.iteration();
            System.out.println("Training Epoch #" + epoch
                    + " Error:" + trainMain.getError());
        }
        return trainMain.getError();
    }

    /**
     * Runs the trained network over a whole series. Output {@code k} is the
     * prediction from the window ending at input {@code k}; the first
     * {@code delay - 1} positions have no full window of history and are
     * padded with {@code 0.0}. Values are scaled up on the way in and scaled
     * back down on the way out.
     *
     * @param input raw input series
     * @return predictions, same length as {@code input}
     * @throws IllegalArgumentException if the series is shorter than the delay
     *         window
     */
    public Double[] computeSet(Double[] input) {
        delay = network.getInputCount();
        // The old code hit ArrayIndexOutOfBounds / NegativeArraySize here when
        // the series was shorter than the window.
        if (input.length < delay) {
            throw new IllegalArgumentException(
                    "Need at least " + delay + " samples, got " + input.length);
        }

        int samples = input.length - delay + 1;
        double[][] in = new double[samples][delay];
        Double[] out = new Double[input.length];

        // Pad positions with insufficient history (index delay - 1 is padded
        // too, matching the original, but is overwritten by the first result).
        for (int i = 0; i < delay; i++) {
            out[i] = 0.0;
        }

        // Build the same newest-first windows used during training.
        for (int i = delay - 1; i < input.length; i++) {
            for (int j = 0; j < delay; j++) {
                in[i - delay + 1][j] = input[i - j] * scale;
            }
        }

        double[] outp = new double[1];
        for (int i = 0; i < samples; i++) {
            network.compute(in[i], outp);
            out[i + delay - 1] = outp[0] / scale;
        }

        return out;
    }
}
