package babydisco.NN;

import java.io.EOFException;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.ObjectOutputStream;
import java.util.ArrayList;
import org.encog.Encog;
import org.encog.engine.network.activation.ActivationSigmoid;
import org.encog.engine.network.activation.ActivationTANH;
import org.encog.mathutil.error.ErrorCalculation;
import org.encog.mathutil.error.ErrorCalculationMode;
import org.encog.ml.data.MLData;
import org.encog.ml.data.MLDataSet;
import org.encog.ml.data.basic.BasicMLData;
import org.encog.ml.data.basic.BasicMLDataSet;
import org.encog.ml.train.MLTrain;
import org.encog.ml.train.strategy.Greedy;
import org.encog.ml.train.strategy.HybridStrategy;
import org.encog.ml.train.strategy.StopTrainingStrategy;
import org.encog.neural.networks.BasicNetwork;
import org.encog.neural.networks.training.CalculateScore;
import org.encog.neural.networks.training.TrainingSetScore;
import org.encog.neural.networks.training.anneal.NeuralSimulatedAnnealing;
import org.encog.neural.networks.training.propagation.Propagation;
import org.encog.neural.networks.training.propagation.back.Backpropagation;
import org.encog.neural.pattern.ElmanPattern;
import org.encog.neural.pattern.FeedForwardPattern;

/**
 * Creates an Elman Network to solve problems over time
 * @author frans
 */
/**
 * Creates an Elman recurrent network to solve problems over time.
 * Wraps Encog's {@link ElmanPattern}/{@link BasicNetwork} with helpers for
 * training and for computing outputs on single-input (ECG) data. Values are
 * multiplied by {@link #scale} on the way into the network; see the
 * individual compute methods for which ones undo that scaling.
 *
 * @author frans
 */
public class ElmanNetwork
{

    // Pattern used to generate the network (assigned by the int constructor).
    private ElmanPattern pattern;
    // Training data; must be set via setTraining() before calling trainNetwork.
    private MLDataSet trainingSet;
    // The generated (or externally supplied) Encog network.
    public BasicNetwork network;

    /**
     * Inputs and ideal outputs are multiplied by this factor before entering
     * the network (and divided out again in {@link #computeSet}), presumably
     * so the small sample values fall in a usable range for the sigmoid.
     */
    private static final int scale = 100000;

    /**
     * Creates an Elman recurrent network with 1 input node, n internal neurons and 1 output node.
     * Especially for the ECG filtering problem
     * @param internalNeurons amount of internal (hidden-layer) neurons
     */
    public ElmanNetwork(int internalNeurons)
    {
        // BUG FIX: previously a local "pattern" shadowed the field, leaving
        // this.pattern null forever; assign the field instead.
        pattern = new ElmanPattern();
        pattern.setActivationFunction(new ActivationSigmoid());
        pattern.setInputNeurons(1);
        pattern.addHiddenLayer(internalNeurons);
        // Need more layers ?? -> add them here!
        pattern.setOutputNeurons(1);
        network = (BasicNetwork) pattern.generate();
    }

    /**
     * To continue working with a previously used network
     * @param network an already constructed (typically trained) network
     */
    public ElmanNetwork(BasicNetwork network)
    {
        this.network = network;
    }

    /**
     * Sets the trainingset used for training the network
     * (Works best in conjunction with {@link #convertTrainData})
     * @param trainingSet the scaled input/ideal pairs to train on
     */
    public void setTraining(MLDataSet trainingSet)
    {
        this.trainingSet = trainingSet;
    }

    /**
     * Builds the trainer shared by both trainNetwork variants:
     * backpropagation as the main method, with a Greedy strategy and a
     * simulated-annealing fallback (HybridStrategy) for when backprop
     * stops improving.
     * @return the configured main trainer
     */
    private MLTrain createTrainer()
    {
        CalculateScore score = new TrainingSetScore(trainingSet);

        MLTrain trainMain = new Backpropagation(network, trainingSet, 0.000001, 0.0);
        MLTrain trainAlt = new NeuralSimulatedAnnealing(network, score, 10, 2, 100);

        trainMain.addStrategy(new Greedy());
        trainMain.addStrategy(new HybridStrategy(trainAlt)); // When the first doesnt work
        return trainMain;
    }

    /**
     * Train the network, uses a StopTrainingStrategy to decide when it does not
     * improve anymore
     * @return final error, or 0.0 when no training set was supplied
     */
    public double trainNetwork()
    {
        if (trainingSet == null)
        {
            System.err.println("Your circuit's dead, there's something wrong");
            return 0.0;
        }

        MLTrain trainMain = createTrainer();

        // Stops the training when the neural network isn't improving anymore.
        StopTrainingStrategy stop = new StopTrainingStrategy();
        trainMain.addStrategy(stop);

        int epoch = 0;
        while (!stop.shouldStop())
        {
            trainMain.iteration();
            System.out.println("Training Epoch #" + epoch
                    + " Error:" + trainMain.getError());
            epoch++;
        }
        // Let Encog release any training resources.
        trainMain.finishTraining();
        return trainMain.getError();
    }

    /**
     * Trains the network for a set amount of epochs
     * @param epochs the amount of epochs it trains the network
     * @return final error rate, or 0.0 when no training set was supplied
     */
    public double trainNetwork(int epochs)
    {
        if (trainingSet == null)
        {
            System.err.println("Can you hear me major tom?");
            return 0.0;
        }

        MLTrain trainMain = createTrainer();

        int epoch = 0;
        // BUG FIX: was "epoch <= epochs", which ran one extra iteration;
        // a strict bound performs exactly `epochs` epochs as documented.
        while (epoch < epochs)
        {
            trainMain.iteration();
            System.out.println("Training Epoch #" + epoch
                    + " Error:" + trainMain.getError());
            epoch++;
        }
        trainMain.finishTraining();
        return trainMain.getError();
    }

    /**
     * Compute the actual output (once the weights have been trained)
     * use new BasicMLData(data[]) for creation of this data-point.
     * No scaling is applied here; input is fed to the network as-is.
     * @param input single input point with 1 input
     * @return MLData with a single output point
     */
    public MLData compute(MLData input)
    {
        return network.compute(input);
    }

    /**
     * Compute wrapper for ArrayLists. Inputs are multiplied by the scale
     * factor before being fed to the network; the returned values are the
     * raw (still scaled) network outputs.
     * @param input arraylist with Double objects (unscaled)
     * @return arraylist with Double objects (scaled network output)
     */
    public ArrayList<Double> compute(ArrayList<Double> input)
    {
        MLData computeInput = new BasicMLData(input.size());
        for (int i = 0; i < input.size(); i++)
        {
            computeInput.add(i, (input.get(i) * scale));
        }

        MLData computeOutput = this.compute(computeInput);
        ArrayList<Double> output = new ArrayList<Double>(computeOutput.size());

        for (int j = 0; j < computeOutput.size(); j++)
        {
            // Autoboxing instead of the deprecated new Double(...) constructor.
            output.add(computeOutput.getData(j));
        }

        return output;
    }

    /**
     * Compute wrapper for Double arrays. Inputs are multiplied by the scale
     * factor before being fed to the network; the returned values are the
     * raw (still scaled) network outputs.
     * @param input is 1 input point, an array with all the neural-inputs (in our case only 1)
     * @return the (scaled) network outputs for this point
     */
    public Double[] compute(Double[] input)
    {
        MLData computeInput = new BasicMLData(input.length);
        for (int i = 0; i < input.length; i++)
        {
            computeInput.add(i, (input[i] * scale));
        }

        MLData computeOutput = this.compute(computeInput);
        Double[] output = new Double[computeOutput.size()];

        for (int j = 0; j < computeOutput.size(); j++)
        {
            output[j] = computeOutput.getData(j);
        }

        return output;
    }

    /**
     * Computes the output for a set of input points with only 1 input per point.
     * Scaling is fully handled here: compute(Double[]) multiplies each input
     * by the scale factor exactly once and the result is divided by it again,
     * so callers work entirely in unscaled values.
     *
     * For a network with multiple inputs, this should work with Double[][].
     * @param input an array of all the single-input points (unscaled)
     * @return an array with all the single-output points (unscaled)
     */
    public Double[] computeSet(Double[] input)
    {
        Double[] output = new Double[input.length];
        Double[] point = new Double[1];

        for (int i = 0; i < input.length; i++)
        {
            // BUG FIX: the value was previously multiplied by scale here AND
            // again inside compute(Double[]), scaling the input twice. Pass
            // the raw value; compute() applies the scale factor exactly once.
            point[0] = input[i];
            Double[] outVal = this.compute(point);
            output[i] = outVal[0] / scale; // undo the scaling on the way out
        }

        return output;
    }


    /**
     * All the weights in a string (only for debugging purposes)
     * @return String with a load of weights
     */
    public String giveWeights()
    {
        return network.dumpWeights();
    }

    /**
     * Converts training data to something the Neural Net can use.
     * Both arrays are multiplied by the scale factor.
     * @param in input data
     * @param id ideal data output
     * @return Same as input but in MLData form, or null when the arrays
     *         differ in length
     */
    public static MLDataSet convertTrainData(double[] in, double[] id)
    {
        if (in.length != id.length)
        {
            System.err.println("Ground control to major tom!");
            return null;
        }

        // Because we use a single input/output, it will be only 1 high
        double[][] input = new double[in.length][1];
        double[][] ideal = new double[id.length][1];

        for (int i = 0; i < in.length; i++)
        {
            input[i][0] = in[i] * scale;
            ideal[i][0] = id[i] * scale;
        }

        return new BasicMLDataSet(input, ideal);
    }

    /**
     * Converts training data to something the Neural Net can use
     * @param data Input data: element 0 is the input list, element 1 the
     *             ideal-output list; both must hold Doubles of equal size
     * @return Same as input but in MLData form, or null when the lists
     *         differ in size
     */
    public static MLDataSet convertTrainData(ArrayList[] data)
    {
        if (data[0].size() != data[1].size())
        {
            System.err.println("Commencing countdown, engines on");
            return null;
        }

        // Because we use a single input/output, it will be only 1 high
        double[][] input = new double[data[0].size()][1];
        double[][] ideal = new double[data[1].size()][1];

        for (int i = 0; i < data[0].size(); i++)
        {
            input[i][0] = ((Double) data[0].get(i)) * scale;  // relies on unboxing
            ideal[i][0] = ((Double) data[1].get(i)) * scale;
        }

        return new BasicMLDataSet(input, ideal);
    }
}
