package ppOO;

import java.text.DecimalFormat;
import java.util.ArrayList;

public class NeuralNetwork extends General {

   // Target output vector for the current training example (stored by setNetworkInput).
   private double[] desired;
   // Input vector for the current training example.
   private double[] input;
   // Step size for weight updates; overwritten by the constructor.
   private double learningRate = 0.1;
   // Running sum of per-example squared errors; see calcError() and resetNetworkCycleError().
   private double accumulatedError = 0;

   // Layers in feed-forward order: index 0 is the first hidden layer and
   // index (NNLayers.size() - 1) is the output layer.
   private ArrayList<NeuronLayer> NNLayers;

   /**
    * Creates an empty network with the given learning rate. Layers are added
    * afterwards via {@link #addLayer(int, double[][])} and the network is
    * sealed with {@link #finalise()}.
    *
    * @param learningRate step size used by every weight update
    */
   public NeuralNetwork(double learningRate) {
      NNPrintln("Initialising Neural Network...\n Learning Rate: "
            + learningRate);
      NNLayers = new ArrayList<NeuronLayer>();
      setLearningRate(learningRate);
   }

   /**
    * Appends one layer of {@code numberOfNeurons} neurons to the network.
    *
    * @param numberOfNeurons number of neurons in the new layer
    * @param layerWeights    initial weight matrix for the layer's neurons
    */
   public void addLayer(int numberOfNeurons,
         double[][] layerWeights) {
      NNPrintln("Adding Hidden Layer to Neural Network ("
            + NNLayers.size() + ")");
      NNLayers.add(new NeuronLayer(numberOfNeurons,
            layerWeights, NNLayers.size()));
      // BUG FIX: a leftover "numberOfNeurons == 1" branch used to add the
      // SAME layer a second time. It compensated for a commented-out loop
      // over (numberOfNeurons - 1) that no longer exists, so single-neuron
      // layers were silently duplicated.
   }

   /**
    * Stores the current training example and feeds the input vector to the
    * first layer. The forward pass itself is performed later by
    * {@link #calcNetworkOutput()}.
    *
    * @param inputArray network input vector
    * @param desired    target output vector for this example
    */
   public void setNetworkInput(double[] inputArray,
         double[] desired) {
      this.input = inputArray;
      // BUG FIX: the desired vector was previously discarded, leaving the
      // field permanently null even though finalise() passes this.desired
      // back into this method.
      this.desired = desired;
      this.NNLayers.get(0).setInput(input);
   }

   /**
    * Propagates the current input forward through every layer in order.
    *
    * @return the output vector of the last (output) layer, or null when the
    *         network has no layers
    */
   public double[] calcNetworkOutput() {
      double[] nxtLayerInput = null;
      for (NeuronLayer layer : NNLayers) {
         if (nxtLayerInput != null) {
            // Past the first hidden layer: feed the previous layer's output in.
            layer.setInput(nxtLayerInput, "");
         }
         nxtLayerInput = layer.getLayerOutput();
      }
      return nxtLayerInput;
   }

   /**
    * Runs one training step: forward pass, error accumulation, backpropagation
    * of the error-signal (delta) terms, and a weight update for every layer.
    *
    * @param inputArray input vector for this example
    * @param desired    target output vector for this example
    * @return the network's (pre-update) output for this example
    */
   public double[] train(double[] inputArray,
         double[] desired) {
      double[] output;
      NeuronLayer lastLayer = this.NNLayers.get(NNLayers
            .size() - 1);
      double[] errorSignalTerm = new double[lastLayer
            .getLayerNeurons().size()];
      double[] errorSig2 = null;

      setNetworkInput(inputArray, desired);
      output = calcNetworkOutput();
      // (A former "TLU Output" loop here was a no-op - it assigned each
      // output element to itself - and has been removed. The TLU() helper
      // below remains available if hard thresholding is ever wanted.)

      // Accumulate this example's squared error into the running total.
      accumulatedError += calcError(output, desired);

      if (desired.length != output.length) {
         System.err
               .println("err: NeuralNetwork::Train - desired array length("
                     + desired.length
                     + ") != output array length("
                     + output.length + ")");
      } else {
         NNPrintln("-------------------------------");
         NNPrintln("OUTPUT\t == "
               + printArray(output) + " \t DESIRED \t == "
               + printArray(desired)
               + " \t OUTPUT ERROR \t == "
               + accumulatedError);
      }

      NNPrintln("-------------------------------");
      // End output error calculations

      // Backpropagate: compute delta terms from the output layer backwards.
      for (int layerIndex = (NNLayers.size() - 1); layerIndex >= 0; layerIndex--) {
         NeuronLayer cLayer = NNLayers.get(layerIndex);
         if (cLayer.isOutputLayer()) {
            errorSignalTerm = calcErrorSig(output, desired);

         } else {
            // NOTE(review): errorSignalTerm always holds the OUTPUT layer's
            // deltas; with more than one hidden layer the deeper layers'
            // deltas are never chained through errorSig2. Confirm this is
            // intended before adding a second hidden layer.
            NeuronLayer nLayer = this.getNextLayer(cLayer);
            // size - 1: the trailing bias neuron gets no delta of its own.
            errorSig2 = new double[cLayer.getLayerNeurons()
                  .size() - 1];

            for (ContPerceptron curLyrNeuron : cLayer
                  .getLayerNeurons()) {

               if (curLyrNeuron.getPerceptronIndex() == (cLayer
                     .getLayerNeurons().size() - 1)) {
                  break; // bias neuron(input) doesnt need this
               } else {

                  // Sum delta_next * weight(this -> next) over the next
                  // layer's non-bias neurons.
                  for (ContPerceptron nxtLyrNeuron : nLayer
                        .getLayerNeurons()) {
                     if ((nxtLyrNeuron.getPerceptronIndex() == (nLayer
                           .getLayerNeurons().size() - 1))
                           && nxtLyrNeuron.isInputBias()) {
                        break; // bias neuron(input) doesnt need this.
                     } else {
                        errorSig2[curLyrNeuron
                              .getPerceptronIndex()] += errorSignalTerm[nxtLyrNeuron
                              .getPerceptronIndex()]
                              * nxtLyrNeuron.getWeights()[curLyrNeuron
                                    .getPerceptronIndex()];
                     }
                  }
                  // Scale by the activation derivative 0.5 * (1 - out^2).
                  errorSig2[curLyrNeuron
                        .getPerceptronIndex()] = (errorSig2[curLyrNeuron
                        .getPerceptronIndex()] * 0.5 * ((1 - Math
                        .pow(
                              cLayer.getLayerOutput()[curLyrNeuron
                                    .getPerceptronIndex()],
                              2))));
               }

            }
            NNPrintln("deltab"
                  + printArray(errorSig2));

         }

      }
      // Apply the weight updates, output layer first.
      for (int layerIndex = (NNLayers.size() - 1); layerIndex >= 0; layerIndex--) {
         NeuronLayer cLayer = NNLayers.get(layerIndex);
         if (cLayer.isOutputLayer()) {
            updateWeightings(errorSignalTerm, cLayer);
         } else {
            updateWeightings(errorSig2, cLayer);
         }
      }

      return output;

   }

   /**
    * Applies the delta-rule update to every weight of every non-bias neuron
    * in the given layer (logging the prospective values first) and writes the
    * new weights back into each neuron.
    *
    * @param errorSignalTerm delta term per neuron index in this layer
    * @param cLayer          layer whose weights are updated in place
    */
   private void updateWeightings(double[] errorSignalTerm,
         NeuronLayer cLayer) {
      for (ContPerceptron neuron : cLayer.getLayerNeurons()) {
         double[] newWeights = neuron.getWeights();
         int weightno = 0;

         for (double weight : neuron.getWeights()) {
            // Skip the trailing bias neuron in hidden layers; the output
            // layer has none (removed in finalise()), so update everything.
            if (neuron.getPerceptronIndex() < (cLayer
                  .getLayerNeurons().size() - 1)
                  || cLayer.isOutputLayer()) {
               NNPrintln("updateWeights("
                     + neuron.getPerceptronIndex()
                     + ": "
                     + fourDP(weight)
                     + ")::\t"
                     + fourDP(weight
                           + (getLearningRate()
                                 * errorSignalTerm[neuron
                                       .getPerceptronIndex()]
                                 * 0.5 * (1 - Math
                                 .pow(
                                       cLayer
                                             .getLayerOutput()[neuron
                                             .getPerceptronIndex()],
                                       2))))
                     + "\t\tErrorSignal\t"
                     + errorSignalTerm[neuron
                           .getPerceptronIndex()]
                     + "\tneuronInput\t"
                     + neuron.getInput()[weightno]);
               if (cLayer.isOutputLayer()) {
                  // NOTE(review): the derivative here is taken at
                  // input[index] (the NETWORK input), not at the layer's
                  // output as in the hidden-layer branch - confirm intended.
                  weight = weight
                        - (getLearningRate()
                              * errorSignalTerm[neuron
                                    .getPerceptronIndex()] * derivativeActivationFunction(input[neuron
                              .getPerceptronIndex()]));
               } else {
                  weight = weight
                        - (getLearningRate()
                              * errorSignalTerm[neuron
                                    .getPerceptronIndex()] * derivativeActivationFunction(cLayer
                              .getLayerOutput()[neuron
                              .getPerceptronIndex()]));
               }
            }
            newWeights[weightno] = weight;
            weightno++;
         }
         neuron.setWeights(newWeights);
      }

   }

   /**
    * Computes the summed squared error 0.5 * sum((desired - output)^2).
    *
    * @param output  actual network output
    * @param desired target output (must be at least output.length long)
    * @return the squared-error value for this example
    */
   private double calcError(double[] output,
         double[] desired) {

      double error = 0;
      for (int i = 0; i < (output.length); i++) {

         error += Math.pow(desired[i]
               - output[i], 2);
      }
      error = 0.5 * error;
      NNPrintln("calcError:: error\t"
            + error);

      return error;
   }

   /**
    * Computes the output layer's delta terms:
    * (desired - output) * 0.5 * (1 - output^2) per output neuron.
    *
    * @param output  actual output of the last layer
    * @param desired target output vector
    * @return delta term per output-neuron index
    */
   private double[] calcErrorSig(double[] output,
         double[] desired) {
      double[] errorSignalTerm = new double[output.length];
      NeuronLayer lastLayer = this.NNLayers.get(NNLayers
            .size() - 1);
      for (ContPerceptron neuron : lastLayer
            .getLayerNeurons()) {
         errorSignalTerm[neuron.getPerceptronIndex()] = ((desired[neuron
               .getPerceptronIndex()]) - (output[neuron
               .getPerceptronIndex()]))
               * 0.5
               * ((1 - Math.pow(output[neuron
                     .getPerceptronIndex()], 2)));
      }

      NNPrintln("calcErrorSig:: delta\t"
            + (printArray(errorSignalTerm)));
      return errorSignalTerm;
   }

   /**
    * Returns the position of the given layer within the network, or -1 when
    * the layer is not part of this network.
    */
   private int getCurrentLayerIndex(NeuronLayer currentLayer) {
      int i = 0;
      for (NeuronLayer layer : NNLayers) {
         if (layer.equals(currentLayer)) {
            return i;
         }
         i++;
      }
      return -1;
   }

   /** Returns the layer after {@code currentLayer}, or null at the output layer. */
   private NeuronLayer getNextLayer(NeuronLayer currentLayer) {
      int output = getCurrentLayerIndex(currentLayer);
      if (output < (NNLayers.size() - 1))
         return NNLayers.get(output + 1);
      return null;
   }

   /** Returns the layer before {@code currentLayer}, or null at the first layer. (Currently unused.) */
   private NeuronLayer getPrevLayer(NeuronLayer currentLayer) {
      int output = getCurrentLayerIndex(currentLayer);
      if (output > 0)
         return NNLayers.get(output - 1);
      return null;
   }

   /** @return the learning rate in use for weight updates */
   public double getLearningRate() {
      return learningRate;
   }

   /** @return the squared error accumulated since the last reset */
   public double getAccumulatedError() {
      return accumulatedError;
   }

   /** Clears the accumulated error, typically at the start of a training cycle. */
   public void resetNetworkCycleError() {
      this.accumulatedError = 0;
   }

   /**
    * Threshold Logic Unit: maps a raw output to -1 (negative) or +1
    * (non-negative). Currently unused by the training path.
    */
   private double TLU(double rawOutput) {
      if (rawOutput < 0) {
         return -1;
      } else {
         return 1;
      }
   }

   /** Sets the learning rate used by all subsequent weight updates. */
   private void setLearningRate(double inputLearningRate) {
      this.learningRate = inputLearningRate;
   }

   /**
    * Dumps every layer's neuron outputs and weights (4 decimal places) to the
    * debug output, marking next-layer input-bias neurons.
    */
   public void printNeuralNetwork() {
      println("-- Printing Neural Network --");
      DecimalFormat FourDP = new DecimalFormat("#0.0000");
      for (int layerCount = 0; layerCount < (NNLayers
            .size()); layerCount++) {
         println("Layer ("
               + layerCount + "/" + (NNLayers.size() - 1)
               + ")");
         for (int neuronCount = 0; neuronCount < (NNLayers
               .get(layerCount).getLayerNeurons().size()); neuronCount++) {
            if (NNLayers.get(layerCount).getLayerNeurons()
                  .get(neuronCount).isInputBias() == true) {
               println("("
                     + neuronCount
                     + "/"
                     + (NNLayers.get(layerCount)
                           .getLayerNeurons().size() - 1)
                     + ")\t\t"
                     + FourDP.format(
                           NNLayers.get(layerCount)
                                 .getLayerNeurons().get(
                                       neuronCount)
                                 .calcOutput()).toString()
                     + "\t(next layer input bias)");
            } else {
               println("("
                     + neuronCount
                     + "/"
                     + (NNLayers.get(layerCount)
                           .getLayerNeurons().size() - 1)
                     + ")\t\t"
                     + FourDP.format(
                           NNLayers.get(layerCount)
                                 .getLayerNeurons().get(
                                       neuronCount)
                                 .calcOutput()).toString()
                     + "\t|Weights| "
                     + printArray(NNLayers.get(layerCount)
                           .getLayerNeurons().get(
                                 neuronCount).getWeights()));
            }
         }
      }
      NNPrintln("-----------------------------");

   }

   /**
    * Seals the network: strips the input-bias neuron from the last layer,
    * marks that layer as the output layer, runs one forward pass, and prints
    * the network.
    */
   public void finalise() {
      NeuronLayer lastlayer = this.NNLayers
            .get(this.NNLayers.size() - 1);

      lastlayer.remLastNeuron(); // input bias
      lastlayer.setOutputLayer(true);
      // NOTE(review): input/desired are still null unless setNetworkInput()
      // was called before finalise() - confirm callers always do so.
      this.setNetworkInput(input, desired);
      calcNetworkOutput();

      printNeuralNetwork();
      NNPrintln("Finished Setting up Neural Network - Begin Learning\n\n\n " +
      		"-----------------------------------------------------");

   }

   /* --------System Debug Code----------------------- */

   /** Debug print with trailing newline; no-op unless DEBUG is set. */
   private void NNPrintln(String output) {
      output += "\n";
      NNPrint(output);
   }

   /** Debug print prefixed with the component tag; no-op unless DEBUG is set. */
   private void NNPrint(String output) {
      if (DEBUG)
         System.out.print("DEBUG Neural Network:\t"
               + output);
   }

}
