/*
 * To change this template, choose Tools | Templates
 * and open the template in the editor.
 */
package javaapplication1;

import java.util.ArrayList;
import java.util.List;

/**
 *
 * @author jonathan
 */
/**
 * A simple fully-connected feed-forward neural network.
 *
 * <p>Topology (input size, output size, hidden layer count/width) is read
 * from the global {@link Config} at construction time. Weights start in
 * whatever state {@link Nlayer} initializes them to (random, per the
 * original author's note).
 */
public class NeuralNet {

    // Network dimensions, snapshotted from Config in the constructor.
    // Never reassigned afterwards, so they are final.
    private final int numInputs;
    private final int numOutputs;
    private final int numHiddenLayers;
    private final int numNeuronsPerLayer;

    // Layers in feed-forward order: input layer, hidden layers, output layer.
    // Package-private on purpose: putWeights() and callers in this package
    // reach into it directly.
    List<Nlayer> layers = new ArrayList<Nlayer>();

    /**
     * Builds a network whose shape is taken from {@link Config}.
     */
    NeuralNet() {
        numInputs = Config.numInputs;
        numOutputs = Config.numOutputs;
        numHiddenLayers = Config.numHiddenLayers;
        numNeuronsPerLayer = Config.numNeuronsPerHiddenLayer;

        createANN();
    }

    /**
     * Creates the layer list (initial weight state is random, per Nlayer).
     *
     * <p>With zero hidden layers the net collapses to a single
     * input-to-output layer; otherwise it is
     * input -> (numHiddenLayers - 1 hidden-to-hidden links) -> output.
     */
    private void createANN() {
        if (numHiddenLayers == 0) {
            // Degenerate case: inputs map straight to outputs.
            layers.add(new Nlayer(numInputs, numOutputs));
        } else {
            // Input layer feeding the first hidden layer.
            layers.add(new Nlayer(numInputs, numNeuronsPerLayer));

            // Hidden-to-hidden connections. The first hidden layer is already
            // covered above, hence numHiddenLayers - 1 iterations.
            for (int i = 0; i < numHiddenLayers - 1; i++) {
                layers.add(new Nlayer(numNeuronsPerLayer, numNeuronsPerLayer));
            }

            // Final hidden layer feeding the outputs.
            layers.add(new Nlayer(numNeuronsPerLayer, numOutputs));
        }
    }

    /**
     * Dumps the full network structure (every weight of every neuron of
     * every layer) to standard out. Debugging aid only.
     */
    public void printStructure() {
        for (Nlayer layer : layers) {
            System.out.println("Layer");
            for (Neuron neuron : layer.neurons) {
                System.out.println("Neuron:");
                for (double wt : neuron.weights) {
                    System.out.print(" " + wt);
                }
                System.out.println("\n");
            }
        }
    }

    /**
     * Flattens every weight in the network into one list, in layer order
     * then neuron order then weight order — the same order that
     * {@link #putWeights(List)} consumes.
     *
     * @return a new mutable list containing all weights
     */
    public List<Double> getWeights() {
        List<Double> weights = new ArrayList<Double>();
        for (Nlayer layer : layers) {
            for (Neuron neuron : layer.neurons) {
                // addAll replaces the original per-element copy loop.
                weights.addAll(neuron.weights);
            }
        }
        return weights;
    }

    /**
     * Builds a NEW network with the same topology as this one, whose weights
     * are taken from the given flat list (ordering as produced by
     * {@link #getWeights()}). This network is not modified.
     *
     * @param weights flat weight list; must contain exactly
     *                {@link #getNumWeights()} entries
     * @return a fresh network carrying the supplied weights
     * @throws IllegalArgumentException if the list length does not match —
     *         checked up front so we never leave the new net half-populated
     *         (the original code cleared weight lists before crashing with
     *         an IndexOutOfBoundsException on a short input)
     */
    public NeuralNet putWeights(List<Double> weights) {
        int expected = getNumWeights();
        if (weights.size() != expected) {
            throw new IllegalArgumentException(
                    "putWeights: got " + weights.size()
                    + " weights, expected " + expected);
        }

        NeuralNet ret = new NeuralNet();
        int i = 0;
        int l = 0;
        for (Nlayer layer : layers) {
            int n = 0;
            for (Neuron neuron : layer.neurons) {
                // Replace the freshly-constructed (random) weights of the
                // matching neuron in the new net, preserving per-neuron counts.
                List<Double> target = ret.layers.get(l).neurons.get(n).weights;
                target.clear();
                for (int w = 0; w < neuron.weights.size(); w++) {
                    target.add(weights.get(i++));
                }
                n++;
            }
            l++;
        }

        return ret;
    }

    /**
     * @return the total number of weights across all layers and neurons
     */
    public int getNumWeights() {
        int count = 0;
        for (Nlayer layer : layers) {
            for (Neuron neuron : layer.neurons) {
                // Sum list sizes directly instead of iterating every weight.
                count += neuron.weights.size();
            }
        }
        return count;
    }

    /**
     * Feeds the inputs forward through every layer and returns the network's
     * outputs.
     *
     * <p>Convention used here: any weight beyond the input count is treated
     * as a bias and added directly to the sum (i.e. implicitly multiplied
     * by 1).
     *
     * @param inputs exactly {@code numInputs} values
     * @return one activation value per output neuron, or an EMPTY list when
     *         the input size is wrong (original best-effort behavior,
     *         preserved — callers may rely on the empty-list signal)
     */
    public List<Double> update(List<Double> inputs) {
        List<Double> outputs = new ArrayList<Double>();

        if (inputs.size() != numInputs) {
            // Preserve the original diagnostic + empty-result behavior.
            System.out.println("inputsize=" + inputs.size() + " expected=" + numInputs);
            return outputs;
        }

        for (Nlayer layer : layers) {
            if (!outputs.isEmpty()) {
                // Outputs of the previous layer become this layer's inputs.
                // A fresh list is allocated (not cleared) because `inputs`
                // now aliases the old `outputs`.
                inputs = outputs;
                outputs = new ArrayList<Double>();
            }

            for (Neuron neuron : layer.neurons) {
                int index = 0;
                double sum = 0;

                for (double wt : neuron.weights) {
                    if (index < inputs.size()) {
                        sum += wt * inputs.get(index);
                        index++;
                    } else {
                        // Trailing weight(s) act as a bias term.
                        sum += wt;
                    }
                }

                outputs.add(ActivationFunction(sum));
            }
        }

        return outputs;
    }

    /**
     * Activation dispatch point — kept as a seam so alternative functions
     * (e.g. {@link #TangentalSigmoid(double)}) can be swapped in later.
     */
    private Double ActivationFunction(double sum) {
        return Sigmoid(sum);
    }

    /**
     * Logistic sigmoid; output range (0, 1).
     */
    private Double Sigmoid(double sum) {
        return 1.0 / (1.0 + Math.exp(-sum));
    }

    /**
     * Hyperbolic-tangent sigmoid; output range (-1, 1). Unused at present,
     * retained for experimentation (may encourage faster convergence).
     */
    private Double TangentalSigmoid(double sum) {
        return (Math.exp(sum) - Math.exp(-sum)) / (Math.exp(sum) + Math.exp(-sum));
    }

}