package neural;

/*
 * Layer.java
 *
 * Created on January 27, 2007, 11:11 PM
 *
 * @author Greg Robinson
 */

public class Layer {

    /** Number of neurons in this layer. */
    private int size;

    /**
     * N-by-2 list of connections:
     * connections[i][0] is the index of the previous layer's neuron,
     * connections[i][1] is the index of this layer's neuron.
     */
    private int[][] connections;

    /**
     * Lookup table for weights.
     * weights[previousNeuron][currentNeuron] is the connection weight,
     * i.e. the array is allocated as [numInputs][size].
     */
    private double[][] weights;

    /** TLU activation function type (one of the constants below). */
    private int activation;

    // Activation function types. Kept as char for backward compatibility
    // with existing callers of the char-typed constructors; they widen
    // to int transparently where an int is expected.
    public static final char SIGMOID = 0;
    public static final char H_TANGENT = 1;
    public static final char LINEAR = 2;
    public static final char THRESHOLD = 3;


    /**
     * Creates a layer with explicit connections and random initial weights.
     *
     * @param size number of neurons in this layer
     * @param numInputs number of inputs (neurons in the previous layer)
     * @param connections array of connections (see {@link #connections})
     * @param activation activation function type
     */
    public Layer(int size, int numInputs, int[][] connections, int activation) {
        this.size = size;
        this.connections = connections;
        this.activation = activation;
        this.weights = randomWeights(numInputs, size);
    }

    /**
     * Creates a layer with explicit connections and explicit weights.
     *
     * @param size number of neurons in this layer
     * @param numInputs number of inputs (neurons in the previous layer)
     * @param connections array of connections (see {@link #connections})
     * @param weights weight table where [previousNeuron][thisLayerNeuron]
     *   is the connection weight
     * @param activation activation function type
     */
    public Layer(
            int size,
            int numInputs,
            int[][] connections,
            double[][] weights,
            int activation
            ) {
        this.size = size;
        this.connections = connections;
        this.activation = activation;
        this.weights = weights;
    }


    /**
     * Creates a fully connected layer (full synapse) with random weights.
     *
     * @param size number of neurons in this layer
     * @param numInputs number of inputs (neurons in the previous layer)
     * @param activation activation function type
     */
    public Layer(int size, int numInputs, char activation) {
        this.size = size;
        this.activation = activation;
        this.connections = fullConnections(numInputs, size);
        this.weights = randomWeights(numInputs, size);
    }

    /**
     * Creates a fully connected layer (full synapse) with specified weights.
     *
     * @param size number of neurons in this layer
     * @param numInputs number of inputs (neurons in the previous layer)
     * @param weights weight table where [previousNeuron][thisLayerNeuron]
     *   is the connection weight
     * @param activation activation function type
     */
    public Layer(int size, int numInputs, double[][] weights, char activation) {
        this.size = size;
        this.activation = activation;
        this.weights = weights;
        this.connections = fullConnections(numInputs, size);
    }

    /**
     * Builds the full synapse: every input neuron connects to every neuron
     * of this layer.
     *
     * Fixes the original construction, which indexed the array with
     * (i+1)*(j+1) — an index that both collides for distinct (i, j) pairs
     * and runs past the end of the array at the last pair — and wrote the
     * current-layer neuron into column [0] instead of [1].
     */
    private static int[][] fullConnections(int numInputs, int size) {
        int[][] connections = new int[size * numInputs][2];
        for (int i = 0; i < numInputs; i++) {
            for (int j = 0; j < size; j++) {
                int index = i * size + j; // unique slot per (input, neuron) pair
                connections[index][0] = i; // previous layer's neuron
                connections[index][1] = j; // this layer's neuron
            }
        }
        return connections;
    }

    /** Allocates a [numInputs][size] weight table filled with values in [0, 1). */
    private static double[][] randomWeights(int numInputs, int size) {
        double[][] weights = new double[numInputs][size];
        for (int i = 0; i < numInputs; i++) {
            for (int j = 0; j < size; j++) {
                weights[i][j] = Math.random();
            }
        }
        return weights;
    }

    /**
     * The cutoff function part of the TLU.
     *
     * @param x the sum of weighted inputs going through the TLU
     * @return the activation value (unknown types fall back to the
     *   hyperbolic tangent, as in the original)
     */
    public double threshold(double x) {
        switch (this.activation) {
            case Layer.SIGMOID:
                return 1 / (1 + Math.exp(-x));
            case Layer.H_TANGENT:
                return (Math.exp(2 * x) - 1) / (Math.exp(2 * x) + 1);
            case Layer.LINEAR:
                return x;
            case Layer.THRESHOLD:
                return x > 0 ? 1 : 0;
            default:
                // Unrecognized activation type: default to hyperbolic tangent.
                return (Math.exp(2 * x) - 1) / (Math.exp(2 * x) + 1);
        }
    }

    /**
     * Receives stimula: propagates the previous layer's outputs through
     * this layer's connections and activation function.
     *
     * @param inputs the firing values of the previous layer
     * @return output values for this layer
     */
    public double[] putValues(double[] inputs) {
        double[] values = new double[this.size];

        // Accumulate each input times its connection weight into the target
        // neuron. weights is laid out [previousNeuron][currentNeuron], so it
        // must be indexed [conn[0]][conn[1]]; the original transposed the
        // indices, which reads wrong weights (and throws out-of-bounds for
        // non-square layers).
        for (int[] conn : this.connections) {
            values[conn[1]] += inputs[conn[0]] * this.weights[conn[0]][conn[1]];
        }

        // Apply the activation function to each neuron's summed input.
        for (int i = 0; i < this.size; i++) {
            values[i] = threshold(values[i]);
        }

        return values;
    }

    /** @return the number of neurons in this layer */
    public int getSize() {
        return this.size;
    }
}
